// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : target
  //  -- r3                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(r1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // JumpToExternalReference expects r0 to contain the number of arguments
  // including the receiver and the extra arguments.
  const int num_extra_args = 3;
  __ add(r0, r0, Operand(num_extra_args + 1));

  // Insert extra arguments.
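  // The argument count is kept smi-tagged while it sits on the stack so that
  // only tagged values are visible to the GC; it is untagged again right after.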
  __ SmiTag(r0);
  __ Push(r0, r1, r3);
  __ SmiUntag(r0);

  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
                             exit_frame_type == BUILTIN_EXIT);
}

// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the current native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}

// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the current native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}

void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray function should be a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function, by tail calling a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // The initial map for the builtin Array function should be a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  __ mov(r3, r1);
  // Run the native code for the Array function called as a normal function,
  // by tail calling a stub.
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Condition const cc_done = (kind == MathMaxMinKind::kMin) ? mi : gt;
  Condition const cc_swap = (kind == MathMaxMinKind::kMin) ? gt : mi;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;
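  // For kMin the accumulator starts at +Infinity and smaller values win; for
  // kMax it starts at -Infinity and larger values win. |reg| holds the value
  // whose sign bit decides the -0 vs. +0 tie-break below.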

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in r5 and the double value in d1.
  __ LoadRoot(r5, root_index);
  __ vldr(d1, FieldMemOperand(r5, HeapNumber::kValueOffset));

  Label done_loop, loop;
  __ mov(r4, r0);
  __ bind(&loop);
  {
    // Check if all parameters are done.
    __ sub(r4, r4, Operand(1), SetCC);
    __ b(lt, &done_loop);

    // Load the next parameter tagged value into r2.
    __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2));

    // Load the double value of the parameter into d2, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(r2, &convert_smi);
    __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
    __ JumpIfRoot(r3, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      DCHECK(!FLAG_enable_embedded_constant_pool);
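      // r0 (argument count) and r4 (loop index) are smi-tagged around the
      // call so that they survive as tagged values if ToNumber triggers a GC.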
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(r0);
      __ SmiTag(r4);
      __ EnterBuiltinFrame(cp, r1, r0);
      __ Push(r4, r5);
      __ mov(r0, r2);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mov(r2, r0);
      __ Pop(r4, r5);
      __ LeaveBuiltinFrame(cp, r1, r0);
      __ SmiUntag(r4);
      __ SmiUntag(r0);
      {
        // Restore the double accumulator value (d1).
        Label done_restore;
        __ SmiToDouble(d1, r5);
        __ JumpIfSmi(r5, &done_restore);
        __ vldr(d1, FieldMemOperand(r5, HeapNumber::kValueOffset));
        __ bind(&done_restore);
      }
    }
    __ b(&convert);
    __ bind(&convert_number);
    __ vldr(d2, FieldMemOperand(r2, HeapNumber::kValueOffset));
    __ b(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDouble(d2, r2);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left
    // hand side (d1) and the next parameter value on the right hand side (d2).
    Label compare_nan, compare_swap;
    __ VFPCompareAndSetFlags(d1, d2);
    __ b(cc_done, &loop);
    __ b(cc_swap, &compare_swap);
    __ b(vs, &compare_nan);

    // Left and right hand side are equal, check for -0 vs. +0.
    __ VmovHigh(ip, reg);
    __ cmp(ip, Operand(0x80000000));
    __ b(ne, &loop);

    // Result is on the right hand side.
    __ bind(&compare_swap);
    __ vmov(d1, d2);
    __ mov(r5, r2);
    __ b(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    __ bind(&compare_nan);
    __ LoadRoot(r5, Heap::kNanValueRootIndex);
    __ vldr(d1, FieldMemOperand(r5, HeapNumber::kValueOffset));
    __ b(&loop);
  }

  __ bind(&done_loop);
  // Drop all slots, including the receiver.
  __ add(r0, r0, Operand(1));
  __ Drop(r0);
  __ mov(r0, r5);
  __ Ret();
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r0.
  Label no_arguments;
  {
    __ mov(r2, r0);  // Store argc in r2.
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  }

  // 2a. Convert the first argument to a number.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r2);
    __ EnterBuiltinFrame(cp, r1, r2);
    __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, r1, r2);
    __ SmiUntag(r2);
  }

  {
    // Drop all arguments including the receiver.
    __ Drop(r2);
    __ Ret(1);
  }

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ Move(r0, Smi::kZero);
  __ Ret(1);
}

// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- r3                     : new target
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // 2. Load the first argument into r2.
  {
    Label no_arguments, done;
    __ mov(r6, r0);  // Store argc in r6.
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    __ b(&done);
    __ bind(&no_arguments);
    __ Move(r2, Smi::kZero);
    __ bind(&done);
  }

  // 3. Make sure r2 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(r2, &done_convert);
    __ CompareObjectType(r2, r4, r4, HEAP_NUMBER_TYPE);
    __ b(eq, &done_convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(r6);
      __ EnterBuiltinFrame(cp, r1, r6);
      __ Push(r3);
      __ Move(r0, r2);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Move(r2, r0);
      __ Pop(r3);
      __ LeaveBuiltinFrame(cp, r1, r6);
      __ SmiUntag(r6);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ cmp(r1, r3);
  __ b(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(r0, r1, r2, r4, r5, &new_object);
  __ b(&drop_frame_and_ret);

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r6);
    __ EnterBuiltinFrame(cp, r1, r6);
    __ Push(r2);  // first argument
    __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
            RelocInfo::CODE_TARGET);
    __ Pop(r2);
    __ LeaveBuiltinFrame(cp, r1, r6);
    __ SmiUntag(r6);
  }
  __ str(r2, FieldMemOperand(r0, JSValue::kValueOffset));

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r6);
    __ Ret(1);
  }
}

// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r0.
  Label no_arguments;
  {
    __ mov(r2, r0);  // Store argc in r2.
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  }

  // 2a. At least one argument, return r0 if it's a string, otherwise
  // dispatch to the appropriate conversion.
  Label drop_frame_and_ret, to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(r0, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(r0, r3, r3, FIRST_NONSTRING_TYPE);
    __ b(hi, &to_string);
    __ b(eq, &symbol_descriptive_string);
    __ b(&drop_frame_and_ret);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(r0, Heap::kempty_stringRootIndex);
    __ Ret(1);
  }

  // 3a. Convert r0 to a string.
  __ bind(&to_string);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r2);
    __ EnterBuiltinFrame(cp, r1, r2);
    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, r1, r2);
    __ SmiUntag(r2);
  }
  __ b(&drop_frame_and_ret);

  // 3b. Convert symbol in r0 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Drop(r2);
    __ Drop(1);
    __ Push(r0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r2);
    __ Ret(1);
  }
}

// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- r3                     : new target
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // 2. Load the first argument into r2.
  {
    Label no_arguments, done;
    __ mov(r6, r0);  // Store argc in r6.
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadRoot(r2, Heap::kempty_stringRootIndex);
    __ bind(&done);
  }

  // 3. Make sure r2 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(r2, &convert);
    __ CompareObjectType(r2, r4, r4, FIRST_NONSTRING_TYPE);
    __ b(lo, &done_convert);
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(r6);
      __ EnterBuiltinFrame(cp, r1, r6);
      __ Push(r3);
      __ Move(r0, r2);
      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
      __ Move(r2, r0);
      __ Pop(r3);
      __ LeaveBuiltinFrame(cp, r1, r6);
      __ SmiUntag(r6);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ cmp(r1, r3);
  __ b(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(r0, r1, r2, r4, r5, &new_object);
  __ b(&drop_frame_and_ret);

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r6);
    __ EnterBuiltinFrame(cp, r1, r6);
    __ Push(r2);  // first argument
    __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
            RelocInfo::CODE_TARGET);
    __ Pop(r2);
    __ LeaveBuiltinFrame(cp, r1, r6);
    __ SmiUntag(r6);
  }
  __ str(r2, FieldMemOperand(r0, JSValue::kValueOffset));

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r6);
    __ Ret(1);
  }
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
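  // Compute the code entry point: skip past the Code object header and
  // remove the heap object tag.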
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r0 : argument count (preserved for callee)
  //  -- r1 : target function (preserved for callee)
  //  -- r3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    __ SmiTag(r0);
    __ push(r0);
    // Push a copy of the target function and the new target.
    __ push(r1);
    __ push(r3);
    // Push function as parameter to the runtime call.
    __ Push(r1);

    __ CallRuntime(function_id, 1);
    __ mov(r2, r0);

    // Restore target function and new target.
    __ pop(r3);
    __ pop(r1);
    __ pop(r0);
    __ SmiUntag(r0, r0);
  }
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // the stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

namespace {

void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
                                    bool create_implicit_receiver,
                                    bool check_derived_construct) {
  Label post_instantiation_deopt_entry;

  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- r3     : new target
  //  -- cp     : context
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ SmiTag(r0);
    __ Push(cp, r0);

    if (create_implicit_receiver) {
      // Allocate the new receiver object.
      __ Push(r1, r3);
      __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
              RelocInfo::CODE_TARGET);
      __ mov(r4, r0);
      __ Pop(r1, r3);

      // ----------- S t a t e -------------
      //  -- r1: constructor function
      //  -- r3: new target
      //  -- r4: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ ldr(r0, MemOperand(sp));
    }

    __ SmiUntag(r0);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ push(r4);
      __ push(r4);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Deoptimizer re-enters stub code here.
    __ bind(&post_instantiation_deopt_entry);

    // Set up pointer to last argument.
    __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r0: number of arguments
    // r1: constructor function
    // r2: address of last argument (caller sp)
    // r3: new target
    // r4: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
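    // r4 receives the smi-tagged argument count. On 32-bit ARM a smi is the
    // value shifted left by one, so scaling it by (kPointerSizeLog2 - 1)
    // yields a byte offset, and subtracting 2 steps one smi-encoded argument.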
    __ SmiTag(r4, r0);
    __ b(&entry);
    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, r4, LSL, kPointerSizeLog2 - 1));
    __ push(ip);
    __ bind(&entry);
    __ sub(r4, r4, Operand(2), SetCC);
    __ b(ge, &loop);

    // Call the function.
    // r0: number of arguments
    // r1: constructor function
    // r3: new target
    ParameterCount actual(r0);
    __ InvokeFunction(r1, r3, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
          masm->pc_offset());
    }

    // Restore context from the frame.
    // r0: result
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    __ ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // r0: result
      // sp[0]: receiver
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(r0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CompareObjectType(r0, r1, r3, FIRST_JS_RECEIVER_TYPE);
      __ b(ge, &exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ ldr(r0, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // r0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ ldr(r1, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(r0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

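  // r1 holds the smi-tagged argument count; drop the arguments and the
  // receiver from the caller's frame before returning.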
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
  }
  __ Jump(lr);

  // Store offset of trampoline address for deoptimizer. This is the bailout
  // point after the receiver instantiation but before the function invocation.
  // We need to restore some registers in order to continue the above code.
  if (create_implicit_receiver && !is_api_function) {
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());

    // ----------- S t a t e -------------
    //  -- r0    : newly allocated object
    //  -- sp[0] : constructor function
    // -----------------------------------

    __ pop(r1);
    __ push(r0);
    __ push(r0);

    // Retrieve smi-tagged arguments count from the stack.
    __ ldr(r0, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    __ SmiUntag(r0);

    // Retrieve the new target value from the stack. This was placed into the
    // frame description in place of the receiver by the optimizing compiler.
    __ add(r3, fp, Operand(StandardFrameConstants::kCallerSPOffset));
    __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2));

    // Continue with constructor function invocation.
    __ b(&post_instantiation_deopt_entry);
  }
}

}  // namespace

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}

void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the value to pass to the generator
  //  -- r1 : the JSGeneratorObject to resume
  //  -- r2 : the resume mode (tagged)
  //  -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(r1);

  // Store input value into generator object.
  __ str(r0, FieldMemOperand(r1, JSGeneratorObject::kInputOrDebugPosOffset));
  __ RecordWriteField(r1, JSGeneratorObject::kInputOrDebugPosOffset, r0, r3,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
  __ ldr(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ mov(ip, Operand(debug_hook));
  __ ldrsb(ip, MemOperand(ip));
  __ cmp(ip, Operand(0));
  __ b(ne, &prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ mov(ip, Operand(debug_suspended_generator));
  __ ldr(ip, MemOperand(ip));
  __ cmp(ip, Operand(r1));
  __ b(eq, &prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Push receiver.
  __ ldr(ip, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
  __ Push(ip);

  // ----------- S t a t e -------------
  //  -- r1    : the JSGeneratorObject to resume
  //  -- r2    : the resume mode (tagged)
  //  -- r4    : generator function
  //  -- cp    : generator context
  //  -- lr    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r3,
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
  {
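    // The formal parameter count is smi-tagged, hence the smi-constant
    // decrement below.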
    Label done_loop, loop;
    __ bind(&loop);
    __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
    __ b(mi, &done_loop);
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ b(&loop);
    __ bind(&done_loop);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
    __ CompareObjectType(r3, r3, r3, BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kMissingBytecodeArray);
  }

  // Resume (Ignition/TurboFan) generator object.
  {
    __ ldr(r0, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r0, FieldMemOperand(
                   r0, SharedFunctionInfo::kFormalParameterCountOffset));
    __ SmiUntag(r0);
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(r3, r1);
    __ Move(r1, r4);
    __ ldr(r5, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
    __ Jump(r5);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r1, r2, r4);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(r1, r2);
    __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r1, r2);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(r1, r2);
    __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ push(r1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };

// Clobbers r2; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
  // Make r2 the space we have left. The stack might already be overflowed
  // here which will cause r2 to become negative.
  __ sub(r2, sp, r2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ cmp(r2, Operand::PointerOffsetFromSmiKey(argc));
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ cmp(r2, Operand(argc, LSL, kPointerSizeLog2));
  }
  __ b(gt, &okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r0: new.target
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  // r5-r6, r8 (if !FLAG_enable_embedded_constant_pool) and cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the
    // isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(cp, Operand(context_address));
    __ ldr(cp, MemOperand(cp));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r1, r2);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r2.
    Generate_CheckStackOverflow(masm, r3, kArgcIsUntaggedInt);

    // Remember new.target.
    __ mov(r5, r0);

    // Copy arguments to the stack in a loop.
    // r1: function
    // r3: argc
    // r4: argv, i.e. points to first arg
    Label loop, entry;
    __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
    // r2 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
    __ ldr(r0, MemOperand(r0));                           // dereference handle
    __ push(r0);                                          // push parameter
    __ bind(&entry);
    __ cmp(r4, r2);
    __ b(ne, &loop);

    // Set up new.target and argc.
    __ mov(r0, Operand(r3));
    __ mov(r3, Operand(r5));

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mov(r5, Operand(r4));
    __ mov(r6, Operand(r4));
    if (!FLAG_enable_embedded_constant_pool) {
      __ mov(r8, Operand(r4));
    }
    if (kR9Available == 1) {
      __ mov(r9, Operand(r4));
    }

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function), and
    // return.
    // Respect ABI stack constraint.
  }
  __ Jump(lr);

  // r0: result
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ ldr(args_count,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ ldr(args_count,
         FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));
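  // The parameter size is expressed in bytes (receiver included), so it can
  // be added to sp directly when dropping the arguments below.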

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments.
  __ add(sp, sp, args_count, LeaveCC);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r1: the JS function object being called.
//   o r3: the new target
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(r1);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ ldr(r0, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  Register debug_info = kInterpreterBytecodeArrayRegister;
  DCHECK(!debug_info.is(r0));
  __ ldr(debug_info, FieldMemOperand(r0, SharedFunctionInfo::kDebugInfoOffset));
  __ SmiTst(debug_info);
  // Load original bytecode array or the debug copy.
  __ ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(r0, SharedFunctionInfo::kFunctionDataOffset), eq);
  __ ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex), ne);

  // Check whether we should continue to use the interpreter.
  Label switch_to_different_code_kind;
  __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kCodeOffset));
  __ cmp(r0, Operand(masm->CodeObject()));  // Self-reference to this code.
  __ b(ne, &switch_to_different_code_kind);

  // Increment invocation count for the function.
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kFeedbackVectorOffset));
  __ ldr(r2, FieldMemOperand(r2, Cell::kValueOffset));
  __ ldr(r9, FieldMemOperand(
                 r2, FeedbackVector::kInvocationCountIndex * kPointerSize +
                         FeedbackVector::kHeaderSize));
  __ add(r9, r9, Operand(Smi::FromInt(1)));
  __ str(r9, FieldMemOperand(
                 r2, FeedbackVector::kInvocationCountIndex * kPointerSize +
                         FeedbackVector::kHeaderSize));

  // Check that the function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ SmiTst(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r0, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Reset code age.
  __ mov(r9, Operand(BytecodeArray::kNoAgeBytecodeAge));
  __ strb(r9, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kBytecodeAgeOffset));

  // Load the initial bytecode offset.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(r0, kInterpreterBytecodeOffsetRegister);
  __ Push(r3, kInterpreterBytecodeArrayRegister, r0);
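  // These slots form the fixed part of the interpreter frame; they are read
  // back elsewhere via the InterpreterFrameConstants offsets.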

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ ldr(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                               BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ sub(r9, sp, Operand(r4));
    __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
    __ cmp(r9, Operand(r2));
    __ b(hs, &ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
    __ b(&loop_check, al);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(r9);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ sub(r4, r4, Operand(kPointerSize), SetCC);
    __ b(&loop_header, ge);
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ldr(ip, MemOperand(kInterpreterDispatchTableRegister, r1, LSL,
                        kPointerSizeLog2));
  __ Call(ip);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in r0.
  LeaveInterpreterFrame(masm, r2);
  __ Jump(lr);

  // If the shared code is no longer this entry trampoline, then the underlying
  // function has been switched to a different kind of code and we heal the
  // closure by switching the code entry field over to the new code as well.
  __ bind(&switch_to_different_code_kind);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kCodeOffset));
  __ add(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ str(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(r1, r4, r5);
  __ Jump(r4);
}

static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                        Register scratch,
                                        Label* stack_overflow) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
  // Make scratch the space we have left. The stack might already be overflowed
  // here which will cause scratch to become negative.
  __ sub(scratch, sp, scratch);
  // Check if the arguments will overflow the stack.
  __ cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2));
  __ b(le, stack_overflow);  // Signed comparison.
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args, Register index,
                                         Register limit, Register scratch,
                                         Label* stack_overflow) {
  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, num_args, scratch, stack_overflow);

  // Find the address of the last argument.
  __ mov(limit, num_args);
  __ mov(limit, Operand(limit, LSL, kPointerSizeLog2));
  __ sub(limit, index, limit);

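  // Push |num_args| values onto the stack, reading from |index| and stepping
  // it down toward |limit| one pointer at a time.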
  Label loop_header, loop_check;
  __ b(al, &loop_check);
  __ bind(&loop_header);
  __ ldr(scratch, MemOperand(index, -kPointerSize, PostIndex));
  __ push(scratch);
  __ bind(&loop_check);
  __ cmp(index, limit);
  __ b(gt, &loop_header);
}

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode,
    InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- r1 : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;

  __ add(r3, r0, Operand(1));  // Add one for receiver.

  // Push the arguments. r2, r4, r5 will be modified.
  Generate_InterpreterPushArgs(masm, r3, r2, r4, r5, &stack_overflow);

  // Call the target.
  if (mode == InterpreterPushArgsMode::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                      tail_call_mode),
            RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Jump(masm->isolate()->builtins()->CallWithSpread(),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                              tail_call_mode),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  // -- r0 : argument count (not including receiver)
  // -- r3 : new target
  // -- r1 : constructor to call
  // -- r2 : allocation site feedback if available, undefined otherwise.
  // -- r4 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  // Push a slot for the receiver to be constructed.
  __ mov(ip, Operand::Zero());
  __ push(ip);

  // Push the arguments. r5, r4, r6 will be modified.
  Generate_InterpreterPushArgs(masm, r0, r4, r5, r6, &stack_overflow);

  __ AssertUndefinedOrAllocationSite(r2, r5);
  if (mode == InterpreterPushArgsMode::kJSFunction) {
    __ AssertFunction(r1);

    // Tail call to the function-specific construct stub (still in the caller
    // context at this point).
    __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset));
    // Jump to the construct function.
    __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // Call the constructor with r0, r1, and r3 unmodified.
    __ Jump(masm->isolate()->builtins()->ConstructWithSpread(),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    // Call the constructor with r0, r1, and r3 unmodified.
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstructArray(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r0 : argument count (not including receiver)
  // -- r1 : target to call verified to be Array function
  // -- r2 : allocation site feedback if available, undefined otherwise.
  // -- r3 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  __ add(r4, r0, Operand(1));  // Add one for receiver.

  // Push the arguments (the stack check happens inside the helper).
  // r3, r5 and r6 will be modified.
  Generate_InterpreterPushArgs(masm, r4, r3, r5, r6, &stack_overflow);

  // Array constructor expects constructor in r3. It is the same as r1 here.
  __ mov(r3, r1);

  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);
  }
}

static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
  __ Move(r2, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ add(lr, r2, Operand(interpreter_entry_return_pc_offset->value() +
                         Code::kHeaderSize - kHeapObjectTag));

  // Initialize the dispatch table register.
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ ldr(kInterpreterBytecodeArrayRegister,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
   1321     // Check function data field is actually a BytecodeArray object.
   1322     __ SmiTst(kInterpreterBytecodeArrayRegister);
   1323     __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
   1324     __ CompareObjectType(kInterpreterBytecodeArrayRegister, r1, no_reg,
   1325                          BYTECODE_ARRAY_TYPE);
   1326     __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
   1327   }
   1328 
   1329   // Get the target bytecode offset from the frame.
   1330   __ ldr(kInterpreterBytecodeOffsetRegister,
   1331          MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
   1332   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
   1333 
   1334   // Dispatch to the target bytecode.
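          // Load the current bytecode, use it as a pointer-scaled index into the
          // dispatch table, and jump to the handler's entry point.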
   1335   __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
   1336                          kInterpreterBytecodeOffsetRegister));
   1337   __ ldr(ip, MemOperand(kInterpreterDispatchTableRegister, r1, LSL,
   1338                         kPointerSizeLog2));
   1339   __ mov(pc, ip);
   1340 }
   1341 
   1342 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
   1343   // Advance the current bytecode offset stored within the given interpreter
   1344   // stack frame. This simulates what all bytecode handlers do upon completion
   1345   // of the underlying operation.
   1346   __ ldr(r1, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
   1347   __ ldr(r2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
   1348   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   1349   {
   1350     FrameScope scope(masm, StackFrame::INTERNAL);
   1351     __ Push(kInterpreterAccumulatorRegister, r1, r2);
   1352     __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset);
   1353     __ mov(r2, r0);  // Result is the new bytecode offset.
   1354     __ Pop(kInterpreterAccumulatorRegister);
   1355   }
   1356   __ str(r2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
   1357 
   1358   Generate_InterpreterEnterBytecode(masm);
   1359 }
   1360 
   1361 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
   1362   Generate_InterpreterEnterBytecode(masm);
   1363 }
   1364 
   1365 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
   1366   // ----------- S t a t e -------------
   1367   //  -- r0 : argument count (preserved for callee)
   1368   //  -- r3 : new target (preserved for callee)
   1369   //  -- r1 : target function (preserved for callee)
   1370   // -----------------------------------
   1371   // First look up the code; maybe we don't need to compile!
   1372   Label gotta_call_runtime, gotta_call_runtime_no_stack;
   1373   Label try_shared;
   1374   Label loop_top, loop_bottom;
   1375 
   1376   Register argument_count = r0;
   1377   Register closure = r1;
   1378   Register new_target = r3;
   1379   Register map = argument_count;
   1380   Register index = r2;
   1381 
   1382   // Do we have a valid feedback vector?
   1383   __ ldr(index, FieldMemOperand(closure, JSFunction::kFeedbackVectorOffset));
   1384   __ ldr(index, FieldMemOperand(index, Cell::kValueOffset));
   1385   __ JumpIfRoot(index, Heap::kUndefinedValueRootIndex,
   1386                 &gotta_call_runtime_no_stack);
   1387 
   1388   __ push(argument_count);
   1389   __ push(new_target);
   1390   __ push(closure);
   1391 
   1392   __ ldr(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
   1393   __ ldr(map,
   1394          FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
   1395   __ ldr(index, FieldMemOperand(map, FixedArray::kLengthOffset));
   1396   __ cmp(index, Operand(Smi::FromInt(2)));
   1397   __ b(lt, &try_shared);
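          // Note: a map shorter than two elements holds no (context, code) pairs;
          // the loop below walks those pairs backwards from the end of the array,
          // comparing each cached context against the current native context.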
   1398 
   1399   // r3  : native context
   1400   // r2  : length / index
   1401   // r0  : optimized code map
   1402   // stack[0] : closure
   1403   // stack[4] : new target
   1404   Register native_context = r3;
   1405   __ ldr(native_context, NativeContextMemOperand());
   1406 
   1407   __ bind(&loop_top);
   1408   Register temp = r1;
   1409   Register array_pointer = r5;
   1410 
   1411   // Does the native context match?
   1412   __ add(array_pointer, map, Operand::PointerOffsetFromSmiKey(index));
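          // Note: index is a Smi, so PointerOffsetFromSmiKey scales it into a
          // byte offset without a separate untagging step.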
   1413   __ ldr(temp, FieldMemOperand(array_pointer,
   1414                                SharedFunctionInfo::kOffsetToPreviousContext));
   1415   __ ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
   1416   __ cmp(temp, native_context);
   1417   __ b(ne, &loop_bottom);
   1418 
   1419   // Code available?
   1420   Register entry = r4;
   1421   __ ldr(entry,
   1422          FieldMemOperand(array_pointer,
   1423                          SharedFunctionInfo::kOffsetToPreviousCachedCode));
   1424   __ ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
   1425   __ JumpIfSmi(entry, &try_shared);
   1426 
   1427   // Found code. Get it into the closure and return.
   1428   __ pop(closure);
   1429   // Store code entry in the closure.
   1430   __ add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
   1431   __ str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
   1432   __ RecordWriteCodeEntryField(closure, entry, r5);
   1433 
   1434   // Link the closure into the optimized function list.
   1435   // r4 : code entry
   1436   // r3 : native context
   1437   // r1 : closure
   1438   __ ldr(r5,
   1439          ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
   1440   __ str(r5, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
   1441   __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, r5, r0,
   1442                       kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
   1443                       OMIT_SMI_CHECK);
   1444   const int function_list_offset =
   1445       Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
   1446   __ str(closure,
   1447          ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
   1448   // Save closure before the write barrier.
   1449   __ mov(r5, closure);
   1450   __ RecordWriteContextSlot(native_context, function_list_offset, closure, r0,
   1451                             kLRHasNotBeenSaved, kDontSaveFPRegs);
   1452   __ mov(closure, r5);
   1453   __ pop(new_target);
   1454   __ pop(argument_count);
   1455   __ Jump(entry);
   1456 
   1457   __ bind(&loop_bottom);
   1458   __ sub(index, index, Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
   1459   __ cmp(index, Operand(Smi::FromInt(1)));
   1460   __ b(gt, &loop_top);
   1461 
   1462   // We found no code.
   1463   __ bind(&try_shared);
   1464   __ pop(closure);
   1465   __ pop(new_target);
   1466   __ pop(argument_count);
   1467   __ ldr(entry,
   1468          FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
   1469   // Is the shared function marked for tier up?
   1470   __ ldrb(r5, FieldMemOperand(entry,
   1471                               SharedFunctionInfo::kMarkedForTierUpByteOffset));
   1472   __ tst(r5, Operand(1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte));
   1473   __ b(ne, &gotta_call_runtime_no_stack);
   1474 
   1475   // If SFI points to anything other than CompileLazy, install that.
   1476   __ ldr(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
   1477   __ Move(r5, masm->CodeObject());
   1478   __ cmp(entry, r5);
   1479   __ b(eq, &gotta_call_runtime_no_stack);
   1480 
   1481   // Install the SFI's code entry.
   1482   __ add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
   1483   __ str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
   1484   __ RecordWriteCodeEntryField(closure, entry, r5);
   1485   __ Jump(entry);
   1486 
   1487   __ bind(&gotta_call_runtime);
   1488   __ pop(closure);
   1489   __ pop(new_target);
   1490   __ pop(argument_count);
   1491   __ bind(&gotta_call_runtime_no_stack);
   1492   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
   1493 }
   1494 
   1495 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
   1496   GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
   1497 }
   1498 
   1499 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
   1500   GenerateTailCallToReturnedCode(masm,
   1501                                  Runtime::kCompileOptimized_NotConcurrent);
   1502 }
   1503 
   1504 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
   1505   GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
   1506 }
   1507 
   1508 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
   1509   // ----------- S t a t e -------------
   1510   //  -- r0 : argument count (preserved for callee)
   1511   //  -- r1 : target function (preserved for callee)
   1512   //  -- r3 : new target (preserved for callee)
   1513   // -----------------------------------
   1514   Label failed;
   1515   {
   1516     FrameScope scope(masm, StackFrame::INTERNAL);
   1517     // Preserve argument count for later compare.
   1518     __ Move(r4, r0);
   1519     // Push the number of arguments to the callee.
   1520     __ SmiTag(r0);
   1521     __ push(r0);
   1522     // Push a copy of the target function and the new target.
   1523     __ push(r1);
   1524     __ push(r3);
   1525 
   1526     // Push the function itself as the first argument to the runtime call.
   1527     __ push(r1);
   1528     // Copy arguments from caller (stdlib, foreign, heap).
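            // Each iteration j below handles the case of exactly j caller
            // arguments: push the provided arguments first to last, then pad
            // with undefined so the runtime always receives three of them.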
   1529     Label args_done;
   1530     for (int j = 0; j < 4; ++j) {
   1531       Label over;
   1532       if (j < 3) {
   1533         __ cmp(r4, Operand(j));
   1534         __ b(ne, &over);
   1535       }
   1536       for (int i = j - 1; i >= 0; --i) {
   1537         __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
   1538                                       i * kPointerSize));
   1539         __ push(r4);
   1540       }
   1541       for (int i = 0; i < 3 - j; ++i) {
   1542         __ PushRoot(Heap::kUndefinedValueRootIndex);
   1543       }
   1544       if (j < 3) {
   1545         __ jmp(&args_done);
   1546         __ bind(&over);
   1547       }
   1548     }
   1549     __ bind(&args_done);
   1550 
   1551     // Call the runtime; on success, unwind this frame and the parent frame.
   1552     __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
   1553     // A smi 0 is returned on failure, an object on success.
   1554     __ JumpIfSmi(r0, &failed);
   1555 
   1556     __ Drop(2);
   1557     __ pop(r4);
   1558     __ SmiUntag(r4);
   1559     scope.GenerateLeaveFrame();
   1560 
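            // Drop the incoming arguments together with the receiver and return;
            // r0 still holds the object returned by the runtime.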
   1561     __ add(r4, r4, Operand(1));
   1562     __ Drop(r4);
   1563     __ Ret();
   1564 
   1565     __ bind(&failed);
   1566     // Restore target function and new target.
   1567     __ pop(r3);
   1568     __ pop(r1);
   1569     __ pop(r0);
   1570     __ SmiUntag(r0);
   1571   }
   1572   // On failure, tail call back to regular js.
   1573   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
   1574 }
   1575 
   1576 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
   1577   // For now, we are relying on the fact that make_code_young doesn't do any
   1578   // garbage collection, which allows us to save/restore the registers without
   1579   // worrying about which of them contain pointers. We also don't build an
   1580   // internal frame to make the code faster, since we shouldn't have to do stack
   1581   // crawls in MakeCodeYoung. This seems a bit fragile.
   1582 
   1583   // The following registers must be saved and restored when calling through to
   1584   // the runtime:
   1585   //   r0 - contains return address (beginning of patch sequence)
   1586   //   r1 - isolate
   1587   //   r3 - new target
   1588   FrameScope scope(masm, StackFrame::MANUAL);
   1589   __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
   1590   __ PrepareCallCFunction(2, 0, r2);
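          // r0 already holds the first C argument (the return address marking the
          // start of the patch sequence); the isolate becomes the second.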
   1591   __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
   1592   __ CallCFunction(
   1593       ExternalReference::get_make_code_young_function(masm->isolate()), 2);
   1594   __ ldm(ia_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
   1595   __ mov(pc, r0);
   1596 }
   1597 
   1598 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                              \
   1599   void Builtins::Generate_Make##C##CodeYoungAgain(MacroAssembler* masm) { \
   1600     GenerateMakeCodeYoungAgainCommon(masm);                               \
   1601   }
   1602 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
   1603 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
   1604 
   1605 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
   1606   // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
   1607   // that make_code_young doesn't do any garbage collection, which allows us to
   1608   // save/restore the registers without worrying about which of them contain
   1609   // pointers.
   1610 
   1611   // The following registers must be saved and restored when calling through to
   1612   // the runtime:
   1613   //   r0 - contains return address (beginning of patch sequence)
   1614   //   r1 - isolate
   1615   //   r3 - new target
   1616   FrameScope scope(masm, StackFrame::MANUAL);
   1617   __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
   1618   __ PrepareCallCFunction(2, 0, r2);
   1619   __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
   1620   __ CallCFunction(
   1621       ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
   1622       2);
   1623   __ ldm(ia_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
   1624 
   1625   // Perform prologue operations usually performed by the young code stub.
   1626   __ PushStandardFrame(r1);
   1627 
   1628   // Jump to point after the code-age stub.
   1629   __ add(r0, r0, Operand(kNoCodeAgeSequenceLength));
   1630   __ mov(pc, r0);
   1631 }
   1632 
   1633 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
   1634   GenerateMakeCodeYoungAgainCommon(masm);
   1635 }
   1636 
   1637 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
   1638   Generate_MarkCodeAsExecutedOnce(masm);
   1639 }
   1640 
   1641 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
   1642                                              SaveFPRegsMode save_doubles) {
   1643   {
   1644     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
   1645 
   1646     // Preserve registers across notification, this is important for compiled
   1647     // stubs that tail call the runtime on deopts passing their parameters in
   1648     // registers.
   1649     __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved);
   1650     // Pass the function and deoptimization type to the runtime system.
   1651     __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
   1652     __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved);
   1653   }
   1654 
   1655   __ add(sp, sp, Operand(kPointerSize));  // Ignore state
   1656   __ mov(pc, lr);                         // Jump to miss handler
   1657 }
   1658 
   1659 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
   1660   Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
   1661 }
   1662 
   1663 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
   1664   Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
   1665 }
   1666 
   1667 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
   1668                                              Deoptimizer::BailoutType type) {
   1669   {
   1670     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
   1671     // Pass the function and deoptimization type to the runtime system.
   1672     __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
   1673     __ push(r0);
   1674     __ CallRuntime(Runtime::kNotifyDeoptimized);
   1675   }
   1676 
   1677   // Get the full codegen state from the stack and untag it -> r6.
   1678   __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
   1679   __ SmiUntag(r6);
   1680   // Switch on the state.
   1681   Label with_tos_register, unknown_state;
   1682   __ cmp(r6,
   1683          Operand(static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS)));
   1684   __ b(ne, &with_tos_register);
   1685   __ add(sp, sp, Operand(1 * kPointerSize));  // Remove state.
   1686   __ Ret();
   1687 
   1688   __ bind(&with_tos_register);
   1689   DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r0.code());
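          // sp[0] holds the bailout state and sp[1] the saved top-of-stack value,
          // which is restored into the accumulator here.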
   1690   __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
   1691   __ cmp(r6,
   1692          Operand(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER)));
   1693   __ b(ne, &unknown_state);
   1694   __ add(sp, sp, Operand(2 * kPointerSize));  // Remove state.
   1695   __ Ret();
   1696 
   1697   __ bind(&unknown_state);
   1698   __ stop("no cases left");
   1699 }
   1700 
   1701 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
   1702   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
   1703 }
   1704 
   1705 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
   1706   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
   1707 }
   1708 
   1709 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
   1710   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
   1711 }
   1712 
   1713 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
   1714                                     Register function_template_info,
   1715                                     Register scratch0, Register scratch1,
   1716                                     Register scratch2,
   1717                                     Label* receiver_check_failed) {
   1718   Register signature = scratch0;
   1719   Register map = scratch1;
   1720   Register constructor = scratch2;
   1721 
   1722   // If there is no signature, return the holder.
   1723   __ ldr(signature, FieldMemOperand(function_template_info,
   1724                                     FunctionTemplateInfo::kSignatureOffset));
   1725   __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
   1726   Label receiver_check_passed;
   1727   __ b(eq, &receiver_check_passed);
   1728 
   1729   // Walk the prototype chain.
   1730   __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
   1731   Label prototype_loop_start;
   1732   __ bind(&prototype_loop_start);
   1733 
   1734   // Get the constructor, if any.
   1735   __ GetMapConstructor(constructor, map, ip, ip);
   1736   __ cmp(ip, Operand(JS_FUNCTION_TYPE));
   1737   Label next_prototype;
   1738   __ b(ne, &next_prototype);
   1739   Register type = constructor;
   1740   __ ldr(type,
   1741          FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
   1742   __ ldr(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));
   1743 
   1744   // Loop through the chain of inheriting function templates.
   1745   Label function_template_loop;
   1746   __ bind(&function_template_loop);
   1747 
   1748   // If the signatures match, we have a compatible receiver.
   1749   __ cmp(signature, type);
   1750   __ b(eq, &receiver_check_passed);
   1751 
   1752   // If the current type is not a FunctionTemplateInfo, load the next prototype
   1753   // in the chain.
   1754   __ JumpIfSmi(type, &next_prototype);
   1755   __ CompareObjectType(type, ip, ip, FUNCTION_TEMPLATE_INFO_TYPE);
   1756 
   1757   // Otherwise load the parent function template and iterate.
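          // (Both the load and the branch below are predicated on eq, so they
          // only execute when the object really is a FunctionTemplateInfo.)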
   1758   __ ldr(type,
   1759          FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset),
   1760          eq);
   1761   __ b(&function_template_loop, eq);
   1762 
   1763   // Load the next prototype.
   1764   __ bind(&next_prototype);
   1765   __ ldr(ip, FieldMemOperand(map, Map::kBitField3Offset));
   1766   __ tst(ip, Operand(Map::HasHiddenPrototype::kMask));
   1767   __ b(eq, receiver_check_failed);
   1768   __ ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
   1769   __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
   1770   // Iterate.
   1771   __ b(&prototype_loop_start);
   1772 
   1773   __ bind(&receiver_check_passed);
   1774 }
   1775 
   1776 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
   1777   // ----------- S t a t e -------------
   1778   //  -- r0                 : number of arguments excluding receiver
   1779   //  -- r1                 : callee
   1780   //  -- lr                 : return address
   1781   //  -- sp[0]              : last argument
   1782   //  -- ...
   1783   //  -- sp[4 * (argc - 1)] : first argument
   1784   //  -- sp[4 * argc]       : receiver
   1785   // -----------------------------------
   1786 
   1787   // Load the FunctionTemplateInfo.
   1788   __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
   1789   __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
   1790 
   1791   // Do the compatible receiver check.
   1792   Label receiver_check_failed;
   1793   __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));
   1794   CompatibleReceiverCheck(masm, r2, r3, r4, r5, r6, &receiver_check_failed);
   1795 
   1796   // Get the callback offset from the FunctionTemplateInfo, and jump to the
   1797   // beginning of the code.
   1798   __ ldr(r4, FieldMemOperand(r3, FunctionTemplateInfo::kCallCodeOffset));
   1799   __ ldr(r4, FieldMemOperand(r4, CallHandlerInfo::kFastHandlerOffset));
   1800   __ add(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
   1801   __ Jump(r4);
   1802 
   1803   // Compatible receiver check failed: throw an Illegal Invocation exception.
   1804   __ bind(&receiver_check_failed);
   1805   // Drop the arguments (including the receiver)
   1806   __ add(r0, r0, Operand(1));
   1807   __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
   1808   __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
   1809 }
   1810 
   1811 static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
   1812                                               bool has_handler_frame) {
   1813   // Lookup the function in the JavaScript frame.
   1814   if (has_handler_frame) {
   1815     __ ldr(r0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   1816     __ ldr(r0, MemOperand(r0, JavaScriptFrameConstants::kFunctionOffset));
   1817   } else {
   1818     __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   1819   }
   1820 
   1821   {
   1822     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
   1823     // Pass function as argument.
   1824     __ push(r0);
   1825     __ CallRuntime(Runtime::kCompileForOnStackReplacement);
   1826   }
   1827 
   1828   // If the code object is null, just return to the caller.
   1829   Label skip;
   1830   __ cmp(r0, Operand(Smi::kZero));
   1831   __ b(ne, &skip);
   1832   __ Ret();
   1833 
   1834   __ bind(&skip);
   1835 
   1836   // Drop any potential handler frame that may be sitting on top of the actual
   1837   // JavaScript frame. This is the case when OSR is triggered from bytecode.
   1838   if (has_handler_frame) {
   1839     __ LeaveFrame(StackFrame::STUB);
   1840   }
   1841 
   1842   // Load deoptimization data from the code object.
   1843   // <deopt_data> = <code>[#deoptimization_data_offset]
   1844   __ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset));
   1845 
   1846   {
   1847     ConstantPoolUnavailableScope constant_pool_unavailable(masm);
   1848     __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start
   1849 
   1850     if (FLAG_enable_embedded_constant_pool) {
   1851       __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r0);
   1852     }
   1853 
   1854     // Load the OSR entrypoint offset from the deoptimization data.
   1855     // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
   1856     __ ldr(r1, FieldMemOperand(
   1857                    r1, FixedArray::OffsetOfElementAt(
   1858                            DeoptimizationInputData::kOsrPcOffsetIndex)));
   1859 
   1860     // Compute the target address = code start + osr_offset
   1861     __ add(lr, r0, Operand::SmiUntag(r1));
   1862 
   1863     // And "return" to the OSR entry point of the function.
   1864     __ Ret();
   1865   }
   1866 }
   1867 
   1868 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   1869   Generate_OnStackReplacementHelper(masm, false);
   1870 }
   1871 
   1872 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
   1873   Generate_OnStackReplacementHelper(masm, true);
   1874 }
   1875 
   1876 // static
   1877 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   1878   // ----------- S t a t e -------------
   1879   //  -- r0    : argc
   1880   //  -- sp[0] : argArray
   1881   //  -- sp[4] : thisArg
   1882   //  -- sp[8] : receiver
   1883   // -----------------------------------
   1884 
   1885   // 1. Load receiver into r1, argArray into r0 (if present), remove all
   1886   // arguments from the stack (including the receiver), and push thisArg (if
   1887   // present) instead.
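          // Note: the SetCC/ge sequence below only loads thisArg and argArray
          // when enough arguments were actually supplied; otherwise the registers
          // keep the undefined value loaded first.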
   1888   {
   1889     __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
   1890     __ mov(r3, r2);
   1891     __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));  // receiver
   1892     __ sub(r4, r0, Operand(1), SetCC);
   1893     __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // thisArg
   1894     __ sub(r4, r4, Operand(1), SetCC, ge);
   1895     __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argArray
   1896     __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
   1897     __ str(r2, MemOperand(sp, 0));
   1898     __ mov(r0, r3);
   1899   }
   1900 
   1901   // ----------- S t a t e -------------
   1902   //  -- r0    : argArray
   1903   //  -- r1    : receiver
   1904   //  -- sp[0] : thisArg
   1905   // -----------------------------------
   1906 
   1907   // 2. Make sure the receiver is actually callable.
   1908   Label receiver_not_callable;
   1909   __ JumpIfSmi(r1, &receiver_not_callable);
   1910   __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
   1911   __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
   1912   __ tst(r4, Operand(1 << Map::kIsCallable));
   1913   __ b(eq, &receiver_not_callable);
   1914 
   1915   // 3. Tail call with no arguments if argArray is null or undefined.
   1916   Label no_arguments;
   1917   __ JumpIfRoot(r0, Heap::kNullValueRootIndex, &no_arguments);
   1918   __ JumpIfRoot(r0, Heap::kUndefinedValueRootIndex, &no_arguments);
   1919 
   1920   // 4a. Apply the receiver to the given argArray (passing undefined for
   1921   // new.target).
   1922   __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
   1923   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
   1924 
   1925   // 4b. The argArray is either null or undefined, so we tail call without any
   1926   // arguments to the receiver.
   1927   __ bind(&no_arguments);
   1928   {
   1929     __ mov(r0, Operand(0));
   1930     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   1931   }
   1932 
   1933   // 4c. The receiver is not callable, throw an appropriate TypeError.
   1934   __ bind(&receiver_not_callable);
   1935   {
   1936     __ str(r1, MemOperand(sp, 0));
   1937     __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
   1938   }
   1939 }
   1940 
   1941 // static
   1942 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
   1943   // 1. Make sure we have at least one argument.
   1944   // r0: actual number of arguments
   1945   {
   1946     Label done;
   1947     __ cmp(r0, Operand::Zero());
   1948     __ b(ne, &done);
   1949     __ PushRoot(Heap::kUndefinedValueRootIndex);
   1950     __ add(r0, r0, Operand(1));
   1951     __ bind(&done);
   1952   }
   1953 
   1954   // 2. Get the callable to call (passed as receiver) from the stack.
   1955   // r0: actual number of arguments
   1956   __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
   1957 
   1958   // 3. Shift arguments and return address one slot down on the stack
   1959   //    (overwriting the original receiver).  Adjust argument count to make
   1960   //    the original first argument the new receiver.
   1961   // r0: actual number of arguments
   1962   // r1: callable
   1963   {
   1964     Label loop;
   1965     // Calculate the copy start address (destination). Copy end address is sp.
   1966     __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
   1967 
   1968     __ bind(&loop);
   1969     __ ldr(ip, MemOperand(r2, -kPointerSize));
   1970     __ str(ip, MemOperand(r2));
   1971     __ sub(r2, r2, Operand(kPointerSize));
   1972     __ cmp(r2, sp);
   1973     __ b(ne, &loop);
   1974     // Adjust the actual number of arguments and remove the top element
   1975     // (which is a copy of the last argument).
   1976     __ sub(r0, r0, Operand(1));
   1977     __ pop();
   1978   }
   1979 
   1980   // 4. Call the callable.
   1981   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   1982 }
   1983 
   1984 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
   1985   // ----------- S t a t e -------------
   1986   //  -- r0     : argc
   1987   //  -- sp[0]  : argumentsList
   1988   //  -- sp[4]  : thisArgument
   1989   //  -- sp[8]  : target
   1990   //  -- sp[12] : receiver
   1991   // -----------------------------------
   1992 
   1993   // 1. Load target into r1 (if present), argumentsList into r0 (if present),
   1994   // remove all arguments from the stack (including the receiver), and push
   1995   // thisArgument (if present) instead.
   1996   {
   1997     __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
   1998     __ mov(r2, r1);
   1999     __ mov(r3, r1);
   2000     __ sub(r4, r0, Operand(1), SetCC);
   2001     __ ldr(r1, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // target
   2002     __ sub(r4, r4, Operand(1), SetCC, ge);
   2003     __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // thisArgument
   2004     __ sub(r4, r4, Operand(1), SetCC, ge);
   2005     __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argumentsList
   2006     __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
   2007     __ str(r2, MemOperand(sp, 0));
   2008     __ mov(r0, r3);
   2009   }
   2010 
   2011   // ----------- S t a t e -------------
   2012   //  -- r0    : argumentsList
   2013   //  -- r1    : target
   2014   //  -- sp[0] : thisArgument
   2015   // -----------------------------------
   2016 
   2017   // 2. Make sure the target is actually callable.
   2018   Label target_not_callable;
   2019   __ JumpIfSmi(r1, &target_not_callable);
   2020   __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
   2021   __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
   2022   __ tst(r4, Operand(1 << Map::kIsCallable));
   2023   __ b(eq, &target_not_callable);
   2024 
   2025   // 3a. Apply the target to the given argumentsList (passing undefined for
   2026   // new.target).
   2027   __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
   2028   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
   2029 
   2030   // 3b. The target is not callable, throw an appropriate TypeError.
   2031   __ bind(&target_not_callable);
   2032   {
   2033     __ str(r1, MemOperand(sp, 0));
   2034     __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
   2035   }
   2036 }
   2037 
   2038 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
   2039   // ----------- S t a t e -------------
   2040   //  -- r0     : argc
   2041   //  -- sp[0]  : new.target (optional)
   2042   //  -- sp[4]  : argumentsList
   2043   //  -- sp[8]  : target
   2044   //  -- sp[12] : receiver
   2045   // -----------------------------------
   2046 
   2047   // 1. Load target into r1 (if present), argumentsList into r0 (if present),
   2048   // new.target into r3 (if present, otherwise use target), remove all
   2049   // arguments from the stack (including the receiver), and leave undefined on
   2050   // the stack as the receiver.
   2051   {
   2052     __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
   2053     __ mov(r2, r1);
   2054     __ str(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));  // receiver
   2055     __ sub(r4, r0, Operand(1), SetCC);
   2056     __ ldr(r1, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // target
   2057     __ mov(r3, r1);  // new.target defaults to target
   2058     __ sub(r4, r4, Operand(1), SetCC, ge);
   2059     __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argumentsList
   2060     __ sub(r4, r4, Operand(1), SetCC, ge);
   2061     __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // new.target
   2062     __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
   2063     __ mov(r0, r2);
   2064   }
   2065 
   2066   // ----------- S t a t e -------------
   2067   //  -- r0    : argumentsList
   2068   //  -- r3    : new.target
   2069   //  -- r1    : target
   2070   //  -- sp[0] : receiver (undefined)
   2071   // -----------------------------------
   2072 
   2073   // 2. Make sure the target is actually a constructor.
   2074   Label target_not_constructor;
   2075   __ JumpIfSmi(r1, &target_not_constructor);
   2076   __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
   2077   __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
   2078   __ tst(r4, Operand(1 << Map::kIsConstructor));
   2079   __ b(eq, &target_not_constructor);
   2080 
   2081   // 3. Make sure the new.target is actually a constructor.
   2082   Label new_target_not_constructor;
   2083   __ JumpIfSmi(r3, &new_target_not_constructor);
   2084   __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
   2085   __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
   2086   __ tst(r4, Operand(1 << Map::kIsConstructor));
   2087   __ b(eq, &new_target_not_constructor);
   2088 
   2089   // 4a. Construct the target with the given new.target and argumentsList.
   2090   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
   2091 
   2092   // 4b. The target is not a constructor, throw an appropriate TypeError.
   2093   __ bind(&target_not_constructor);
   2094   {
   2095     __ str(r1, MemOperand(sp, 0));
   2096     __ TailCallRuntime(Runtime::kThrowNotConstructor);
   2097   }
   2098 
   2099   // 4c. The new.target is not a constructor, throw an appropriate TypeError.
   2100   __ bind(&new_target_not_constructor);
   2101   {
   2102     __ str(r3, MemOperand(sp, 0));
   2103     __ TailCallRuntime(Runtime::kThrowNotConstructor);
   2104   }
   2105 }
   2106 
   2107 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
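          // The adaptor frame records the Smi-tagged argument count, the function
          // and a frame-type marker below the usual fp/lr (and, if enabled, pp).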
   2108   __ SmiTag(r0);
   2109   __ mov(r4, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
   2110   __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() |
   2111                        (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
   2112                        fp.bit() | lr.bit());
   2113   __ add(fp, sp,
   2114          Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
   2115 }
   2116 
   2117 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
   2118   // ----------- S t a t e -------------
   2119   //  -- r0 : result being passed through
   2120   // -----------------------------------
   2121   // Get the number of arguments passed (as a smi), tear down the frame and
   2122   // then remove the parameters from the stack.
   2123   __ ldr(r1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
   2124                               kPointerSize)));
   2125 
   2126   __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR);
   2127   __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
   2128   __ add(sp, sp, Operand(kPointerSize));  // adjust for receiver
   2129 }
   2130 
   2131 // static
   2132 void Builtins::Generate_Apply(MacroAssembler* masm) {
   2133   // ----------- S t a t e -------------
   2134   //  -- r0    : argumentsList
   2135   //  -- r1    : target
   2136   //  -- r3    : new.target (checked to be constructor or undefined)
   2137   //  -- sp[0] : thisArgument
   2138   // -----------------------------------
   2139 
   2140   // Create the list of arguments from the array-like argumentsList.
   2141   {
   2142     Label create_arguments, create_array, create_holey_array, create_runtime,
   2143         done_create;
   2144     __ JumpIfSmi(r0, &create_runtime);
   2145 
   2146     // Load the map of argumentsList into r2.
   2147     __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
   2148 
   2149     // Load native context into r4.
   2150     __ ldr(r4, NativeContextMemOperand());
   2151 
   2152     // Check if argumentsList is an (unmodified) arguments object.
   2153     __ ldr(ip, ContextMemOperand(r4, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
   2154     __ cmp(ip, r2);
   2155     __ b(eq, &create_arguments);
   2156     __ ldr(ip, ContextMemOperand(r4, Context::STRICT_ARGUMENTS_MAP_INDEX));
   2157     __ cmp(ip, r2);
   2158     __ b(eq, &create_arguments);
   2159 
   2160     // Check if argumentsList is a fast JSArray.
   2161     __ CompareInstanceType(r2, ip, JS_ARRAY_TYPE);
   2162     __ b(eq, &create_array);
   2163 
   2164     // Ask the runtime to create the list (actually a FixedArray).
   2165     __ bind(&create_runtime);
   2166     {
   2167       FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
   2168       __ Push(r1, r3, r0);
   2169       __ CallRuntime(Runtime::kCreateListFromArrayLike);
   2170       __ Pop(r1, r3);
   2171       __ ldr(r2, FieldMemOperand(r0, FixedArray::kLengthOffset));
   2172       __ SmiUntag(r2);
   2173     }
   2174     __ jmp(&done_create);
   2175 
   2176     // Try to create the list from an arguments object.
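            // Only unmodified arguments objects qualify: the length property must
            // still match the length of the backing store.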
   2177     __ bind(&create_arguments);
   2178     __ ldr(r2, FieldMemOperand(r0, JSArgumentsObject::kLengthOffset));
   2179     __ ldr(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
   2180     __ ldr(ip, FieldMemOperand(r4, FixedArray::kLengthOffset));
   2181     __ cmp(r2, ip);
   2182     __ b(ne, &create_runtime);
   2183     __ SmiUntag(r2);
   2184     __ mov(r0, r4);
   2185     __ b(&done_create);
   2186 
   2187     // For holey JSArrays we need to check that the array prototype chain
   2188     // protector is intact and that our prototype actually is Array.prototype.
   2189     __ bind(&create_holey_array);
   2190     __ ldr(r2, FieldMemOperand(r2, Map::kPrototypeOffset));
   2191     __ ldr(r4, ContextMemOperand(r4, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
   2192     __ cmp(r2, r4);
   2193     __ b(ne, &create_runtime);
   2194     __ LoadRoot(r4, Heap::kArrayProtectorRootIndex);
   2195     __ ldr(r2, FieldMemOperand(r4, PropertyCell::kValueOffset));
   2196     __ cmp(r2, Operand(Smi::FromInt(Isolate::kProtectorValid)));
   2197     __ b(ne, &create_runtime);
   2198     __ ldr(r2, FieldMemOperand(r0, JSArray::kLengthOffset));
   2199     __ ldr(r0, FieldMemOperand(r0, JSArray::kElementsOffset));
   2200     __ SmiUntag(r2);
   2201     __ b(&done_create);
   2202 
   2203     // Try to create the list from a JSArray object.
   2204     //  -- r2 and r4 must be preserved until the branch to create_holey_array.
   2205     __ bind(&create_array);
   2206     __ ldr(r5, FieldMemOperand(r2, Map::kBitField2Offset));
   2207     __ DecodeField<Map::ElementsKindBits>(r5);
   2208     STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
   2209     STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
   2210     STATIC_ASSERT(FAST_ELEMENTS == 2);
   2211     STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
   2212     __ cmp(r5, Operand(FAST_HOLEY_ELEMENTS));
   2213     __ b(hi, &create_runtime);
   2214     // Only FAST_XXX kinds remain after this point; FAST_HOLEY_XXX are the odd values.
   2215     __ tst(r5, Operand(1));
   2216     __ b(ne, &create_holey_array);
   2217     // FAST_SMI_ELEMENTS or FAST_ELEMENTS after this point.
   2218     __ ldr(r2, FieldMemOperand(r0, JSArray::kLengthOffset));
   2219     __ ldr(r0, FieldMemOperand(r0, JSArray::kElementsOffset));
   2220     __ SmiUntag(r2);
   2221 
   2222     __ bind(&done_create);
   2223   }
   2224 
   2225   // Check for stack overflow.
   2226   {
   2227     // Check the stack for overflow. We are not trying to catch interruptions
   2228     // (i.e. debug break and preemption) here, so check the "real stack limit".
   2229     Label done;
   2230     __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
   2231     // Make ip the space we have left. The stack might already be overflowed
   2232     // here, which will cause ip to become negative.
   2233     __ sub(ip, sp, ip);
   2234     // Check if the arguments will overflow the stack.
   2235     __ cmp(ip, Operand(r2, LSL, kPointerSizeLog2));
   2236     __ b(gt, &done);  // Signed comparison.
   2237     __ TailCallRuntime(Runtime::kThrowStackOverflow);
   2238     __ bind(&done);
   2239   }
   2240 
   2241   // ----------- S t a t e -------------
   2242   //  -- r1    : target
   2243   //  -- r0    : args (a FixedArray built from argumentsList)
   2244   //  -- r2    : len (number of elements to push from args)
   2245   //  -- r3    : new.target (checked to be constructor or undefined)
   2246   //  -- sp[0] : thisArgument
   2247   // -----------------------------------
   2248 
   2249   // Push arguments onto the stack (thisArgument is already on the stack).
   2250   {
   2251     __ mov(r4, Operand(0));
   2252     __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
   2253     __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
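            // The cmp/conditional-mov pair in the loop below replaces any hole
            // read from the FixedArray with undefined before it is pushed.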
   2254     Label done, loop;
   2255     __ bind(&loop);
   2256     __ cmp(r4, r2);
   2257     __ b(eq, &done);
   2258     __ add(ip, r0, Operand(r4, LSL, kPointerSizeLog2));
   2259     __ ldr(ip, FieldMemOperand(ip, FixedArray::kHeaderSize));
   2260     __ cmp(r5, ip);
   2261     __ mov(ip, r6, LeaveCC, eq);
   2262     __ Push(ip);
   2263     __ add(r4, r4, Operand(1));
   2264     __ b(&loop);
   2265     __ bind(&done);
   2266     __ Move(r0, r4);
   2267   }
   2268 
   2269   // Dispatch to Call or Construct depending on whether new.target is undefined.
   2270   {
   2271     __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
   2272     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
   2273     __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
   2274   }
   2275 }
   2276 
   2277 // static
   2278 void Builtins::Generate_CallForwardVarargs(MacroAssembler* masm,
   2279                                            Handle<Code> code) {
   2280   // ----------- S t a t e -------------
   2281   //  -- r1    : the target to call (can be any Object)
   2282   //  -- r2    : start index (to support rest parameters)
   2283   //  -- lr    : return address.
   2284   //  -- sp[0] : thisArgument
   2285   // -----------------------------------
   2286 
   2287   // Check if we have an arguments adaptor frame below the function frame.
   2288   Label arguments_adaptor, arguments_done;
   2289   __ ldr(r3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   2290   __ ldr(ip, MemOperand(r3, CommonFrameConstants::kContextOrFrameTypeOffset));
   2291   __ cmp(ip, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
   2292   __ b(eq, &arguments_adaptor);
   2293   {
   2294     __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   2295     __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
   2296     __ ldr(r0, FieldMemOperand(
   2297                    r0, SharedFunctionInfo::kFormalParameterCountOffset));
   2298     __ mov(r3, fp);
   2299   }
   2300   __ b(&arguments_done);
   2301   __ bind(&arguments_adaptor);
   2302   {
   2303     // Load the length from the ArgumentsAdaptorFrame.
   2304     __ ldr(r0, MemOperand(r3, ArgumentsAdaptorFrameConstants::kLengthOffset));
   2305   }
   2306   __ bind(&arguments_done);
   2307 
   2308   Label stack_empty, stack_done, stack_overflow;
   2309   __ SmiUntag(r0);
   2310   __ sub(r0, r0, r2, SetCC);
   2311   __ b(le, &stack_empty);
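          // r0 now holds the number of arguments to forward past the start index.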
   2312   {
   2313     // Check for stack overflow.
   2314     Generate_StackOverflowCheck(masm, r0, r2, &stack_overflow);
   2315 
   2316     // Forward the arguments from the caller frame.
   2317     {
   2318       Label loop;
   2319       __ add(r3, r3, Operand(kPointerSize));
   2320       __ mov(r2, r0);
   2321       __ bind(&loop);
   2322       {
   2323         __ ldr(ip, MemOperand(r3, r2, LSL, kPointerSizeLog2));
   2324         __ push(ip);
   2325         __ sub(r2, r2, Operand(1), SetCC);
   2326         __ b(ne, &loop);
   2327       }
   2328     }
   2329   }
   2330   __ b(&stack_done);
   2331   __ bind(&stack_overflow);
   2332   __ TailCallRuntime(Runtime::kThrowStackOverflow);
   2333   __ bind(&stack_empty);
   2334   {
   2335     // We just pass the receiver, which is already on the stack.
   2336     __ mov(r0, Operand(0));
   2337   }
   2338   __ bind(&stack_done);
   2339 
   2340   __ Jump(code, RelocInfo::CODE_TARGET);
   2341 }
   2342 
   2343 namespace {
   2344 
   2345 // Drops the top JavaScript frame and an arguments adaptor frame below it (if
   2346 // present), preserving all the arguments prepared for the current call.
   2347 // Does nothing if the debugger is currently active.
   2348 // ES6 14.6.3. PrepareForTailCall
   2349 //
   2350 // Stack structure for the function g() tail calling f():
   2351 //
   2352 // ------- Caller frame: -------
   2353 // |  ...
   2354 // |  g()'s arg M
   2355 // |  ...
   2356 // |  g()'s arg 1
   2357 // |  g()'s receiver arg
   2358 // |  g()'s caller pc
   2359 // ------- g()'s frame: -------
   2360 // |  g()'s caller fp      <- fp
   2361 // |  g()'s context
   2362 // |  function pointer: g
   2363 // |  -------------------------
   2364 // |  ...
   2365 // |  ...
   2366 // |  f()'s arg N
   2367 // |  ...
   2368 // |  f()'s arg 1
   2369 // |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
   2370 // ----------------------
   2371 //
   2372 void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
   2373                         Register scratch1, Register scratch2,
   2374                         Register scratch3) {
   2375   DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
   2376   Comment cmnt(masm, "[ PrepareForTailCall");
   2377 
   2378   // Prepare for tail call only if ES2015 tail call elimination is enabled.
   2379   Label done;
   2380   ExternalReference is_tail_call_elimination_enabled =
   2381       ExternalReference::is_tail_call_elimination_enabled_address(
   2382           masm->isolate());
   2383   __ mov(scratch1, Operand(is_tail_call_elimination_enabled));
   2384   __ ldrb(scratch1, MemOperand(scratch1));
   2385   __ cmp(scratch1, Operand(0));
   2386   __ b(eq, &done);
   2387 
   2388   // Drop possible interpreter handler/stub frame.
   2389   {
   2390     Label no_interpreter_frame;
   2391     __ ldr(scratch3,
   2392            MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
   2393     __ cmp(scratch3, Operand(StackFrame::TypeToMarker(StackFrame::STUB)));
   2394     __ b(ne, &no_interpreter_frame);
   2395     __ ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   2396     __ bind(&no_interpreter_frame);
   2397   }
   2398 
   2399   // Check if next frame is an arguments adaptor frame.
   2400   Register caller_args_count_reg = scratch1;
   2401   Label no_arguments_adaptor, formal_parameter_count_loaded;
   2402   __ ldr(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   2403   __ ldr(scratch3,
   2404          MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
   2405   __ cmp(scratch3,
   2406          Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
   2407   __ b(ne, &no_arguments_adaptor);
   2408 
   2409   // Drop current frame and load arguments count from arguments adaptor frame.
   2410   __ mov(fp, scratch2);
   2411   __ ldr(caller_args_count_reg,
   2412          MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
   2413   __ SmiUntag(caller_args_count_reg);
   2414   __ b(&formal_parameter_count_loaded);
   2415 
   2416   __ bind(&no_arguments_adaptor);
   2417   // Load caller's formal parameter count
   2418   __ ldr(scratch1,
   2419          MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
   2420   __ ldr(scratch1,
   2421          FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
   2422   __ ldr(caller_args_count_reg,
   2423          FieldMemOperand(scratch1,
   2424                          SharedFunctionInfo::kFormalParameterCountOffset));
   2425   __ SmiUntag(caller_args_count_reg);
   2426 
   2427   __ bind(&formal_parameter_count_loaded);
   2428 
   2429   ParameterCount callee_args_count(args_reg);
   2430   __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
   2431                         scratch3);
   2432   __ bind(&done);
   2433 }
   2434 }  // namespace
   2435 
   2436 // static
   2437 void Builtins::Generate_CallFunction(MacroAssembler* masm,
   2438                                      ConvertReceiverMode mode,
   2439                                      TailCallMode tail_call_mode) {
   2440   // ----------- S t a t e -------------
   2441   //  -- r0 : the number of arguments (not including the receiver)
   2442   //  -- r1 : the function to call (checked to be a JSFunction)
   2443   // -----------------------------------
   2444   __ AssertFunction(r1);
   2445 
   2446   // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
   2447   // Check that the function is not a "classConstructor".
   2448   Label class_constructor;
   2449   __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
   2450   __ ldrb(r3, FieldMemOperand(r2, SharedFunctionInfo::kFunctionKindByteOffset));
   2451   __ tst(r3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
   2452   __ b(ne, &class_constructor);
   2453 
   2454   // Enter the context of the function; ToObject has to run in the function
   2455   // context, and we also need to take the global proxy from the function
   2456   // context in case of conversion.
   2457   STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
   2458                 SharedFunctionInfo::kStrictModeByteOffset);
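          // Both flags live in the same byte, so a single load below can test the
          // native and strict-mode bits together.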
   2459   __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
   2460   // We need to convert the receiver for non-native sloppy mode functions.
   2461   Label done_convert;
   2462   __ ldrb(r3, FieldMemOperand(r2, SharedFunctionInfo::kNativeByteOffset));
   2463   __ tst(r3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
   2464                      (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
   2465   __ b(ne, &done_convert);
   2466   {
   2467     // ----------- S t a t e -------------
   2468     //  -- r0 : the number of arguments (not including the receiver)
   2469     //  -- r1 : the function to call (checked to be a JSFunction)
   2470     //  -- r2 : the shared function info.
   2471     //  -- cp : the function context.
   2472     // -----------------------------------
   2473 
   2474     if (mode == ConvertReceiverMode::kNullOrUndefined) {
   2475       // Patch receiver to global proxy.
   2476       __ LoadGlobalProxy(r3);
   2477     } else {
   2478       Label convert_to_object, convert_receiver;
   2479       __ ldr(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
   2480       __ JumpIfSmi(r3, &convert_to_object);
   2481       STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
   2482       __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
   2483       __ b(hs, &done_convert);
   2484       if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
   2485         Label convert_global_proxy;
   2486         __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex,
   2487                       &convert_global_proxy);
   2488         __ JumpIfNotRoot(r3, Heap::kNullValueRootIndex, &convert_to_object);
   2489         __ bind(&convert_global_proxy);
   2490         {
   2491           // Patch receiver to global proxy.
   2492           __ LoadGlobalProxy(r3);
   2493         }
   2494         __ b(&convert_receiver);
   2495       }
   2496       __ bind(&convert_to_object);
   2497       {
   2498         // Convert receiver using ToObject.
   2499         // TODO(bmeurer): Inline the allocation here to avoid building the frame
   2500         // in the fast case? (fall back to AllocateInNewSpace?)
   2501         FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
   2502         __ SmiTag(r0);
   2503         __ Push(r0, r1);
   2504         __ mov(r0, r3);
   2505         __ Push(cp);
   2506         __ Call(masm->isolate()->builtins()->ToObject(),
   2507                 RelocInfo::CODE_TARGET);
   2508         __ Pop(cp);
   2509         __ mov(r3, r0);
   2510         __ Pop(r0, r1);
   2511         __ SmiUntag(r0);
   2512       }
   2513       __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
   2514       __ bind(&convert_receiver);
   2515     }
   2516     __ str(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
   2517   }
   2518   __ bind(&done_convert);
   2519 
   2520   // ----------- S t a t e -------------
   2521   //  -- r0 : the number of arguments (not including the receiver)
   2522   //  -- r1 : the function to call (checked to be a JSFunction)
   2523   //  -- r2 : the shared function info.
   2524   //  -- cp : the function context.
   2525   // -----------------------------------
   2526 
   2527   if (tail_call_mode == TailCallMode::kAllow) {
   2528     PrepareForTailCall(masm, r0, r3, r4, r5);
   2529   }
   2530 
   2531   __ ldr(r2,
   2532          FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset));
   2533   __ SmiUntag(r2);
   2534   ParameterCount actual(r0);
   2535   ParameterCount expected(r2);
   2536   __ InvokeFunctionCode(r1, no_reg, expected, actual, JUMP_FUNCTION,
   2537                         CheckDebugStepCallWrapper());
   2538 
   2539   // The function is a "classConstructor", so we need to raise an exception.
   2540   __ bind(&class_constructor);
   2541   {
   2542     FrameScope frame(masm, StackFrame::INTERNAL);
   2543     __ push(r1);
   2544     __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
   2545   }
   2546 }

namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : target (checked to be a JSBoundFunction)
  //  -- r3 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into r2 and its length into r4.
  Label no_bound_arguments;
  __ ldr(r2, FieldMemOperand(r1, JSBoundFunction::kBoundArgumentsOffset));
  __ ldr(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
  __ SmiUntag(r4);
  __ cmp(r4, Operand(0));
  __ b(eq, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- r0 : the number of arguments (not including the receiver)
    //  -- r1 : target (checked to be a JSBoundFunction)
    //  -- r2 : the [[BoundArguments]] (implemented as FixedArray)
    //  -- r3 : new.target (only in case of [[Construct]])
    //  -- r4 : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ sub(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
      __ b(gt, &done);  // Signed comparison.
      // Restore the stack pointer.
      __ add(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Relocate arguments down the stack.
    {
      Label loop, done_loop;
      __ mov(r5, Operand(0));
      __ bind(&loop);
      __ cmp(r5, r0);
      __ b(gt, &done_loop);
      __ ldr(ip, MemOperand(sp, r4, LSL, kPointerSizeLog2));
      __ str(ip, MemOperand(sp, r5, LSL, kPointerSizeLog2));
      __ add(r4, r4, Operand(1));
      __ add(r5, r5, Operand(1));
      __ b(&loop);
      __ bind(&done_loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ ldr(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
      __ SmiUntag(r4);
      __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ bind(&loop);
      __ sub(r4, r4, Operand(1), SetCC);
      __ ldr(ip, MemOperand(r2, r4, LSL, kPointerSizeLog2));
      __ str(ip, MemOperand(sp, r0, LSL, kPointerSizeLog2));
      __ add(r0, r0, Operand(1));
      __ b(gt, &loop);
    }
  }
  __ bind(&no_bound_arguments);
}
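
// Sketch of the net effect in JS terms (illustrative, not executed here):
//
//   var b = t.bind(thisArg, b0, ..., bm);  // [[BoundArguments]] = b0..bm
//   b(a0, ..., an);
//   // is performed as:
//   t.call(thisArg, b0, ..., bm, a0, ..., an);
//
// This helper only inserts b0..bm on the stack between the receiver slot and
// the actual arguments; the receiver itself is patched by the callers.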

}  // namespace

// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(r1);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r0, r3, r4, r5);
  }

  // Patch the receiver to [[BoundThis]].
  __ ldr(ip, FieldMemOperand(r1, JSBoundFunction::kBoundThisOffset));
  __ str(ip, MemOperand(sp, r0, LSL, kPointerSizeLog2));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                       masm->isolate())));
  __ ldr(ip, MemOperand(ip));
  __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
}
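
// Note on the jump sequence above: the ExternalReference points at the slot
// in the isolate's builtins table holding the Call builtin's Code object;
// adding |Code::kHeaderSize - kHeapObjectTag| converts the tagged Code
// pointer into the address of its first instruction, so writing the result
// to pc performs a tail call.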

// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(r1, &non_callable);
  __ bind(&non_smi);
  __ CompareObjectType(r1, r4, r5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
  __ cmp(r5, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Call]] internal method.
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsCallable));
  __ b(eq, &non_callable);

  __ cmp(r5, Operand(JS_PROXY_TYPE));
  __ b(ne, &non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r0, r3, r4, r5);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(r1);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ add(r0, r0, Operand(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
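
// Dispatch summary for Generate_Call (illustrative JS counterparts):
//
//   f();                 // JSFunction        -> CallFunction builtin
//   f.bind(o)();         // JSBoundFunction   -> CallBoundFunction builtin
//   new Proxy(f, {})();  // callable JSProxy  -> Runtime::kJSProxyCall
//   apiObj();            // other callables (e.g. API objects with a call
//                        // handler) -> CALL_AS_FUNCTION_DELEGATE
//   ({})();              // not callable      -> kThrowCalledNonCallable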

static void CheckSpreadAndPushToStack(MacroAssembler* masm) {
  Register argc = r0;
  Register constructor = r1;
  Register new_target = r3;

  Register scratch = r2;
  Register scratch2 = r6;

  Register spread = r4;
  Register spread_map = r5;

  // spread_map is only needed for the map checks below, so it may alias
  // spread_len.
  Register spread_len = r5;

  Label runtime_call, push_args;
  __ ldr(spread, MemOperand(sp, 0));
  __ JumpIfSmi(spread, &runtime_call);
  __ ldr(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset));

  // Check that the spread is an array.
  __ CompareInstanceType(spread_map, scratch, JS_ARRAY_TYPE);
  __ b(ne, &runtime_call);

  // Check that we have the original ArrayPrototype.
  __ ldr(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset));
  __ ldr(scratch2, NativeContextMemOperand());
  __ ldr(scratch2,
         ContextMemOperand(scratch2, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
  __ cmp(scratch, scratch2);
  __ b(ne, &runtime_call);

  // Check that the ArrayPrototype hasn't been modified in a way that would
  // affect iteration.
  __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
  __ ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ cmp(scratch, Operand(Smi::FromInt(Isolate::kProtectorValid)));
  __ b(ne, &runtime_call);

  // Check that the map of the initial array iterator hasn't changed.
  __ ldr(scratch2, NativeContextMemOperand());
  __ ldr(scratch,
         ContextMemOperand(scratch2,
                           Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
  __ ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
  __ ldr(scratch2,
         ContextMemOperand(
             scratch2, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
  __ cmp(scratch, scratch2);
  __ b(ne, &runtime_call);

  // For FastPacked kinds, iteration will have the same effect as simply
  // accessing each property in order.
  Label no_protector_check;
  __ ldr(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset));
  __ DecodeField<Map::ElementsKindBits>(scratch);
  __ cmp(scratch, Operand(FAST_HOLEY_ELEMENTS));
  __ b(hi, &runtime_call);
  // For non-FastHoley kinds, we can skip the protector check.
  __ cmp(scratch, Operand(FAST_SMI_ELEMENTS));
  __ b(eq, &no_protector_check);
  __ cmp(scratch, Operand(FAST_ELEMENTS));
  __ b(eq, &no_protector_check);
  // Check the ArrayProtector cell.
  __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
  __ ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ cmp(scratch, Operand(Smi::FromInt(Isolate::kProtectorValid)));
  __ b(ne, &runtime_call);

  __ bind(&no_protector_check);
  // Load the FixedArray backing store, but use the length from the array.
  __ ldr(spread_len, FieldMemOperand(spread, JSArray::kLengthOffset));
  __ SmiUntag(spread_len);
  __ ldr(spread, FieldMemOperand(spread, JSArray::kElementsOffset));
  __ b(&push_args);

  __ bind(&runtime_call);
  {
    // Call into the runtime to get the result of the spread.
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ SmiTag(argc);
    __ Push(constructor);
    __ Push(new_target);
    __ Push(argc);
    __ Push(spread);
    __ CallRuntime(Runtime::kSpreadIterableFixed);
    __ mov(spread, r0);
    __ Pop(argc);
    __ Pop(new_target);
    __ Pop(constructor);
    __ SmiUntag(argc);
  }

  {
    // Calculate the new nargs including the result of the spread.
    __ ldr(spread_len, FieldMemOperand(spread, FixedArray::kLengthOffset));
    __ SmiUntag(spread_len);

    __ bind(&push_args);
    // argc += spread_len - 1. Subtract 1 for the spread itself.
    __ add(argc, argc, spread_len);
    __ sub(argc, argc, Operand(1));

    // Pop the spread argument off the stack.
    __ Pop(scratch);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
    // Make scratch the space we have left. The stack might already have
    // overflowed here, which will cause scratch to become negative.
    __ sub(scratch, sp, scratch);
    // Check if the arguments will overflow the stack.
    __ cmp(scratch, Operand(spread_len, LSL, kPointerSizeLog2));
    __ b(gt, &done);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // Put the evaluated spread onto the stack as additional arguments.
  {
    __ mov(scratch, Operand(0));
    Label done, push, loop;
    __ bind(&loop);
    __ cmp(scratch, spread_len);
    __ b(eq, &done);
    __ add(scratch2, spread, Operand(scratch, LSL, kPointerSizeLog2));
    __ ldr(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize));
    __ JumpIfNotRoot(scratch2, Heap::kTheHoleValueRootIndex, &push);
    __ LoadRoot(scratch2, Heap::kUndefinedValueRootIndex);
    __ bind(&push);
    __ Push(scratch2);
    __ add(scratch, scratch, Operand(1));
    __ b(&loop);
    __ bind(&done);
  }
}
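
// Illustrative JS cases for the fast path vs. the |runtime_call| fallback
// above (sketch):
//
//   f(...[1, 2, 3]);  // fast: JSArray with fast elements, pristine
//                     // Array.prototype and array iterator
//   f(..."abc");      // runtime: spread is not a JSArray
//   Array.prototype[Symbol.iterator] = function* () {};
//   f(...[1, 2, 3]);  // runtime: array iterator protector invalidated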

// static
void Builtins::Generate_CallWithSpread(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the target to call (can be any Object)
  // -----------------------------------

  // CheckSpreadAndPushToStack will push r3 to save it.
  __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  CheckSpreadAndPushToStack(masm);
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            TailCallMode::kDisallow),
          RelocInfo::CODE_TARGET);
}
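
// In JS terms (illustrative), Generate_CallWithSpread backs call sites of
// the form
//
//   f(...iterable);  // e.g. f(...[1, 2])  behaves like  f(1, 2)
//
// once the spread has been expanded onto the stack by the helper above.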

// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the constructor to call (checked to be a JSFunction)
  //  -- r3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(r1);

  // The calling convention for function-specific ConstructStubs requires
  // r2 to contain either an AllocationSite or undefined.
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset));
  __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
}
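
// The final |add pc, ...| above is the tail call itself: it jumps straight
// to the first instruction of the construct stub's Code object (skipping the
// Code header and stripping the heap-object tag), with lr left intact so the
// stub returns directly to our caller.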

// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSBoundFunction)
  //  -- r3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(r1);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  __ cmp(r1, r3);
  __ ldr(r3, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset),
         eq);

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ ldr(ip, MemOperand(ip));
  __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
}
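
// In JS terms (illustrative): for B = t.bind(x, b0, ..., bm), constructing
//
//   new B(a0, ..., an);
//
// behaves like Reflect.construct(t, [b0, ..., bm, a0, ..., an], newTarget),
// where newTarget is t itself when new.target was B (the conditional patch
// above) and the original new.target otherwise.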

// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the constructor to call (checked to be a JSProxy)
  //  -- r3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(r1);
  __ Push(r3);
  // Include the pushed new_target, constructor and the receiver.
  __ add(r0, r0, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the constructor to call (can be any Object)
  //  -- r3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(r1, &non_constructor);

  // Dispatch based on instance type.
  __ CompareObjectType(r1, r4, r5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method.
  __ ldrb(r2, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r2, Operand(1 << Map::kIsConstructor));
  __ b(eq, &non_constructor);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ cmp(r5, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ cmp(r5, Operand(JS_PROXY_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
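
// Dispatch summary for Generate_Construct (illustrative JS counterparts):
//
//   new f();                  // JSFunction          -> ConstructFunction
//   new (f.bind(o))();        // JSBoundFunction     -> ConstructBoundFunction
//   new (new Proxy(f, {}))(); // constructable proxy -> ConstructProxy
//   new ({}.toString);        // no [[Construct]]    ->
//                             //   ConstructedNonConstructable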

// static
void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the constructor to call (can be any Object)
  //  -- r3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  CheckSpreadAndPushToStack(masm);
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r1 : requested object size (untagged)
  //  -- lr : return address
  // -----------------------------------
  __ SmiTag(r1);
  __ Push(r1);
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r1 : requested object size (untagged)
  //  -- lr : return address
  // -----------------------------------
  __ SmiTag(r1);
  __ Move(r2, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(r1, r2);
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}

// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r1 : message_id as Smi
  //  -- lr : return address
  // -----------------------------------
  __ Push(r1);
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAbort);
}
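
// Note on the three builtins above: they can be reached from code that has
// no JavaScript context, so cp is cleared to Smi::kZero before tail-calling
// into the runtime; a Smi sentinel in the context register signals "no
// context".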

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  //  -- r3 : new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ cmp(r0, r2);
  __ b(lt, &too_few);
  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ b(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, r2, r5, &stack_overflow);

    // Calculate the copy start address into r0 and the copy end address into
    // r4.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
    // Adjust for the return address and receiver.
    __ add(r0, r0, Operand(2 * kPointerSize));
    __ sub(r4, r0, Operand(r2, LSL, kPointerSizeLog2));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    // r4: copy end address

    Label copy;
    __ bind(&copy);
    __ ldr(ip, MemOperand(r0, 0));
    __ push(ip);
    __ cmp(r0, r4);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    __ b(&invoke);
  }

  {  // Too few parameters: actual < expected
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, r2, r5, &stack_overflow);

    // Calculate the copy start address into r0; the copy end address is fp.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
    __ push(ip);
    __ cmp(r0, fp);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    // Fill the remaining expected arguments with undefined.
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ sub(r4, fp, Operand(r2, LSL, kPointerSizeLog2));
    // Adjust for frame.
    __ sub(r4, r4, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                           2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(ip);
    __ cmp(sp, r4);
    __ b(ne, &fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mov(r0, r2);
  // r0 : expected number of arguments
  // r1 : function (passed through to callee)
  // r3 : new target (passed through to callee)
  __ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ Call(r4);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Jump(lr);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ Jump(r4);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);
  }
}
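
// Sketch of the adaptation performed above, assuming a callee that formally
// expects |m| arguments and a call site passing |n| (illustrative):
//
//   n >= m: push the receiver and the first m actual arguments into the
//           adaptor frame; surplus arguments are not copied, so the callee
//           only sees its m formal parameters.
//   n <  m: push the receiver and all n actual arguments, then pad with
//           undefined until m arguments are present.
//
// Either way the callee observes exactly m arguments, and the adaptor frame
// is torn down when the callee returns.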

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM