// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : target
  //  -- r3                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(r1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Insert extra arguments.
  const int num_extra_args = 2;
  __ Push(r1, r3);

  // JumpToExternalReference expects r0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ add(r0, r0, Operand(num_extra_args + 1));

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the current native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the current native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray function should be a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function (tail call a stub).
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // The initial map for the builtin Array function should be a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  __ mov(r3, r1);
  // Run the native code for the Array function called as a normal function
  // (tail call a stub).
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments
  //  -- r1                 : function
  //  -- cp                 : context
  //  -- lr                 : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Condition const cc_done = (kind == MathMaxMinKind::kMin) ? mi : gt;
  Condition const cc_swap = (kind == MathMaxMinKind::kMin) ? gt : mi;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;
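  // Note: reg selects which operand's sign bit is inspected when the compare
  // below finds the values equal: Math.min(-0, +0) is -0 and Math.max(-0, +0)
  // is +0, so kMin inspects the incoming parameter (d2) and kMax the
  // accumulator (d1).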

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in r5 and the double value in d1.
  __ LoadRoot(r5, root_index);
  __ vldr(d1, FieldMemOperand(r5, HeapNumber::kValueOffset));

  // Remember how many slots to drop (including the receiver).
  __ add(r4, r0, Operand(1));

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters are done.
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lt, &done_loop);

    // Load the next parameter tagged value into r2.
    __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));

    // Load the double value of the parameter into d2, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(r2, &convert_smi);
    __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
    __ JumpIfRoot(r3, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      DCHECK(!FLAG_enable_embedded_constant_pool);
      FrameScope scope(masm, StackFrame::MANUAL);
      __ Push(lr, fp, cp, r1);
      __ add(fp, sp, Operand(2 * kPointerSize));
      __ SmiTag(r0);
      __ SmiTag(r4);
      __ Push(r0, r4, r5);
      __ mov(r0, r2);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mov(r2, r0);
      __ Pop(r0, r4, r5);
      {
        // Restore the double accumulator value (d1).
        Label done_restore;
        __ SmiToDouble(d1, r5);
        __ JumpIfSmi(r5, &done_restore);
        __ vldr(d1, FieldMemOperand(r5, HeapNumber::kValueOffset));
        __ bind(&done_restore);
      }
      __ SmiUntag(r4);
      __ SmiUntag(r0);
      __ Pop(lr, fp, cp, r1);
    }
    __ b(&convert);
    __ bind(&convert_number);
    __ vldr(d2, FieldMemOperand(r2, HeapNumber::kValueOffset));
    __ b(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDouble(d2, r2);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left
    // hand side (d1) and the next parameter value on the right hand side (d2).
    Label compare_nan, compare_swap;
    __ VFPCompareAndSetFlags(d1, d2);
    __ b(cc_done, &loop);
    __ b(cc_swap, &compare_swap);
    __ b(vs, &compare_nan);

    // Left and right hand side are equal, check for -0 vs. +0.
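    // The high word of an IEEE 754 -0.0 is 0x80000000 (sign bit set, all
    // other bits zero), which is what distinguishes it from +0.0 here.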
    __ VmovHigh(ip, reg);
    __ cmp(ip, Operand(0x80000000));
    __ b(ne, &loop);

    // Result is on the right hand side.
    __ bind(&compare_swap);
    __ vmov(d1, d2);
    __ mov(r5, r2);
    __ b(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    __ bind(&compare_nan);
    __ LoadRoot(r5, Heap::kNanValueRootIndex);
    __ vldr(d1, FieldMemOperand(r5, HeapNumber::kValueOffset));
    __ b(&loop);
  }

  __ bind(&done_loop);
  __ mov(r0, r5);
  __ Drop(r4);
  __ Ret();
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
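    // PreIndex addressing first advances sp to the first argument's slot and
    // then loads from it, so the Drop(2) below pops exactly that slot and
    // the receiver.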
    __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
    __ Drop(2);
  }

  // 2a. Convert the first argument to a number.
  __ Jump(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ Move(r0, Smi::FromInt(0));
  __ Ret(1);
}


// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- r3                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // 2. Load the first argument into r2 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ Move(r2, Smi::FromInt(0));
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r2 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(r2, &done_convert);
    __ CompareObjectType(r2, r4, r4, HEAP_NUMBER_TYPE);
    __ b(eq, &done_convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r1, r3);
      __ Move(r0, r2);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Move(r2, r0);
      __ Pop(r1, r3);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmp(r1, r3);
  __ b(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(r0, r1, r2, r4, r5, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create the new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r2);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r2);
  }
  __ str(r2, FieldMemOperand(r0, JSValue::kValueOffset));
  __ Ret();
}


// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
    __ Drop(2);
  }

  // 2a. At least one argument, return r0 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(r0, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(r0, r1, r1, FIRST_NONSTRING_TYPE);
    __ b(hi, &to_string);
    __ b(eq, &symbol_descriptive_string);
    __ Ret();
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(r0, Heap::kempty_stringRootIndex);
    __ Ret(1);
  }

  // 3a. Convert r0 to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in r0 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Push(r0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}


// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- r3                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // 2. Load the first argument into r2 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadRoot(r2, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r2 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(r2, &convert);
    __ CompareObjectType(r2, r4, r4, FIRST_NONSTRING_TYPE);
    __ b(lo, &done_convert);
    __ bind(&convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(r1, r3);
      __ Move(r0, r2);
      __ CallStub(&stub);
      __ Move(r2, r0);
      __ Pop(r1, r3);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmp(r1, r3);
  __ b(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(r0, r1, r2, r4, r5, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create the new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r2);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r2);
  }
  __ str(r2, FieldMemOperand(r0, JSValue::kValueOffset));
  __ Ret();
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
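  // A Code object pointer is tagged; adding kHeaderSize and subtracting the
  // tag yields the address of its first instruction. This pattern recurs
  // throughout this file.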
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r0 : argument count (preserved for callee)
  //  -- r1 : target function (preserved for callee)
  //  -- r3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    __ SmiTag(r0);
    __ push(r0);
    // Push a copy of the target function and the new target.
    __ push(r1);
    __ push(r3);
    // Push function as parameter to the runtime call.
    __ Push(r1);

    __ CallRuntime(function_id, 1);
    __ mov(r2, r0);

    // Restore target function and new target.
    __ pop(r3);
    __ pop(r1);
    __ pop(r0);
    __ SmiUntag(r0, r0);
  }
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // the stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- r2     : allocation site or undefined
  //  -- r3     : new target
  //  -- cp     : context
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(r2, r4);
    __ Push(cp);
    __ SmiTag(r0);
    __ Push(r2, r0);

    if (create_implicit_receiver) {
      // Allocate the new receiver object.
      __ Push(r1, r3);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mov(r4, r0);
      __ Pop(r1, r3);

      // ----------- S t a t e -------------
      //  -- r1: constructor function
      //  -- r3: new target
      //  -- r4: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ ldr(r0, MemOperand(sp));
    }

    __ SmiUntag(r0);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ push(r4);
      __ push(r4);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r0: number of arguments
    // r1: constructor function
    // r2: address of last argument (caller sp)
    // r3: new target
    // r4: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
    __ SmiTag(r4, r0);
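    // r4 now holds a smi-tagged count, so scaling by (kPointerSizeLog2 - 1)
    // converts it directly into a byte offset (smis carry a one-bit tag on
    // 32-bit targets), and subtracting 2 below is subtracting Smi::FromInt(1).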
    __ b(&entry);
    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, r4, LSL, kPointerSizeLog2 - 1));
    __ push(ip);
    __ bind(&entry);
    __ sub(r4, r4, Operand(2), SetCC);
    __ b(ge, &loop);

    // Call the function.
    // r0: number of arguments
    // r1: constructor function
    // r3: new target
    ParameterCount actual(r0);
    __ InvokeFunction(r1, r3, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r0: result
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    __ ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // r0: result
      // sp[0]: receiver
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(r0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CompareObjectType(r0, r1, r3, FIRST_JS_RECEIVER_TYPE);
      __ b(ge, &exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ ldr(r0, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // r0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ ldr(r1, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // A smi result means the constructor from a derived class returned
  // something that is neither undefined nor an Object, so throw.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(r0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
  }
  __ Jump(lr);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the value to pass to the generator
  //  -- r1 : the JSGeneratorObject to resume
  //  -- r2 : the resume mode (tagged)
  //  -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(r1);

  // Store input value into generator object.
  __ str(r0, FieldMemOperand(r1, JSGeneratorObject::kInputOrDebugPosOffset));
  __ RecordWriteField(r1, JSGeneratorObject::kInputOrDebugPosOffset, r0, r3,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
  __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(masm->isolate());
  STATIC_ASSERT(StepFrame > StepIn);
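  // The assert guarantees that the single signed comparison against StepIn
  // below also catches StepFrame.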
  __ mov(ip, Operand(last_step_action));
  __ ldrsb(ip, MemOperand(ip));
  __ cmp(ip, Operand(StepIn));
  __ b(ge, &prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ mov(ip, Operand(debug_suspended_generator));
  __ ldr(ip, MemOperand(ip));
  __ cmp(ip, Operand(r1));
  __ b(eq, &prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Push receiver.
  __ ldr(ip, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
  __ Push(ip);

  // ----------- S t a t e -------------
  //  -- r1    : the JSGeneratorObject to resume
  //  -- r2    : the resume mode (tagged)
  //  -- r4    : generator function
  //  -- cp    : generator context
  //  -- lr    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r3,
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label done_loop, loop;
    __ bind(&loop);
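    // The count in r3 is smi-tagged; subtracting Smi::FromInt(1) keeps it
    // tagged, and SetCC lets the mi branch detect when it goes negative.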
    __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
    __ b(mi, &done_loop);
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ b(&loop);
    __ bind(&done_loop);
  }

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
  __ CompareObjectType(r3, r3, r3, BYTECODE_ARRAY_TYPE);
  __ b(ne, &old_generator);

  // New-style (ignition/turbofan) generator object
  {
    __ ldr(r0, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r0,
         FieldMemOperand(r0, SharedFunctionInfo::kFormalParameterCountOffset));
    __ SmiUntag(r0);
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(r3, r1);
    __ Move(r1, r4);
    __ ldr(r5, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
    __ Jump(r5);
  }

  // Old-style (full-codegen) generator object
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    DCHECK(!FLAG_enable_embedded_constant_pool);
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(lr, fp);
    __ Move(fp, sp);
    __ Push(cp, r4);

    // Restore the operand stack.
    __ ldr(r0, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
    __ ldr(r3, FieldMemOperand(r0, FixedArray::kLengthOffset));
    __ add(r0, r0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ add(r3, r0, Operand(r3, LSL, kPointerSizeLog2 - 1));
    {
      Label done_loop, loop;
      __ bind(&loop);
      __ cmp(r0, r3);
      __ b(eq, &done_loop);
      __ ldr(ip, MemOperand(r0, kPointerSize, PostIndex));
      __ Push(ip);
      __ b(&loop);
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(ip, Heap::kEmptyFixedArrayRootIndex);
    __ str(ip, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));

    // Resume the generator function at the continuation.
    __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset));
    __ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
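    // The continuation is a smi-tagged code offset; ASR #1 untags it so it
    // can be added to the code entry address computed above.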
    __ add(r3, r3, Operand(r2, ASR, 1));
    __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
    __ Move(r0, r1);  // Continuation expects generator object in r0.
    __ Jump(r3);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r1, r2, r4);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(r1, r2);
    __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r1, r2);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(r1, r2);
    __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ push(r1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}


enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Clobbers r2; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
  // Make r2 the space we have left. The stack might already be overflowed
  // here which will cause r2 to become negative.
  __ sub(r2, sp, r2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
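    // PointerOffsetFromSmiKey folds the smi untagging and the scaling by
    // kPointerSize into a single operand.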
    __ cmp(r2, Operand::PointerOffsetFromSmiKey(argc));
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ cmp(r2, Operand(argc, LSL, kPointerSizeLog2));
  }
  __ b(gt, &okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r0: new.target
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  // r5-r6, r8 (if !FLAG_enable_embedded_constant_pool) and cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(cp, Operand(context_address));
    __ ldr(cp, MemOperand(cp));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r1, r2);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r2.
    Generate_CheckStackOverflow(masm, r3, kArgcIsUntaggedInt);

    // Remember new.target.
    __ mov(r5, r0);

    // Copy arguments to the stack in a loop.
    // r1: function
    // r3: argc
    // r4: argv, i.e. points to first arg
    Label loop, entry;
    __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
    // r2 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
    __ ldr(r0, MemOperand(r0));  // dereference handle
    __ push(r0);  // push parameter
    __ bind(&entry);
    __ cmp(r4, r2);
    __ b(ne, &loop);

    // Set up new.target and argc.
    __ mov(r0, Operand(r3));
    __ mov(r3, Operand(r5));

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mov(r5, Operand(r4));
    __ mov(r6, Operand(r4));
    if (!FLAG_enable_embedded_constant_pool) {
      __ mov(r8, Operand(r4));
    }
    if (kR9Available == 1) {
      __ mov(r9, Operand(r4));
    }

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame, remove the parameters (except the function), and
    // return.
    // Respect ABI stack constraint.
  }
  __ Jump(lr);

  // r0: result
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ ldr(args_count,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ ldr(args_count,
         FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments.
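  // The parameter size recorded in the BytecodeArray already includes the
  // receiver and is added to sp without scaling below, i.e. it is a byte
  // count.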
  __ add(sp, sp, args_count, LeaveCC);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r1: the JS function object being called.
//   o r3: the new target
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(r1);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ ldr(r0, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  Register debug_info = kInterpreterBytecodeArrayRegister;
  DCHECK(!debug_info.is(r0));
  __ ldr(debug_info, FieldMemOperand(r0, SharedFunctionInfo::kDebugInfoOffset));
  __ cmp(debug_info, Operand(DebugInfo::uninitialized()));
  // Load original bytecode array or the debug copy.
  __ ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(r0, SharedFunctionInfo::kFunctionDataOffset), eq);
  __ ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex), ne);

  // Check function data field is actually a BytecodeArray object.
  Label bytecode_array_not_present;
  __ CompareRoot(kInterpreterBytecodeArrayRegister,
                 Heap::kUndefinedValueRootIndex);
  __ b(eq, &bytecode_array_not_present);
  if (FLAG_debug_code) {
    __ SmiTst(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r0, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Load the initial bytecode offset.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
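  // The offset is relative to the tagged BytecodeArray pointer, so adding it
  // to that pointer (as the dispatch ldrb below does) lands exactly on the
  // first bytecode past the object header.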

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(r0, kInterpreterBytecodeOffsetRegister);
  __ Push(r3, kInterpreterBytecodeArrayRegister, r0);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ ldr(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                               BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ sub(r9, sp, Operand(r4));
    __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
    __ cmp(r9, Operand(r2));
    __ b(hs, &ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
    __ b(&loop_check, al);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(r9);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ sub(r4, r4, Operand(kPointerSize), SetCC);
    __ b(&loop_header, ge);
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ldr(ip, MemOperand(kInterpreterDispatchTableRegister, r1, LSL,
                        kPointerSizeLog2));
  __ Call(ip);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in r0.
  LeaveInterpreterFrame(masm, r2);
  __ Jump(lr);

  // If the bytecode array is no longer present, then the underlying function
  // has been switched to a different kind of code and we heal the closure by
  // switching the code entry field over to the new code object as well.
  __ bind(&bytecode_array_not_present);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kCodeOffset));
  __ add(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ str(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(r1, r4, r5);
  __ Jump(r4);
}

void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
  // Save the function and context for call to CompileBaseline.
  __ ldr(r1, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
  __ ldr(kContextRegister,
         MemOperand(fp, StandardFrameConstants::kContextOffset));

  // Leave the frame before recompiling for baseline so that we don't count as
  // an activation on the stack.
  LeaveInterpreterFrame(masm, r2);

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Push return value.
    __ push(r0);

    // Push function as argument and compile for baseline.
    __ push(r1);
    __ CallRuntime(Runtime::kCompileBaseline);

    // Restore return value.
    __ pop(r0);
  }
  __ Jump(lr);
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
                                         Register limit, Register scratch) {
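  // Walks from index down to limit, pushing one slot per iteration; the
  // arguments sit at decreasing addresses, matching the interpreter's
  // downward-growing register file, so they are pushed in source order.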
  Label loop_header, loop_check;
  __ b(al, &loop_check);
  __ bind(&loop_header);
  __ ldr(scratch, MemOperand(index, -kPointerSize, PostIndex));
  __ push(scratch);
  __ bind(&loop_check);
  __ cmp(index, limit);
  __ b(gt, &loop_header);
}

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- r1 : the target to call (can be any Object).
  // -----------------------------------

  // Find the address of the last argument.
  __ add(r3, r0, Operand(1));  // Add one for receiver.
  __ mov(r3, Operand(r3, LSL, kPointerSizeLog2));
  __ sub(r3, r2, r3);

  // Push the arguments.
  Generate_InterpreterPushArgs(masm, r2, r3, r4);

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r0 : argument count (not including receiver)
  // -- r3 : new target
  // -- r1 : constructor to call
  // -- r2 : address of the first argument
  // -----------------------------------

  // Find the address of the last argument.
  __ mov(r4, Operand(r0, LSL, kPointerSizeLog2));
  __ sub(r4, r2, r4);

  // Push a slot for the receiver to be constructed.
  __ mov(ip, Operand::Zero());
  __ push(ip);

  // Push the arguments.
  Generate_InterpreterPushArgs(masm, r2, r4, r5);

  // Call the constructor with r0, r1, and r3 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
  __ Move(r2, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ add(lr, r2, Operand(interpreter_entry_return_pc_offset->value() +
                         Code::kHeaderSize - kHeapObjectTag));

  // Initialize the dispatch table register.
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ ldr(kInterpreterBytecodeArrayRegister,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r1, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ ldr(kInterpreterBytecodeOffsetRegister,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ldr(ip, MemOperand(kInterpreterDispatchTableRegister, r1, LSL,
                        kPointerSizeLog2));
  __ mov(pc, ip);
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : argument count (preserved for callee)
  //  -- r3 : new target (preserved for callee)
  //  -- r1 : target function (preserved for callee)
  // -----------------------------------
  // First look up code; maybe we don't need to compile!
   1239   Label gotta_call_runtime, gotta_call_runtime_no_stack;
   1240   Label maybe_call_runtime;
   1241   Label try_shared;
   1242   Label loop_top, loop_bottom;
   1243 
   1244   Register argument_count = r0;
   1245   Register closure = r1;
   1246   Register new_target = r3;
   1247   __ push(argument_count);
   1248   __ push(new_target);
   1249   __ push(closure);
   1250 
   1251   Register map = argument_count;
   1252   Register index = r2;
   1253   __ ldr(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
   1254   __ ldr(map,
   1255          FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
   1256   __ ldr(index, FieldMemOperand(map, FixedArray::kLengthOffset));
   1257   __ cmp(index, Operand(Smi::FromInt(2)));
   1258   __ b(lt, &gotta_call_runtime);
   1259 
   1260   // Find literals.
   1261   // r3  : native context
   1262   // r2  : length / index
   1263   // r0  : optimized code map
   1264   // stack[0] : new target
   1265   // stack[4] : closure
   1266   Register native_context = r3;
   1267   __ ldr(native_context, NativeContextMemOperand());
   1268 
   1269   __ bind(&loop_top);
   1270   Register temp = r1;
   1271   Register array_pointer = r5;
   1272 
   1273   // Does the native context match?
   1274   __ add(array_pointer, map, Operand::PointerOffsetFromSmiKey(index));
   1275   __ ldr(temp, FieldMemOperand(array_pointer,
   1276                                SharedFunctionInfo::kOffsetToPreviousContext));
   1277   __ ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
   1278   __ cmp(temp, native_context);
   1279   __ b(ne, &loop_bottom);
   1280   // OSR id set to none?
   1281   __ ldr(temp, FieldMemOperand(array_pointer,
   1282                                SharedFunctionInfo::kOffsetToPreviousOsrAstId));
   1283   const int bailout_id = BailoutId::None().ToInt();
   1284   __ cmp(temp, Operand(Smi::FromInt(bailout_id)));
   1285   __ b(ne, &loop_bottom);
   1286 
   1287   // Literals available?
   1288   Label got_literals, maybe_cleared_weakcell;
   1289   __ ldr(temp, FieldMemOperand(array_pointer,
   1290                                SharedFunctionInfo::kOffsetToPreviousLiterals));
   1291   // temp contains either a WeakCell pointing to the literals array or the
   1292   // literals array directly.
   1293   STATIC_ASSERT(WeakCell::kValueOffset == FixedArray::kLengthOffset);
   1294   __ ldr(r4, FieldMemOperand(temp, WeakCell::kValueOffset));
   1295   __ JumpIfSmi(r4, &maybe_cleared_weakcell);
   1296   // r4 is a pointer, therefore temp is a WeakCell pointing to a literals array.
   1297   __ ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
   1298   __ jmp(&got_literals);
   1299 
   1300   // r4 is a smi. If it's 0, then we are looking at a cleared WeakCell
   1301   // around the literals array, and we should visit the runtime. If it's > 0,
   1302   // then temp already contains the literals array.
   1303   __ bind(&maybe_cleared_weakcell);
   1304   __ cmp(r4, Operand(Smi::FromInt(0)));
   1305   __ b(eq, &gotta_call_runtime);
   1306 
   1307   // Save the literals in the closure.
   1308   __ bind(&got_literals);
   1309   __ ldr(r4, MemOperand(sp, 0));
   1310   __ str(temp, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
   1311   __ push(index);
   1312   __ RecordWriteField(r4, JSFunction::kLiteralsOffset, temp, index,
   1313                       kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
   1314                       OMIT_SMI_CHECK);
   1315   __ pop(index);
   1316 
   1317   // Code available?
   1318   Register entry = r4;
   1319   __ ldr(entry,
   1320          FieldMemOperand(array_pointer,
   1321                          SharedFunctionInfo::kOffsetToPreviousCachedCode));
   1322   __ ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
   1323   __ JumpIfSmi(entry, &maybe_call_runtime);
   1324 
   1325   // Found literals and code. Get them into the closure and return.
   1326   __ pop(closure);
   1327   // Store code entry in the closure.
   1328   __ add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
   1329 
   1330   Label install_optimized_code_and_tailcall;
   1331   __ bind(&install_optimized_code_and_tailcall);
   1332   __ str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
   1333   __ RecordWriteCodeEntryField(closure, entry, r5);
   1334 
   1335   // Link the closure into the optimized function list.
   1336   // r4 : code entry
   1337   // r3 : native context
   1338   // r1 : closure
   1339   __ ldr(r5,
   1340          ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
   1341   __ str(r5, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
   1342   __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, r5, r0,
   1343                       kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
   1344                       OMIT_SMI_CHECK);
   1345   const int function_list_offset =
   1346       Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
   1347   __ str(closure,
   1348          ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
   1349   // Save closure before the write barrier.
   1350   __ mov(r5, closure);
   1351   __ RecordWriteContextSlot(native_context, function_list_offset, closure, r0,
   1352                             kLRHasNotBeenSaved, kDontSaveFPRegs);
   1353   __ mov(closure, r5);
   1354   __ pop(new_target);
   1355   __ pop(argument_count);
   1356   __ Jump(entry);
   1357 
   1358   __ bind(&loop_bottom);
   1359   __ sub(index, index, Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
   1360   __ cmp(index, Operand(Smi::FromInt(1)));
   1361   __ b(gt, &loop_top);
   1362 
   1363   // We found neither literals nor code.
   1364   __ jmp(&gotta_call_runtime);
   1365 
   1366   __ bind(&maybe_call_runtime);
   1367   __ pop(closure);
   1368 
    1369   // Last possibility. Check the context-free optimized code map entry.
   1370   __ ldr(entry, FieldMemOperand(map, FixedArray::kHeaderSize +
   1371                                          SharedFunctionInfo::kSharedCodeIndex));
   1372   __ ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
   1373   __ JumpIfSmi(entry, &try_shared);
   1374 
   1375   // Store code entry in the closure.
   1376   __ add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
   1377   __ jmp(&install_optimized_code_and_tailcall);
   1378 
   1379   __ bind(&try_shared);
   1380   __ pop(new_target);
   1381   __ pop(argument_count);
   1382   // Is the full code valid?
   1383   __ ldr(entry,
   1384          FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
   1385   __ ldr(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
   1386   __ ldr(r5, FieldMemOperand(entry, Code::kFlagsOffset));
   1387   __ and_(r5, r5, Operand(Code::KindField::kMask));
   1388   __ mov(r5, Operand(r5, LSR, Code::KindField::kShift));
   1389   __ cmp(r5, Operand(Code::BUILTIN));
   1390   __ b(eq, &gotta_call_runtime_no_stack);
   1391   // Yes, install the full code.
   1392   __ add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
   1393   __ str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
   1394   __ RecordWriteCodeEntryField(closure, entry, r5);
   1395   __ Jump(entry);
   1396 
   1397   __ bind(&gotta_call_runtime);
   1398   __ pop(closure);
   1399   __ pop(new_target);
   1400   __ pop(argument_count);
   1401   __ bind(&gotta_call_runtime_no_stack);
   1402   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
   1403 }
   1404 
   1405 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
   1406   GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
   1407 }
   1408 
   1409 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
   1410   GenerateTailCallToReturnedCode(masm,
   1411                                  Runtime::kCompileOptimized_NotConcurrent);
   1412 }
   1413 
   1414 
   1415 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
   1416   GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
   1417 }
   1418 
   1419 
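// The code-age stubs arrange for r0 to hold the return address into the
// patched function, i.e. the start of the code-age patch sequence. Once the
// C helper below has rewritten that sequence back into a young prologue,
// "mov pc, r0" simply resumes the function from its (now young) start.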
   1420 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
    1421   // For now, we rely on the fact that make_code_young doesn't do any garbage
    1422   // collection, which allows us to save/restore the registers without worrying
    1423   // about which of them contain pointers. We also don't build an internal frame
    1424   // to keep the code fast, since we shouldn't have to do stack crawls in
    1425   // MakeCodeYoung. This seems a bit fragile.
   1426 
   1427   // The following registers must be saved and restored when calling through to
   1428   // the runtime:
   1429   //   r0 - contains return address (beginning of patch sequence)
   1430   //   r1 - isolate
   1431   //   r3 - new target
   1432   FrameScope scope(masm, StackFrame::MANUAL);
   1433   __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
   1434   __ PrepareCallCFunction(2, 0, r2);
   1435   __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
   1436   __ CallCFunction(
   1437       ExternalReference::get_make_code_young_function(masm->isolate()), 2);
   1438   __ ldm(ia_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
   1439   __ mov(pc, r0);
   1440 }
   1441 
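// CODE_AGE_LIST instantiates the generator below once per code age. For an
// age named, say, Quadragenarian (the name here is purely illustrative), the
// macro expands to:
//
//   void Builtins::Generate_MakeQuadragenarianCodeYoungAgainEvenMarking(
//       MacroAssembler* masm) {
//     GenerateMakeCodeYoungAgainCommon(masm);
//   }
//
// plus the matching OddMarking variant.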
   1442 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
   1443 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
   1444     MacroAssembler* masm) {                                  \
   1445   GenerateMakeCodeYoungAgainCommon(masm);                    \
   1446 }                                                            \
   1447 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
   1448     MacroAssembler* masm) {                                  \
   1449   GenerateMakeCodeYoungAgainCommon(masm);                    \
   1450 }
   1451 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
   1452 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
   1453 
   1454 
   1455 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
    1456   // For now, as in GenerateMakeCodeYoungAgainCommon, we rely on the fact that
    1457   // make_code_young doesn't do any garbage collection, which allows us to
    1458   // save/restore the registers without worrying about which of them contain
    1459   // pointers.
   1460 
   1461   // The following registers must be saved and restored when calling through to
   1462   // the runtime:
   1463   //   r0 - contains return address (beginning of patch sequence)
   1464   //   r1 - isolate
   1465   //   r3 - new target
   1466   FrameScope scope(masm, StackFrame::MANUAL);
   1467   __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
   1468   __ PrepareCallCFunction(2, 0, r2);
   1469   __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
   1470   __ CallCFunction(ExternalReference::get_mark_code_as_executed_function(
   1471         masm->isolate()), 2);
   1472   __ ldm(ia_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
   1473 
   1474   // Perform prologue operations usually performed by the young code stub.
   1475   __ PushStandardFrame(r1);
   1476 
   1477   // Jump to point after the code-age stub.
   1478   __ add(r0, r0, Operand(kNoCodeAgeSequenceLength));
   1479   __ mov(pc, r0);
   1480 }
   1481 
   1482 
   1483 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
   1484   GenerateMakeCodeYoungAgainCommon(masm);
   1485 }
   1486 
   1487 
   1488 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
   1489   Generate_MarkCodeAsExecutedOnce(masm);
   1490 }
   1491 
   1492 
   1493 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
   1494                                              SaveFPRegsMode save_doubles) {
   1495   {
   1496     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
   1497 
    1498     // Preserve registers across the notification; this is important for
    1499     // compiled stubs that tail-call the runtime on deopt, passing their
    1500     // parameters in registers.
   1501     __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved);
   1502     // Pass the function and deoptimization type to the runtime system.
   1503     __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
   1504     __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved);
   1505   }
   1506 
   1507   __ add(sp, sp, Operand(kPointerSize));  // Ignore state
   1508   __ mov(pc, lr);  // Jump to miss handler
   1509 }
   1510 
   1511 
   1512 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
   1513   Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
   1514 }
   1515 
   1516 
   1517 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
   1518   Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
   1519 }
   1520 
   1521 
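// On entry the deoptimizer has left the bailout state (a Smi) on top of the
// stack, optionally followed by the value for the top-of-stack register, as
// consumed below:
//
//   sp[0] : bailout state
//   sp[4] : TOS value (only for BailoutState::TOS_REGISTER)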
   1522 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
   1523                                              Deoptimizer::BailoutType type) {
   1524   {
   1525     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
   1526     // Pass the function and deoptimization type to the runtime system.
   1527     __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
   1528     __ push(r0);
   1529     __ CallRuntime(Runtime::kNotifyDeoptimized);
   1530   }
   1531 
   1532   // Get the full codegen state from the stack and untag it -> r6.
   1533   __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
   1534   __ SmiUntag(r6);
   1535   // Switch on the state.
   1536   Label with_tos_register, unknown_state;
   1537   __ cmp(r6,
   1538          Operand(static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS)));
   1539   __ b(ne, &with_tos_register);
   1540   __ add(sp, sp, Operand(1 * kPointerSize));  // Remove state.
   1541   __ Ret();
   1542 
   1543   __ bind(&with_tos_register);
   1544   DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r0.code());
   1545   __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
   1546   __ cmp(r6,
   1547          Operand(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER)));
   1548   __ b(ne, &unknown_state);
   1549   __ add(sp, sp, Operand(2 * kPointerSize));  // Remove state.
   1550   __ Ret();
   1551 
   1552   __ bind(&unknown_state);
   1553   __ stop("no cases left");
   1554 }
   1555 
   1556 
   1557 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
   1558   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
   1559 }
   1560 
   1561 
   1562 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
   1563   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
   1564 }
   1565 
   1566 
   1567 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
   1568   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
   1569 }
   1570 
   1571 
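// A pseudo-code sketch of the signature check generated below (the helper
// names are illustrative, not actual V8 API):
//
//   if (info->signature() == undefined) return;  // always compatible
//   for (map = receiver->map(); ; map = map->prototype()->map()) {
//     ctor = map->GetConstructor();
//     if (ctor is a JSFunction) {
//       for (t = ctor->shared()->function_data();
//            t is a FunctionTemplateInfo; t = t->parent_template()) {
//         if (t == info->signature()) return;  // compatible receiver
//       }
//     }
//     if (!map->has_hidden_prototype()) goto fail;
//   }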
   1572 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
   1573                                     Register function_template_info,
   1574                                     Register scratch0, Register scratch1,
   1575                                     Register scratch2,
   1576                                     Label* receiver_check_failed) {
   1577   Register signature = scratch0;
   1578   Register map = scratch1;
   1579   Register constructor = scratch2;
   1580 
   1581   // If there is no signature, return the holder.
   1582   __ ldr(signature, FieldMemOperand(function_template_info,
   1583                                     FunctionTemplateInfo::kSignatureOffset));
   1584   __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
   1585   Label receiver_check_passed;
   1586   __ b(eq, &receiver_check_passed);
   1587 
   1588   // Walk the prototype chain.
   1589   __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
   1590   Label prototype_loop_start;
   1591   __ bind(&prototype_loop_start);
   1592 
   1593   // Get the constructor, if any.
   1594   __ GetMapConstructor(constructor, map, ip, ip);
   1595   __ cmp(ip, Operand(JS_FUNCTION_TYPE));
   1596   Label next_prototype;
   1597   __ b(ne, &next_prototype);
   1598   Register type = constructor;
   1599   __ ldr(type,
   1600          FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
   1601   __ ldr(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));
   1602 
   1603   // Loop through the chain of inheriting function templates.
   1604   Label function_template_loop;
   1605   __ bind(&function_template_loop);
   1606 
   1607   // If the signatures match, we have a compatible receiver.
   1608   __ cmp(signature, type);
   1609   __ b(eq, &receiver_check_passed);
   1610 
   1611   // If the current type is not a FunctionTemplateInfo, load the next prototype
   1612   // in the chain.
   1613   __ JumpIfSmi(type, &next_prototype);
   1614   __ CompareObjectType(type, ip, ip, FUNCTION_TEMPLATE_INFO_TYPE);
   1615 
   1616   // Otherwise load the parent function template and iterate.
   1617   __ ldr(type,
   1618          FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset),
   1619          eq);
   1620   __ b(&function_template_loop, eq);
   1621 
   1622   // Load the next prototype.
   1623   __ bind(&next_prototype);
   1624   __ ldr(ip, FieldMemOperand(map, Map::kBitField3Offset));
   1625   __ tst(ip, Operand(Map::HasHiddenPrototype::kMask));
   1626   __ b(eq, receiver_check_failed);
   1627   __ ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
   1628   __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
   1629   // Iterate.
   1630   __ b(&prototype_loop_start);
   1631 
   1632   __ bind(&receiver_check_passed);
   1633 }
   1634 
   1635 
   1636 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
   1637   // ----------- S t a t e -------------
   1638   //  -- r0                 : number of arguments excluding receiver
   1639   //  -- r1                 : callee
   1640   //  -- lr                 : return address
   1641   //  -- sp[0]              : last argument
   1642   //  -- ...
   1643   //  -- sp[4 * (argc - 1)] : first argument
   1644   //  -- sp[4 * argc]       : receiver
   1645   // -----------------------------------
   1646 
   1647   // Load the FunctionTemplateInfo.
   1648   __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
   1649   __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
   1650 
   1651   // Do the compatible receiver check.
   1652   Label receiver_check_failed;
   1653   __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));
   1654   CompatibleReceiverCheck(masm, r2, r3, r4, r5, r6, &receiver_check_failed);
   1655 
    1656   // Load the fast handler code object from the FunctionTemplateInfo's call
    1657   // code and jump to its first instruction.
   1658   __ ldr(r4, FieldMemOperand(r3, FunctionTemplateInfo::kCallCodeOffset));
   1659   __ ldr(r4, FieldMemOperand(r4, CallHandlerInfo::kFastHandlerOffset));
   1660   __ add(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
   1661   __ Jump(r4);
   1662 
   1663   // Compatible receiver check failed: throw an Illegal Invocation exception.
   1664   __ bind(&receiver_check_failed);
   1665   // Drop the arguments (including the receiver)
   1666   __ add(r0, r0, Operand(1));
   1667   __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
   1668   __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
   1669 }
   1670 
   1671 
   1672 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   1673   // Lookup the function in the JavaScript frame.
   1674   __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   1675   {
   1676     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
   1677     // Pass function as argument.
   1678     __ push(r0);
   1679     __ CallRuntime(Runtime::kCompileForOnStackReplacement);
   1680   }
   1681 
   1682   // If the code object is null, just return to the unoptimized code.
   1683   Label skip;
   1684   __ cmp(r0, Operand(Smi::FromInt(0)));
   1685   __ b(ne, &skip);
   1686   __ Ret();
   1687 
   1688   __ bind(&skip);
   1689 
   1690   // Load deoptimization data from the code object.
   1691   // <deopt_data> = <code>[#deoptimization_data_offset]
   1692   __ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset));
   1693 
   1694   { ConstantPoolUnavailableScope constant_pool_unavailable(masm);
   1695     __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start
   1696 
   1697     if (FLAG_enable_embedded_constant_pool) {
   1698       __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r0);
   1699     }
   1700 
   1701     // Load the OSR entrypoint offset from the deoptimization data.
   1702     // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
   1703     __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(
   1704         DeoptimizationInputData::kOsrPcOffsetIndex)));
   1705 
   1706     // Compute the target address = code start + osr_offset
   1707     __ add(lr, r0, Operand::SmiUntag(r1));
   1708 
   1709     // And "return" to the OSR entry point of the function.
   1710     __ Ret();
   1711   }
   1712 }
   1713 
   1714 
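// The fast path below is, in spirit (pseudo-code, not actual V8 API):
//
//   if (date->cache_stamp() == isolate->date_cache_stamp())
//     return date->cached_field(field_index);  // stamp valid, cache is fresh
//   return %DateField(date, field_index);      // fall back to the runtime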
   1715 // static
   1716 void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
   1717                                                int field_index) {
   1718   // ----------- S t a t e -------------
   1719   //  -- r0    : number of arguments
   1720   //  -- r1    : function
   1721   //  -- cp    : context
   1722   //  -- lr    : return address
   1723   //  -- sp[0] : receiver
   1724   // -----------------------------------
   1725 
   1726   // 1. Pop receiver into r0 and check that it's actually a JSDate object.
   1727   Label receiver_not_date;
   1728   {
   1729     __ Pop(r0);
   1730     __ JumpIfSmi(r0, &receiver_not_date);
   1731     __ CompareObjectType(r0, r2, r3, JS_DATE_TYPE);
   1732     __ b(ne, &receiver_not_date);
   1733   }
   1734 
   1735   // 2. Load the specified date field, falling back to the runtime as necessary.
   1736   if (field_index == JSDate::kDateValue) {
   1737     __ ldr(r0, FieldMemOperand(r0, JSDate::kValueOffset));
   1738   } else {
   1739     if (field_index < JSDate::kFirstUncachedField) {
   1740       Label stamp_mismatch;
   1741       __ mov(r1, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
   1742       __ ldr(r1, MemOperand(r1));
   1743       __ ldr(ip, FieldMemOperand(r0, JSDate::kCacheStampOffset));
   1744       __ cmp(r1, ip);
   1745       __ b(ne, &stamp_mismatch);
   1746       __ ldr(r0, FieldMemOperand(
   1747                      r0, JSDate::kValueOffset + field_index * kPointerSize));
   1748       __ Ret();
   1749       __ bind(&stamp_mismatch);
   1750     }
   1751     FrameScope scope(masm, StackFrame::INTERNAL);
   1752     __ PrepareCallCFunction(2, r1);
   1753     __ mov(r1, Operand(Smi::FromInt(field_index)));
   1754     __ CallCFunction(
   1755         ExternalReference::get_date_field_function(masm->isolate()), 2);
   1756   }
   1757   __ Ret();
   1758 
   1759   // 3. Raise a TypeError if the receiver is not a date.
   1760   __ bind(&receiver_not_date);
   1761   {
   1762     FrameScope scope(masm, StackFrame::MANUAL);
   1763     __ Push(r0, lr, fp);
   1764     __ Move(fp, sp);
   1765     __ Push(cp, r1);
   1766     __ Push(Smi::FromInt(0));
   1767     __ CallRuntime(Runtime::kThrowNotDateError);
   1768   }
   1769 }
   1770 
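// Implements roughly the following ES6 semantics (sketch only, using spec
// operations rather than real V8 helpers):
//
//   Function.prototype.apply = function (thisArg, argArray) {
//     if (!IsCallable(this)) throw TypeError;
//     if (argArray === null || argArray === undefined)
//       return Call(this, thisArg);  // no arguments
//     return Call(this, thisArg, ...CreateListFromArrayLike(argArray));
//   };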
   1771 // static
   1772 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   1773   // ----------- S t a t e -------------
   1774   //  -- r0    : argc
   1775   //  -- sp[0] : argArray
   1776   //  -- sp[4] : thisArg
   1777   //  -- sp[8] : receiver
   1778   // -----------------------------------
   1779 
   1780   // 1. Load receiver into r1, argArray into r0 (if present), remove all
   1781   // arguments from the stack (including the receiver), and push thisArg (if
   1782   // present) instead.
   1783   {
   1784     __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
   1785     __ mov(r3, r2);
   1786     __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));  // receiver
   1787     __ sub(r4, r0, Operand(1), SetCC);
   1788     __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // thisArg
   1789     __ sub(r4, r4, Operand(1), SetCC, ge);
   1790     __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argArray
   1791     __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
   1792     __ str(r2, MemOperand(sp, 0));
   1793     __ mov(r0, r3);
   1794   }
   1795 
   1796   // ----------- S t a t e -------------
   1797   //  -- r0    : argArray
   1798   //  -- r1    : receiver
   1799   //  -- sp[0] : thisArg
   1800   // -----------------------------------
   1801 
   1802   // 2. Make sure the receiver is actually callable.
   1803   Label receiver_not_callable;
   1804   __ JumpIfSmi(r1, &receiver_not_callable);
   1805   __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
   1806   __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
   1807   __ tst(r4, Operand(1 << Map::kIsCallable));
   1808   __ b(eq, &receiver_not_callable);
   1809 
   1810   // 3. Tail call with no arguments if argArray is null or undefined.
   1811   Label no_arguments;
   1812   __ JumpIfRoot(r0, Heap::kNullValueRootIndex, &no_arguments);
   1813   __ JumpIfRoot(r0, Heap::kUndefinedValueRootIndex, &no_arguments);
   1814 
   1815   // 4a. Apply the receiver to the given argArray (passing undefined for
   1816   // new.target).
   1817   __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
   1818   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
   1819 
   1820   // 4b. The argArray is either null or undefined, so we tail call without any
   1821   // arguments to the receiver.
   1822   __ bind(&no_arguments);
   1823   {
   1824     __ mov(r0, Operand(0));
   1825     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   1826   }
   1827 
   1828   // 4c. The receiver is not callable, throw an appropriate TypeError.
   1829   __ bind(&receiver_not_callable);
   1830   {
   1831     __ str(r1, MemOperand(sp, 0));
   1832     __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
   1833   }
   1834 }
   1835 
   1836 
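// Implements Function.prototype.call by turning the first argument into the
// receiver. Worked example (values illustrative): for f.call(t, x, y) the
// stack is rewritten from
//
//   [receiver = f, t, x, y], argc == 3
// to
//   [receiver = t, x, y],    argc == 2, callee = f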
   1837 // static
   1838 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
   1839   // 1. Make sure we have at least one argument.
   1840   // r0: actual number of arguments
   1841   {
   1842     Label done;
   1843     __ cmp(r0, Operand::Zero());
   1844     __ b(ne, &done);
   1845     __ PushRoot(Heap::kUndefinedValueRootIndex);
   1846     __ add(r0, r0, Operand(1));
   1847     __ bind(&done);
   1848   }
   1849 
   1850   // 2. Get the callable to call (passed as receiver) from the stack.
   1851   // r0: actual number of arguments
   1852   __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
   1853 
   1854   // 3. Shift arguments and return address one slot down on the stack
   1855   //    (overwriting the original receiver).  Adjust argument count to make
   1856   //    the original first argument the new receiver.
   1857   // r0: actual number of arguments
   1858   // r1: callable
   1859   {
   1860     Label loop;
   1861     // Calculate the copy start address (destination). Copy end address is sp.
   1862     __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
   1863 
   1864     __ bind(&loop);
   1865     __ ldr(ip, MemOperand(r2, -kPointerSize));
   1866     __ str(ip, MemOperand(r2));
   1867     __ sub(r2, r2, Operand(kPointerSize));
   1868     __ cmp(r2, sp);
   1869     __ b(ne, &loop);
   1870     // Adjust the actual number of arguments and remove the top element
   1871     // (which is a copy of the last argument).
   1872     __ sub(r0, r0, Operand(1));
   1873     __ pop();
   1874   }
   1875 
   1876   // 4. Call the callable.
   1877   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   1878 }
   1879 
   1880 
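// Implements roughly Reflect.apply(target, thisArgument, argumentsList),
// using spec operations for the sketch:
//
//   if (!IsCallable(target)) throw TypeError;
//   return Call(target, thisArgument,
//               ...CreateListFromArrayLike(argumentsList));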
   1881 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
   1882   // ----------- S t a t e -------------
   1883   //  -- r0     : argc
   1884   //  -- sp[0]  : argumentsList
   1885   //  -- sp[4]  : thisArgument
   1886   //  -- sp[8]  : target
   1887   //  -- sp[12] : receiver
   1888   // -----------------------------------
   1889 
   1890   // 1. Load target into r1 (if present), argumentsList into r0 (if present),
   1891   // remove all arguments from the stack (including the receiver), and push
   1892   // thisArgument (if present) instead.
   1893   {
   1894     __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
   1895     __ mov(r2, r1);
   1896     __ mov(r3, r1);
   1897     __ sub(r4, r0, Operand(1), SetCC);
   1898     __ ldr(r1, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // target
   1899     __ sub(r4, r4, Operand(1), SetCC, ge);
   1900     __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // thisArgument
   1901     __ sub(r4, r4, Operand(1), SetCC, ge);
   1902     __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argumentsList
   1903     __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
   1904     __ str(r2, MemOperand(sp, 0));
   1905     __ mov(r0, r3);
   1906   }
   1907 
   1908   // ----------- S t a t e -------------
   1909   //  -- r0    : argumentsList
   1910   //  -- r1    : target
   1911   //  -- sp[0] : thisArgument
   1912   // -----------------------------------
   1913 
   1914   // 2. Make sure the target is actually callable.
   1915   Label target_not_callable;
   1916   __ JumpIfSmi(r1, &target_not_callable);
   1917   __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
   1918   __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
   1919   __ tst(r4, Operand(1 << Map::kIsCallable));
   1920   __ b(eq, &target_not_callable);
   1921 
   1922   // 3a. Apply the target to the given argumentsList (passing undefined for
   1923   // new.target).
   1924   __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
   1925   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
   1926 
   1927   // 3b. The target is not callable, throw an appropriate TypeError.
   1928   __ bind(&target_not_callable);
   1929   {
   1930     __ str(r1, MemOperand(sp, 0));
   1931     __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
   1932   }
   1933 }
   1934 
   1935 
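// Implements roughly Reflect.construct(target, argumentsList[, newTarget]),
// using spec operations for the sketch:
//
//   if (!IsConstructor(target)) throw TypeError;
//   if (newTarget is absent) newTarget = target;
//   else if (!IsConstructor(newTarget)) throw TypeError;
//   return Construct(target, CreateListFromArrayLike(argumentsList),
//                    newTarget);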
   1936 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
   1937   // ----------- S t a t e -------------
   1938   //  -- r0     : argc
   1939   //  -- sp[0]  : new.target (optional)
   1940   //  -- sp[4]  : argumentsList
   1941   //  -- sp[8]  : target
   1942   //  -- sp[12] : receiver
   1943   // -----------------------------------
   1944 
    1945   // 1. Load target into r1 (if present), argumentsList into r0 (if present),
    1946   // new.target into r3 (if present, otherwise use target), remove all
    1947   // arguments from the stack (including the receiver), and leave undefined as
    1948   // the receiver on the stack.
   1949   {
   1950     __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
   1951     __ mov(r2, r1);
   1952     __ str(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));  // receiver
   1953     __ sub(r4, r0, Operand(1), SetCC);
   1954     __ ldr(r1, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // target
   1955     __ mov(r3, r1);  // new.target defaults to target
   1956     __ sub(r4, r4, Operand(1), SetCC, ge);
   1957     __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argumentsList
   1958     __ sub(r4, r4, Operand(1), SetCC, ge);
   1959     __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // new.target
   1960     __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
   1961     __ mov(r0, r2);
   1962   }
   1963 
   1964   // ----------- S t a t e -------------
   1965   //  -- r0    : argumentsList
   1966   //  -- r3    : new.target
   1967   //  -- r1    : target
   1968   //  -- sp[0] : receiver (undefined)
   1969   // -----------------------------------
   1970 
   1971   // 2. Make sure the target is actually a constructor.
   1972   Label target_not_constructor;
   1973   __ JumpIfSmi(r1, &target_not_constructor);
   1974   __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
   1975   __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
   1976   __ tst(r4, Operand(1 << Map::kIsConstructor));
   1977   __ b(eq, &target_not_constructor);
   1978 
    1979   // 3. Make sure the new.target is actually a constructor.
   1980   Label new_target_not_constructor;
   1981   __ JumpIfSmi(r3, &new_target_not_constructor);
   1982   __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
   1983   __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
   1984   __ tst(r4, Operand(1 << Map::kIsConstructor));
   1985   __ b(eq, &new_target_not_constructor);
   1986 
   1987   // 4a. Construct the target with the given new.target and argumentsList.
   1988   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
   1989 
   1990   // 4b. The target is not a constructor, throw an appropriate TypeError.
   1991   __ bind(&target_not_constructor);
   1992   {
   1993     __ str(r1, MemOperand(sp, 0));
   1994     __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
   1995   }
   1996 
   1997   // 4c. The new.target is not a constructor, throw an appropriate TypeError.
   1998   __ bind(&new_target_not_constructor);
   1999   {
   2000     __ str(r3, MemOperand(sp, 0));
   2001     __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
   2002   }
   2003 }
   2004 
   2005 
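// The stack check below amounts to: overflow iff
//   (sp - real_stack_limit) <= expected_argc * kPointerSize.
// The comparison is signed, so an already-overflowed (negative) margin also
// takes the overflow path.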
   2006 static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
   2007                                       Label* stack_overflow) {
   2008   // ----------- S t a t e -------------
   2009   //  -- r0 : actual number of arguments
   2010   //  -- r1 : function (passed through to callee)
   2011   //  -- r2 : expected number of arguments
   2012   //  -- r3 : new target (passed through to callee)
   2013   // -----------------------------------
   2014   // Check the stack for overflow. We are not trying to catch
   2015   // interruptions (e.g. debug break and preemption) here, so the "real stack
   2016   // limit" is checked.
   2017   __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
   2018   // Make r5 the space we have left. The stack might already be overflowed
    2019   // here, which will cause r5 to become negative.
   2020   __ sub(r5, sp, r5);
   2021   // Check if the arguments will overflow the stack.
   2022   __ cmp(r5, Operand(r2, LSL, kPointerSizeLog2));
   2023   __ b(le, stack_overflow);  // Signed comparison.
   2024 }
   2025 
   2026 
   2027 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
   2028   __ SmiTag(r0);
   2029   __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
   2030   __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() |
   2031                        (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
   2032                        fp.bit() | lr.bit());
   2033   __ add(fp, sp,
   2034          Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
   2035 }
   2036 
   2037 
   2038 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
   2039   // ----------- S t a t e -------------
   2040   //  -- r0 : result being passed through
   2041   // -----------------------------------
    2042   // Get the number of arguments passed (as a smi), tear down the frame and
    2043   // then drop the parameters from the stack.
   2044   __ ldr(r1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
   2045                               kPointerSize)));
   2046 
   2047   __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR);
   2048   __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
   2049   __ add(sp, sp, Operand(kPointerSize));  // adjust for receiver
   2050 }
   2051 
   2052 
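// Shared tail of Function.prototype.apply, Reflect.apply and
// Reflect.construct. Roughly (sketch only):
//
//   args = CreateListFromArrayLike(argumentsList);  // with fast paths for
//                                                   // unmodified arguments
//                                                   // objects and fast arrays
//   push the elements of args;
//   new.target === undefined ? Call(...) : Construct(...);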
   2053 // static
   2054 void Builtins::Generate_Apply(MacroAssembler* masm) {
   2055   // ----------- S t a t e -------------
   2056   //  -- r0    : argumentsList
   2057   //  -- r1    : target
   2058   //  -- r3    : new.target (checked to be constructor or undefined)
   2059   //  -- sp[0] : thisArgument
   2060   // -----------------------------------
   2061 
   2062   // Create the list of arguments from the array-like argumentsList.
   2063   {
   2064     Label create_arguments, create_array, create_runtime, done_create;
   2065     __ JumpIfSmi(r0, &create_runtime);
   2066 
   2067     // Load the map of argumentsList into r2.
   2068     __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
   2069 
   2070     // Load native context into r4.
   2071     __ ldr(r4, NativeContextMemOperand());
   2072 
   2073     // Check if argumentsList is an (unmodified) arguments object.
   2074     __ ldr(ip, ContextMemOperand(r4, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
   2075     __ cmp(ip, r2);
   2076     __ b(eq, &create_arguments);
   2077     __ ldr(ip, ContextMemOperand(r4, Context::STRICT_ARGUMENTS_MAP_INDEX));
   2078     __ cmp(ip, r2);
   2079     __ b(eq, &create_arguments);
   2080 
   2081     // Check if argumentsList is a fast JSArray.
   2082     __ CompareInstanceType(r2, ip, JS_ARRAY_TYPE);
   2083     __ b(eq, &create_array);
   2084 
   2085     // Ask the runtime to create the list (actually a FixedArray).
   2086     __ bind(&create_runtime);
   2087     {
   2088       FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
   2089       __ Push(r1, r3, r0);
   2090       __ CallRuntime(Runtime::kCreateListFromArrayLike);
   2091       __ Pop(r1, r3);
   2092       __ ldr(r2, FieldMemOperand(r0, FixedArray::kLengthOffset));
   2093       __ SmiUntag(r2);
   2094     }
   2095     __ jmp(&done_create);
   2096 
   2097     // Try to create the list from an arguments object.
   2098     __ bind(&create_arguments);
   2099     __ ldr(r2, FieldMemOperand(r0, JSArgumentsObject::kLengthOffset));
   2100     __ ldr(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
   2101     __ ldr(ip, FieldMemOperand(r4, FixedArray::kLengthOffset));
   2102     __ cmp(r2, ip);
   2103     __ b(ne, &create_runtime);
   2104     __ SmiUntag(r2);
   2105     __ mov(r0, r4);
   2106     __ b(&done_create);
   2107 
   2108     // Try to create the list from a JSArray object.
   2109     __ bind(&create_array);
   2110     __ ldr(r2, FieldMemOperand(r2, Map::kBitField2Offset));
   2111     __ DecodeField<Map::ElementsKindBits>(r2);
   2112     STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
   2113     STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
   2114     STATIC_ASSERT(FAST_ELEMENTS == 2);
   2115     __ cmp(r2, Operand(FAST_ELEMENTS));
   2116     __ b(hi, &create_runtime);
   2117     __ cmp(r2, Operand(FAST_HOLEY_SMI_ELEMENTS));
   2118     __ b(eq, &create_runtime);
   2119     __ ldr(r2, FieldMemOperand(r0, JSArray::kLengthOffset));
   2120     __ ldr(r0, FieldMemOperand(r0, JSArray::kElementsOffset));
   2121     __ SmiUntag(r2);
   2122 
   2123     __ bind(&done_create);
   2124   }
   2125 
   2126   // Check for stack overflow.
   2127   {
   2128     // Check the stack for overflow. We are not trying to catch interruptions
    2129     // (e.g. debug break and preemption) here, so check the "real stack limit".
   2130     Label done;
   2131     __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
   2132     // Make ip the space we have left. The stack might already be overflowed
    2133     // here, which will cause ip to become negative.
   2134     __ sub(ip, sp, ip);
   2135     // Check if the arguments will overflow the stack.
   2136     __ cmp(ip, Operand(r2, LSL, kPointerSizeLog2));
   2137     __ b(gt, &done);  // Signed comparison.
   2138     __ TailCallRuntime(Runtime::kThrowStackOverflow);
   2139     __ bind(&done);
   2140   }
   2141 
   2142   // ----------- S t a t e -------------
   2143   //  -- r1    : target
   2144   //  -- r0    : args (a FixedArray built from argumentsList)
   2145   //  -- r2    : len (number of elements to push from args)
   2146   //  -- r3    : new.target (checked to be constructor or undefined)
   2147   //  -- sp[0] : thisArgument
   2148   // -----------------------------------
   2149 
   2150   // Push arguments onto the stack (thisArgument is already on the stack).
   2151   {
   2152     __ mov(r4, Operand(0));
   2153     Label done, loop;
   2154     __ bind(&loop);
   2155     __ cmp(r4, r2);
   2156     __ b(eq, &done);
   2157     __ add(ip, r0, Operand(r4, LSL, kPointerSizeLog2));
   2158     __ ldr(ip, FieldMemOperand(ip, FixedArray::kHeaderSize));
   2159     __ Push(ip);
   2160     __ add(r4, r4, Operand(1));
   2161     __ b(&loop);
   2162     __ bind(&done);
   2163     __ Move(r0, r4);
   2164   }
   2165 
   2166   // Dispatch to Call or Construct depending on whether new.target is undefined.
   2167   {
   2168     __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
   2169     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
   2170     __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
   2171   }
   2172 }
   2173 
   2174 namespace {
   2175 
    2176 // Drops the top JavaScript frame and an arguments adaptor frame below it (if
    2177 // present), preserving all the arguments prepared for the current call.
    2178 // Does nothing if the debugger is currently active.
   2179 // ES6 14.6.3. PrepareForTailCall
   2180 //
   2181 // Stack structure for the function g() tail calling f():
   2182 //
   2183 // ------- Caller frame: -------
   2184 // |  ...
   2185 // |  g()'s arg M
   2186 // |  ...
   2187 // |  g()'s arg 1
   2188 // |  g()'s receiver arg
   2189 // |  g()'s caller pc
   2190 // ------- g()'s frame: -------
   2191 // |  g()'s caller fp      <- fp
   2192 // |  g()'s context
   2193 // |  function pointer: g
   2194 // |  -------------------------
   2195 // |  ...
   2196 // |  ...
   2197 // |  f()'s arg N
   2198 // |  ...
   2199 // |  f()'s arg 1
   2200 // |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
   2201 // ----------------------
   2202 //
   2203 void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
   2204                         Register scratch1, Register scratch2,
   2205                         Register scratch3) {
   2206   DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
   2207   Comment cmnt(masm, "[ PrepareForTailCall");
   2208 
   2209   // Prepare for tail call only if ES2015 tail call elimination is enabled.
   2210   Label done;
   2211   ExternalReference is_tail_call_elimination_enabled =
   2212       ExternalReference::is_tail_call_elimination_enabled_address(
   2213           masm->isolate());
   2214   __ mov(scratch1, Operand(is_tail_call_elimination_enabled));
   2215   __ ldrb(scratch1, MemOperand(scratch1));
   2216   __ cmp(scratch1, Operand(0));
   2217   __ b(eq, &done);
   2218 
   2219   // Drop possible interpreter handler/stub frame.
   2220   {
   2221     Label no_interpreter_frame;
   2222     __ ldr(scratch3,
   2223            MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
   2224     __ cmp(scratch3, Operand(Smi::FromInt(StackFrame::STUB)));
   2225     __ b(ne, &no_interpreter_frame);
   2226     __ ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   2227     __ bind(&no_interpreter_frame);
   2228   }
   2229 
   2230   // Check if next frame is an arguments adaptor frame.
   2231   Register caller_args_count_reg = scratch1;
   2232   Label no_arguments_adaptor, formal_parameter_count_loaded;
   2233   __ ldr(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   2234   __ ldr(scratch3,
   2235          MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
   2236   __ cmp(scratch3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
   2237   __ b(ne, &no_arguments_adaptor);
   2238 
   2239   // Drop current frame and load arguments count from arguments adaptor frame.
   2240   __ mov(fp, scratch2);
   2241   __ ldr(caller_args_count_reg,
   2242          MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
   2243   __ SmiUntag(caller_args_count_reg);
   2244   __ b(&formal_parameter_count_loaded);
   2245 
   2246   __ bind(&no_arguments_adaptor);
   2247   // Load caller's formal parameter count
   2248   __ ldr(scratch1,
   2249          MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
   2250   __ ldr(scratch1,
   2251          FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
   2252   __ ldr(caller_args_count_reg,
   2253          FieldMemOperand(scratch1,
   2254                          SharedFunctionInfo::kFormalParameterCountOffset));
   2255   __ SmiUntag(caller_args_count_reg);
   2256 
   2257   __ bind(&formal_parameter_count_loaded);
   2258 
   2259   ParameterCount callee_args_count(args_reg);
   2260   __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
   2261                         scratch3);
   2262   __ bind(&done);
   2263 }
   2264 }  // namespace
   2265 
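// Receiver conversion for sloppy-mode targets, per ES6 9.2.1.2
// OrdinaryCallBindThis (sketch only):
//
//   if (receiver === null || receiver === undefined) receiver = globalProxy;
//   else if (!IsJSReceiver(receiver)) receiver = ToObject(receiver);
//
// Native and strict-mode functions are called with the receiver untouched.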
   2266 // static
   2267 void Builtins::Generate_CallFunction(MacroAssembler* masm,
   2268                                      ConvertReceiverMode mode,
   2269                                      TailCallMode tail_call_mode) {
   2270   // ----------- S t a t e -------------
   2271   //  -- r0 : the number of arguments (not including the receiver)
   2272   //  -- r1 : the function to call (checked to be a JSFunction)
   2273   // -----------------------------------
   2274   __ AssertFunction(r1);
   2275 
   2276   // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
   2277   // Check that the function is not a "classConstructor".
   2278   Label class_constructor;
   2279   __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
   2280   __ ldrb(r3, FieldMemOperand(r2, SharedFunctionInfo::kFunctionKindByteOffset));
   2281   __ tst(r3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
   2282   __ b(ne, &class_constructor);
   2283 
   2284   // Enter the context of the function; ToObject has to run in the function
   2285   // context, and we also need to take the global proxy from the function
   2286   // context in case of conversion.
   2287   STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
   2288                 SharedFunctionInfo::kStrictModeByteOffset);
   2289   __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
   2290   // We need to convert the receiver for non-native sloppy mode functions.
   2291   Label done_convert;
   2292   __ ldrb(r3, FieldMemOperand(r2, SharedFunctionInfo::kNativeByteOffset));
   2293   __ tst(r3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
   2294                      (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
   2295   __ b(ne, &done_convert);
   2296   {
   2297     // ----------- S t a t e -------------
   2298     //  -- r0 : the number of arguments (not including the receiver)
   2299     //  -- r1 : the function to call (checked to be a JSFunction)
   2300     //  -- r2 : the shared function info.
   2301     //  -- cp : the function context.
   2302     // -----------------------------------
   2303 
   2304     if (mode == ConvertReceiverMode::kNullOrUndefined) {
   2305       // Patch receiver to global proxy.
   2306       __ LoadGlobalProxy(r3);
   2307     } else {
   2308       Label convert_to_object, convert_receiver;
   2309       __ ldr(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
   2310       __ JumpIfSmi(r3, &convert_to_object);
   2311       STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
   2312       __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
   2313       __ b(hs, &done_convert);
   2314       if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
   2315         Label convert_global_proxy;
   2316         __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex,
   2317                       &convert_global_proxy);
   2318         __ JumpIfNotRoot(r3, Heap::kNullValueRootIndex, &convert_to_object);
   2319         __ bind(&convert_global_proxy);
   2320         {
   2321           // Patch receiver to global proxy.
   2322           __ LoadGlobalProxy(r3);
   2323         }
   2324         __ b(&convert_receiver);
   2325       }
   2326       __ bind(&convert_to_object);
   2327       {
   2328         // Convert receiver using ToObject.
   2329         // TODO(bmeurer): Inline the allocation here to avoid building the frame
   2330         // in the fast case? (fall back to AllocateInNewSpace?)
   2331         FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
   2332         __ SmiTag(r0);
   2333         __ Push(r0, r1);
   2334         __ mov(r0, r3);
   2335         ToObjectStub stub(masm->isolate());
   2336         __ CallStub(&stub);
   2337         __ mov(r3, r0);
   2338         __ Pop(r0, r1);
   2339         __ SmiUntag(r0);
   2340       }
   2341       __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
   2342       __ bind(&convert_receiver);
   2343     }
   2344     __ str(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
   2345   }
   2346   __ bind(&done_convert);
   2347 
   2348   // ----------- S t a t e -------------
   2349   //  -- r0 : the number of arguments (not including the receiver)
   2350   //  -- r1 : the function to call (checked to be a JSFunction)
   2351   //  -- r2 : the shared function info.
   2352   //  -- cp : the function context.
   2353   // -----------------------------------
   2354 
   2355   if (tail_call_mode == TailCallMode::kAllow) {
   2356     PrepareForTailCall(masm, r0, r3, r4, r5);
   2357   }
   2358 
   2359   __ ldr(r2,
   2360          FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset));
   2361   __ SmiUntag(r2);
   2362   ParameterCount actual(r0);
   2363   ParameterCount expected(r2);
   2364   __ InvokeFunctionCode(r1, no_reg, expected, actual, JUMP_FUNCTION,
   2365                         CheckDebugStepCallWrapper());
   2366 
    2367   // The function is a "classConstructor", so we need to raise an exception.
   2368   __ bind(&class_constructor);
   2369   {
   2370     FrameScope frame(masm, StackFrame::INTERNAL);
   2371     __ push(r1);
   2372     __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
   2373   }
   2374 }
   2375 
   2376 
   2377 namespace {
   2378 
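// Inserts the [[BoundArguments]] between the explicit arguments and the
// receiver. Worked example (values illustrative): for target.bind(t, b1, b2)
// invoked with (a1, a2), i.e. r0 == 2 and two bound arguments:
//
//   before:  sp -> [a2, a1, receiver]
//   after:   sp -> [a2, a1, b2, b1, receiver],  r0 == 4
//
// which is exactly the layout of a direct call target(b1, b2, a1, a2).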
   2379 void Generate_PushBoundArguments(MacroAssembler* masm) {
   2380   // ----------- S t a t e -------------
   2381   //  -- r0 : the number of arguments (not including the receiver)
   2382   //  -- r1 : target (checked to be a JSBoundFunction)
   2383   //  -- r3 : new.target (only in case of [[Construct]])
   2384   // -----------------------------------
   2385 
   2386   // Load [[BoundArguments]] into r2 and length of that into r4.
   2387   Label no_bound_arguments;
   2388   __ ldr(r2, FieldMemOperand(r1, JSBoundFunction::kBoundArgumentsOffset));
   2389   __ ldr(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
   2390   __ SmiUntag(r4);
   2391   __ cmp(r4, Operand(0));
   2392   __ b(eq, &no_bound_arguments);
   2393   {
   2394     // ----------- S t a t e -------------
   2395     //  -- r0 : the number of arguments (not including the receiver)
   2396     //  -- r1 : target (checked to be a JSBoundFunction)
   2397     //  -- r2 : the [[BoundArguments]] (implemented as FixedArray)
   2398     //  -- r3 : new.target (only in case of [[Construct]])
   2399     //  -- r4 : the number of [[BoundArguments]]
   2400     // -----------------------------------
   2401 
   2402     // Reserve stack space for the [[BoundArguments]].
   2403     {
   2404       Label done;
   2405       __ sub(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
   2406       // Check the stack for overflow. We are not trying to catch interruptions
   2407       // (i.e. debug break and preemption) here, so check the "real stack
   2408       // limit".
   2409       __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
   2410       __ b(gt, &done);  // Signed comparison.
   2411       // Restore the stack pointer.
   2412       __ add(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
   2413       {
   2414         FrameScope scope(masm, StackFrame::MANUAL);
   2415         __ EnterFrame(StackFrame::INTERNAL);
   2416         __ CallRuntime(Runtime::kThrowStackOverflow);
   2417       }
   2418       __ bind(&done);
   2419     }
   2420 
   2421     // Relocate arguments down the stack.
   2422     {
   2423       Label loop, done_loop;
   2424       __ mov(r5, Operand(0));
   2425       __ bind(&loop);
   2426       __ cmp(r5, r0);
   2427       __ b(gt, &done_loop);
   2428       __ ldr(ip, MemOperand(sp, r4, LSL, kPointerSizeLog2));
   2429       __ str(ip, MemOperand(sp, r5, LSL, kPointerSizeLog2));
   2430       __ add(r4, r4, Operand(1));
   2431       __ add(r5, r5, Operand(1));
   2432       __ b(&loop);
   2433       __ bind(&done_loop);
   2434     }
   2435 
   2436     // Copy [[BoundArguments]] to the stack (below the arguments).
   2437     {
   2438       Label loop;
   2439       __ ldr(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
   2440       __ SmiUntag(r4);
   2441       __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   2442       __ bind(&loop);
   2443       __ sub(r4, r4, Operand(1), SetCC);
   2444       __ ldr(ip, MemOperand(r2, r4, LSL, kPointerSizeLog2));
   2445       __ str(ip, MemOperand(sp, r0, LSL, kPointerSizeLog2));
   2446       __ add(r0, r0, Operand(1));
   2447       __ b(gt, &loop);
   2448     }
   2449   }
   2450   __ bind(&no_bound_arguments);
   2451 }
   2452 
   2453 }  // namespace
   2454 
   2455 
   2456 // static
   2457 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
   2458                                               TailCallMode tail_call_mode) {
   2459   // ----------- S t a t e -------------
   2460   //  -- r0 : the number of arguments (not including the receiver)
   2461   //  -- r1 : the function to call (checked to be a JSBoundFunction)
   2462   // -----------------------------------
   2463   __ AssertBoundFunction(r1);
   2464 
   2465   if (tail_call_mode == TailCallMode::kAllow) {
   2466     PrepareForTailCall(masm, r0, r3, r4, r5);
   2467   }
   2468 
   2469   // Patch the receiver to [[BoundThis]].
   2470   __ ldr(ip, FieldMemOperand(r1, JSBoundFunction::kBoundThisOffset));
   2471   __ str(ip, MemOperand(sp, r0, LSL, kPointerSizeLog2));
   2472 
   2473   // Push the [[BoundArguments]] onto the stack.
   2474   Generate_PushBoundArguments(masm);
   2475 
   2476   // Call the [[BoundTargetFunction]] via the Call builtin.
   2477   __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
   2478   __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
   2479                                        masm->isolate())));
   2480   __ ldr(ip, MemOperand(ip));
   2481   __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
   2482 }
   2483 
   2484 
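// Dispatch of the generic Call builtin, roughly:
//
//   Smi                    -> not callable, throw
//   JSFunction             -> CallFunction builtin
//   JSBoundFunction        -> CallBoundFunction builtin
//   no [[Call]] method     -> not callable, throw
//   JSProxy                -> Runtime::kJSProxyCall
//   other callable object  -> CALL_AS_FUNCTION_DELEGATE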
   2485 // static
   2486 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
   2487                              TailCallMode tail_call_mode) {
   2488   // ----------- S t a t e -------------
   2489   //  -- r0 : the number of arguments (not including the receiver)
   2490   //  -- r1 : the target to call (can be any Object).
   2491   // -----------------------------------
   2492 
   2493   Label non_callable, non_function, non_smi;
   2494   __ JumpIfSmi(r1, &non_callable);
   2495   __ bind(&non_smi);
   2496   __ CompareObjectType(r1, r4, r5, JS_FUNCTION_TYPE);
   2497   __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
   2498           RelocInfo::CODE_TARGET, eq);
   2499   __ cmp(r5, Operand(JS_BOUND_FUNCTION_TYPE));
   2500   __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
   2501           RelocInfo::CODE_TARGET, eq);
   2502 
   2503   // Check if target has a [[Call]] internal method.
   2504   __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
   2505   __ tst(r4, Operand(1 << Map::kIsCallable));
   2506   __ b(eq, &non_callable);
   2507 
   2508   __ cmp(r5, Operand(JS_PROXY_TYPE));
   2509   __ b(ne, &non_function);
   2510 
   2511   // 0. Prepare for tail call if necessary.
   2512   if (tail_call_mode == TailCallMode::kAllow) {
   2513     PrepareForTailCall(masm, r0, r3, r4, r5);
   2514   }
   2515 
   2516   // 1. Runtime fallback for Proxy [[Call]].
   2517   __ Push(r1);
   2518   // Increase the arguments size to include the pushed function and the
   2519   // existing receiver on the stack.
   2520   __ add(r0, r0, Operand(2));
   2521   // Tail-call to the runtime.
   2522   __ JumpToExternalReference(
   2523       ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
   2524 
    2525   // 2. Call to something else that is callable but is neither a function,
    2526   // a bound function nor a proxy.
   2527   __ bind(&non_function);
    2528   // Overwrite the original receiver with the (original) target.
   2529   __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
   2530   // Let the "call_as_function_delegate" take care of the rest.
   2531   __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r1);
   2532   __ Jump(masm->isolate()->builtins()->CallFunction(
   2533               ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
   2534           RelocInfo::CODE_TARGET);
   2535 
   2536   // 3. Call to something that is not callable.
   2537   __ bind(&non_callable);
   2538   {
   2539     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
   2540     __ Push(r1);
   2541     __ CallRuntime(Runtime::kThrowCalledNonCallable);
   2542   }
   2543 }
   2544 
   2545 
   2546 // static
   2547 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
   2548   // ----------- S t a t e -------------
   2549   //  -- r0 : the number of arguments (not including the receiver)
   2550   //  -- r1 : the constructor to call (checked to be a JSFunction)
   2551   //  -- r3 : the new target (checked to be a constructor)
   2552   // -----------------------------------
   2553   __ AssertFunction(r1);
   2554 
    2555   // The calling convention for function-specific ConstructStubs requires
    2556   // r2 to contain either an AllocationSite or undefined.
   2557   __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
   2558 
   2559   // Tail call to the function-specific construct stub (still in the caller
   2560   // context at this point).
   2561   __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
   2562   __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset));
   2563   __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
   2564 }
   2565 
   2566 
   2567 // static
   2568 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
   2569   // ----------- S t a t e -------------
   2570   //  -- r0 : the number of arguments (not including the receiver)
   2571   //  -- r1 : the function to call (checked to be a JSBoundFunction)
   2572   //  -- r3 : the new target (checked to be a constructor)
   2573   // -----------------------------------
   2574   __ AssertBoundFunction(r1);
   2575 
   2576   // Push the [[BoundArguments]] onto the stack.
   2577   Generate_PushBoundArguments(masm);
   2578 
   2579   // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
   2580   __ cmp(r1, r3);
   2581   __ ldr(r3, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset),
   2582          eq);
   2583 
   2584   // Construct the [[BoundTargetFunction]] via the Construct builtin.
   2585   __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
   2586   __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
   2587   __ ldr(ip, MemOperand(ip));
   2588   __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
   2589 }
   2590 
   2591 
   2592 // static
   2593 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
   2594   // ----------- S t a t e -------------
   2595   //  -- r0 : the number of arguments (not including the receiver)
   2596   //  -- r1 : the constructor to call (checked to be a JSProxy)
   2597   //  -- r3 : the new target (either the same as the constructor or
   2598   //          the JSFunction on which new was invoked initially)
   2599   // -----------------------------------
   2600 
   2601   // Call into the Runtime for Proxy [[Construct]].
   2602   __ Push(r1);
   2603   __ Push(r3);
   2604   // Include the pushed new_target, the constructor, and the receiver.
   2605   __ add(r0, r0, Operand(3));
   2606   // Tail-call to the runtime.
   2607   __ JumpToExternalReference(
   2608       ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
   2609 }
   2610 
   2611 
   2612 // static
   2613 void Builtins::Generate_Construct(MacroAssembler* masm) {
   2614   // ----------- S t a t e -------------
   2615   //  -- r0 : the number of arguments (not including the receiver)
   2616   //  -- r1 : the constructor to call (can be any Object)
   2617   //  -- r3 : the new target (either the same as the constructor or
   2618   //          the JSFunction on which new was invoked initially)
   2619   // -----------------------------------
   2620 
   2621   // Check if target is a Smi.
   2622   Label non_constructor;
   2623   __ JumpIfSmi(r1, &non_constructor);
   2624 
   2625   // Dispatch based on instance type.
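          // CompareObjectType leaves the map in r4 and the instance type in
          // r5; both are reused by the checks below.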
   2626   __ CompareObjectType(r1, r4, r5, JS_FUNCTION_TYPE);
   2627   __ Jump(masm->isolate()->builtins()->ConstructFunction(),
   2628           RelocInfo::CODE_TARGET, eq);
   2629 
   2630   // Check if target has a [[Construct]] internal method.
   2631   __ ldrb(r2, FieldMemOperand(r4, Map::kBitFieldOffset));
   2632   __ tst(r2, Operand(1 << Map::kIsConstructor));
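          // eq here means the Map::kIsConstructor bit is clear, i.e. the
          // target is not a constructor.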
   2633   __ b(eq, &non_constructor);
   2634 
   2635   // Only dispatch to bound functions after checking whether they are
   2636   // constructors.
   2637   __ cmp(r5, Operand(JS_BOUND_FUNCTION_TYPE));
   2638   __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
   2639           RelocInfo::CODE_TARGET, eq);
   2640 
   2641   // Only dispatch to proxies after checking whether they are constructors.
   2642   __ cmp(r5, Operand(JS_PROXY_TYPE));
   2643   __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
   2644           eq);
   2645 
   2646   // Called Construct on an exotic Object with a [[Construct]] internal method.
   2647   {
   2648     // Overwrite the original receiver with the (original) target.
   2649     __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
   2650     // Let the "call_as_constructor_delegate" take care of the rest.
   2651     __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r1);
   2652     __ Jump(masm->isolate()->builtins()->CallFunction(),
   2653             RelocInfo::CODE_TARGET);
   2654   }
   2655 
   2656   // Called Construct on an Object that doesn't have a [[Construct]] internal
   2657   // method.
   2658   __ bind(&non_constructor);
   2659   __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
   2660           RelocInfo::CODE_TARGET);
   2661 }
   2662 
   2663 // static
   2664 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
   2665   // ----------- S t a t e -------------
   2666   //  -- r1 : requested object size (untagged)
   2667   //  -- lr : return address
   2668   // -----------------------------------
   2669   __ SmiTag(r1);
   2670   __ Push(r1);
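          // These runtime functions take no JS context; a Smi zero in cp is
          // the conventional "no context" marker.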
   2671   __ Move(cp, Smi::FromInt(0));
   2672   __ TailCallRuntime(Runtime::kAllocateInNewSpace);
   2673 }
   2674 
   2675 // static
   2676 void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
   2677   // ----------- S t a t e -------------
   2678   //  -- r1 : requested object size (untagged)
   2679   //  -- lr : return address
   2680   // -----------------------------------
   2681   __ SmiTag(r1);
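          // Encode the target space as a Smi; kAllocateInTargetSpace expects
          // it as the second argument.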
   2682   __ Move(r2, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
   2683   __ Push(r1, r2);
   2684   __ Move(cp, Smi::FromInt(0));
   2685   __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
   2686 }
   2687 
   2688 // static
   2689 void Builtins::Generate_StringToNumber(MacroAssembler* masm) {
   2690   // The StringToNumber stub takes one argument in r0.
   2691   __ AssertString(r0);
   2692 
   2693   // Check if string has a cached array index.
   2694   Label runtime;
   2695   __ ldr(r2, FieldMemOperand(r0, String::kHashFieldOffset));
   2696   __ tst(r2, Operand(String::kContainsCachedArrayIndexMask));
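          // Any bit set under the mask means the hash field holds no cached
          // array index, so fall back to the runtime.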
   2697   __ b(ne, &runtime);
   2698   __ IndexFromHash(r2, r0);
   2699   __ Ret();
   2700 
   2701   __ bind(&runtime);
   2702   {
   2703     FrameScope frame(masm, StackFrame::INTERNAL);
   2704     // Push argument.
   2705     __ Push(r0);
   2706     // We cannot use a tail call here because this builtin can also be called
   2707     // from wasm.
   2708     __ CallRuntime(Runtime::kStringToNumber);
   2709   }
   2710   __ Ret();
   2711 }
   2712 
   2713 void Builtins::Generate_ToNumber(MacroAssembler* masm) {
   2714   // The ToNumber stub takes one argument in r0.
   2715   STATIC_ASSERT(kSmiTag == 0);
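          // Smis are already numbers, so return the input unchanged if the
          // tag check below passes (eq).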
   2716   __ tst(r0, Operand(kSmiTagMask));
   2717   __ Ret(eq);
   2718 
   2719   __ CompareObjectType(r0, r1, r1, HEAP_NUMBER_TYPE);
   2720   // r0: receiver
   2721   // r1: receiver instance type
   2722   __ Ret(eq);
   2723 
   2724   __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
   2725           RelocInfo::CODE_TARGET);
   2726 }
   2727 
   2728 void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) {
   2729   // The NonNumberToNumber stub takes one argument in r0.
   2730   __ AssertNotNumber(r0);
   2731 
   2732   __ CompareObjectType(r0, r1, r1, FIRST_NONSTRING_TYPE);
   2733   // r0: receiver
   2734   // r1: receiver instance type
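          // lo (unsigned lower) means the instance type is below
          // FIRST_NONSTRING_TYPE, i.e. the input is a String.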
   2735   __ Jump(masm->isolate()->builtins()->StringToNumber(), RelocInfo::CODE_TARGET,
   2736           lo);
   2737 
   2738   Label not_oddball;
   2739   __ cmp(r1, Operand(ODDBALL_TYPE));
   2740   __ b(ne, &not_oddball);
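          // Oddballs (undefined, null, true, false) cache their ToNumber
          // value in a field, so no call is needed.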
   2741   __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset));
   2742   __ Ret();
   2743   __ bind(&not_oddball);
   2744   {
   2745     FrameScope frame(masm, StackFrame::INTERNAL);
   2746     // Push argument.
   2747     __ Push(r0);
   2748     // We cannot use a tail call here because this builtin can also be called
   2749     // from wasm.
   2750     __ CallRuntime(Runtime::kToNumber);
   2751   }
   2752   __ Ret();
   2753 }
   2754 
   2755 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   2756   // ----------- S t a t e -------------
   2757   //  -- r0 : actual number of arguments
   2758   //  -- r1 : function (passed through to callee)
   2759   //  -- r2 : expected number of arguments
   2760   //  -- r3 : new target (passed through to callee)
   2761   // -----------------------------------
   2762 
   2763   Label invoke, dont_adapt_arguments, stack_overflow;
   2764 
   2765   Label enough, too_few;
   2766   __ cmp(r0, r2);
   2767   __ b(lt, &too_few);
   2768   __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
   2769   __ b(eq, &dont_adapt_arguments);
   2770 
   2771   {  // Enough parameters: actual >= expected
   2772     __ bind(&enough);
   2773     EnterArgumentsAdaptorFrame(masm);
   2774     ArgumentAdaptorStackCheck(masm, &stack_overflow);
   2775 
   2776     // Calculate copy start address into r0 and copy end address into r4.
   2777     // r0: actual number of arguments as a smi
   2778     // r1: function
   2779     // r2: expected number of arguments
   2780     // r3: new target (passed through to callee)
   2781     __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
   2782     // Adjust for the return address and receiver.
   2783     __ add(r0, r0, Operand(2 * kPointerSize));
   2784     __ sub(r4, r0, Operand(r2, LSL, kPointerSizeLog2));
   2785 
   2786     // Copy the arguments (including the receiver) to the new stack frame.
   2787     // r0: copy start address
   2788     // r1: function
   2789     // r2: expected number of arguments
   2790     // r3: new target (passed through to callee)
   2791     // r4: copy end address
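            // The loop copies down to and including the word at the copy end
            // address: the compare happens before r0 is decremented.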
   2792 
   2793     Label copy;
   2794     __ bind(&copy);
   2795     __ ldr(ip, MemOperand(r0, 0));
   2796     __ push(ip);
   2797     __ cmp(r0, r4);  // Compare before moving to next argument.
   2798     __ sub(r0, r0, Operand(kPointerSize));
   2799     __ b(ne, &copy);
   2800 
   2801     __ b(&invoke);
   2802   }
   2803 
   2804   {  // Too few parameters: actual < expected
   2805     __ bind(&too_few);
   2806     EnterArgumentsAdaptorFrame(masm);
   2807     ArgumentAdaptorStackCheck(masm, &stack_overflow);
   2808 
   2809     // Calculate the copy start address into r0; the copy end address is fp.
   2810     // r0: actual number of arguments as a smi
   2811     // r1: function
   2812     // r2: expected number of arguments
   2813     // r3: new target (passed through to callee)
   2814     __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
   2815 
   2816     // Copy the arguments (including the receiver) to the new stack frame.
   2817     // r0: copy start address
   2818     // r1: function
   2819     // r2: expected number of arguments
   2820     // r3: new target (passed through to callee)
   2821     Label copy;
   2822     __ bind(&copy);
   2823     // Adjust load for return address and receiver.
   2824     __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
   2825     __ push(ip);
   2826     __ cmp(r0, fp);  // Compare before moving to next argument.
   2827     __ sub(r0, r0, Operand(kPointerSize));
   2828     __ b(ne, &copy);
   2829 
   2830     // Fill the remaining expected arguments with undefined.
   2831     // r1: function
   2832     // r2: expected number of arguments
   2833     // r3: new target (passed through to callee)
   2834     __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
   2835     __ sub(r4, fp, Operand(r2, LSL, kPointerSizeLog2));
   2836     // Adjust for frame.
   2837     __ sub(r4, r4, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
   2838                            2 * kPointerSize));
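            // r4 is the target sp: once the pushes below reach it, every
            // expected argument slot has been filled with undefined.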
   2839 
   2840     Label fill;
   2841     __ bind(&fill);
   2842     __ push(ip);
   2843     __ cmp(sp, r4);
   2844     __ b(ne, &fill);
   2845   }
   2846 
   2847   // Call the entry point.
   2848   __ bind(&invoke);
   2849   __ mov(r0, r2);
   2850   // r0 : expected number of arguments
   2851   // r1 : function (passed through to callee)
   2852   // r3 : new target (passed through to callee)
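          // The code entry is stored as a raw instruction address at
          // kCodeEntryOffset, so it can be called directly.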
   2853   __ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
   2854   __ Call(r4);
   2855 
   2856   // Store offset of return address for deoptimizer.
   2857   masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
   2858 
   2859   // Exit frame and return.
   2860   LeaveArgumentsAdaptorFrame(masm);
   2861   __ Jump(lr);
   2862 
   2863 
   2864   // -------------------------------------------
   2865   // Don't adapt arguments.
   2866   // -------------------------------------------
   2867   __ bind(&dont_adapt_arguments);
   2868   __ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
   2869   __ Jump(r4);
   2870 
   2871   __ bind(&stack_overflow);
   2872   {
   2873     FrameScope frame(masm, StackFrame::MANUAL);
   2874     __ CallRuntime(Runtime::kThrowStackOverflow);
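            // ThrowStackOverflow does not return; the breakpoint traps any
            // fall-through.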
   2875     __ bkpt(0);
   2876   }
   2877 }
   2878 
   2879 
   2880 #undef __
   2881 
   2882 }  // namespace internal
   2883 }  // namespace v8
   2884 
   2885 #endif  // V8_TARGET_ARCH_ARM
   2886