// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_S390

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id) {
  // ----------- S t a t e -------------
  //  -- r2                 : number of arguments excluding receiver
  //  -- r3                 : target
  //  -- r5                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(r3);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));

  // Insert extra arguments.
  const int num_extra_args = 2;
  __ Push(r3, r5);
  // JumpToExternalReference expects r2 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ AddP(r2, r2, Operand(num_extra_args + 1));
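  // r2 now holds argc + 3: the original arguments plus the receiver and the
  // two extra arguments pushed above.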

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}

// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the current native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}

// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the current native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}

void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r3);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ LoadP(r4, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r4);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
    __ CompareObjectType(r4, r5, r6, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function; this tail calls a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r3);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ LoadP(r4, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r4);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
    __ CompareObjectType(r4, r5, r6, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  __ LoadRR(r5, r3);
  // Run the native code for the Array function called as a normal function;
  // this tail calls a stub.
  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- r2                 : number of arguments
  //  -- r3                 : function
  //  -- cp                 : context
  //  -- lr                 : return address
  //  -- sp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- sp[(argc + 1) * 8] : receiver
  // -----------------------------------
  Condition const cond_done = (kind == MathMaxMinKind::kMin) ? lt : gt;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in r7 and the double value in d1.
  __ LoadRoot(r7, root_index);
  __ LoadDouble(d1, FieldMemOperand(r7, HeapNumber::kValueOffset));
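  // Roughly, the loop below computes (pseudocode, not the exact semantics):
  //   acc = (kind == kMin) ? +Infinity : -Infinity;
  //   for each arg: n = ToNumber(arg); if (n beats acc) acc = n;
  //   return acc;
  // where a NaN, once seen, stays in the accumulator for the rest of the loop.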

  // Setup state for the loop.
  // r6: number of slots to drop at exit (arguments + receiver)
  __ AddP(r6, r2, Operand(1));

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters are done.
    __ SubP(r2, Operand(1));
    __ blt(&done_loop);

    // Load the next parameter's tagged value into r4.
    __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2));
    __ LoadP(r4, MemOperand(sp, r1));

    // Load the double value of the parameter into d2, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(r4, &convert_smi);
    __ LoadP(r5, FieldMemOperand(r4, HeapObject::kMapOffset));
    __ JumpIfRoot(r5, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      DCHECK(!FLAG_enable_embedded_constant_pool);
      FrameScope scope(masm, StackFrame::MANUAL);
      __ PushStandardFrame(r3);
      __ SmiTag(r2);
      __ SmiTag(r6);
      __ Push(r2, r6, r7);
      __ LoadRR(r2, r4);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ LoadRR(r4, r2);
      __ Pop(r2, r6, r7);
      {
        // Restore the double accumulator value (d1).
        Label done_restore;
        __ SmiToDouble(d1, r7);
        __ JumpIfSmi(r7, &done_restore);
        __ LoadDouble(d1, FieldMemOperand(r7, HeapNumber::kValueOffset));
        __ bind(&done_restore);
      }
      __ SmiUntag(r6);
      __ SmiUntag(r2);
      __ Pop(r14, fp, cp, r3);
    }
    __ b(&convert);
    __ bind(&convert_number);
    __ LoadDouble(d2, FieldMemOperand(r4, HeapNumber::kValueOffset));
    __ b(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDouble(d2, r4);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left hand
    // side (d1) and the next parameter value on the right hand side (d2).
    Label compare_nan, compare_swap;
    __ cdbr(d1, d2);
    __ bunordered(&compare_nan);
    __ b(cond_done, &loop);
    __ b(CommuteCondition(cond_done), &compare_swap);

    // Left and right hand side are equal, check for -0 vs. +0.
    __ TestDoubleIsMinusZero(reg, r1, r0);
    __ bne(&loop);

    // Update accumulator. Result is on the right hand side.
    __ bind(&compare_swap);
    __ ldr(d1, d2);
    __ LoadRR(r7, r4);
    __ b(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    // We still need to visit the rest of the arguments.
    __ bind(&compare_nan);
    __ LoadRoot(r7, Heap::kNanValueRootIndex);
    __ LoadDouble(d1, FieldMemOperand(r7, HeapNumber::kValueOffset));
    __ b(&loop);
  }

  __ bind(&done_loop);
  __ LoadRR(r2, r7);
  __ Drop(r6);
  __ Ret();
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2                     : number of arguments
  //  -- r3                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r2 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r2, r2, Operand(kPointerSizeLog2));
    __ la(sp, MemOperand(sp, r2));
    __ LoadP(r2, MemOperand(sp));
    __ Drop(2);
  }
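  // Only the first argument survives: sp was advanced past the other
  // arguments, the first argument was loaded into r2, and Drop(2) removes its
  // slot together with the receiver.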

  // 2a. Convert the first argument to a number.
  __ Jump(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ LoadSmiLiteral(r2, Smi::FromInt(0));
  __ Ret(1);
}

// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2                     : number of arguments
  //  -- r3                     : constructor function
  //  -- r5                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));

  // 2. Load the first argument into r4 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
    __ la(sp, MemOperand(sp, r4));
    __ LoadP(r4, MemOperand(sp));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadSmiLiteral(r4, Smi::FromInt(0));
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r4 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(r4, &done_convert);
    __ CompareObjectType(r4, r6, r6, HEAP_NUMBER_TYPE);
    __ beq(&done_convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r3, r5);
      __ LoadRR(r2, r4);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ LoadRR(r4, r2);
      __ Pop(r3, r5);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ CmpP(r3, r5);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(r2, r3, r4, r6, r7, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r4);
  }
  __ StoreP(r4, FieldMemOperand(r2, JSValue::kValueOffset), r0);
  __ Ret();
}

// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2                     : number of arguments
  //  -- r3                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  // 1. Load the first argument into r2 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r2, r2, Operand(kPointerSizeLog2));
    __ lay(sp, MemOperand(sp, r2));
    __ LoadP(r2, MemOperand(sp));
    __ Drop(2);
  }

  // 2a. At least one argument, return r2 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(r2, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
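    // Thanks to the assert above, a single comparison against
    // FIRST_NONSTRING_TYPE classifies the value: lt is a string (returned
    // below as-is), eq is a symbol, and gt is anything else, which still
    // needs conversion.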
    __ CompareObjectType(r2, r3, r3, FIRST_NONSTRING_TYPE);
    __ bgt(&to_string);
    __ beq(&symbol_descriptive_string);
    __ Ret();
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(r2, Heap::kempty_stringRootIndex);
    __ Ret(1);
  }

  // 3a. Convert r2 to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }
  // 3b. Convert symbol in r2 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Push(r2);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}

// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2                     : number of arguments
  //  -- r3                     : constructor function
  //  -- r5                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));

  // 2. Load the first argument into r4 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
    __ lay(sp, MemOperand(sp, r4));
    __ LoadP(r4, MemOperand(sp));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadRoot(r4, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r4 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(r4, &convert);
    __ CompareObjectType(r4, r6, r6, FIRST_NONSTRING_TYPE);
    __ blt(&done_convert);
    __ bind(&convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(r3, r5);
      __ LoadRR(r2, r4);
      __ CallStub(&stub);
      __ LoadRR(r4, r2);
      __ Pop(r3, r5);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ CmpP(r3, r5);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(r2, r3, r4, r6, r7, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r4);
  }
  __ StoreP(r4, FieldMemOperand(r2, JSValue::kValueOffset), r0);
  __ Ret();
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ LoadP(ip, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
  __ AddP(ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r2 : argument count (preserved for callee)
  //  -- r3 : target function (preserved for callee)
  //  -- r5 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ SmiTag(r2);
    __ Push(r2, r3, r5, r3);
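    // The stack now holds, from the top: function (the argument to the
    // runtime call), new target, function, smi-tagged argument count.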

    __ CallRuntime(function_id, 1);
    __ LoadRR(r4, r2);

    // Restore target function and new target.
    __ Pop(r2, r3, r5);
    __ SmiUntag(r2);
  }
  __ AddP(ip, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ CmpLogicalP(sp, RootMemOperand(Heap::kStackLimitRootIndex));
  __ bge(&ok, Label::kNear);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- r2     : number of arguments
  //  -- r3     : constructor function
  //  -- r4     : allocation site or undefined
  //  -- r5     : new target
  //  -- cp     : context
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(r4, r6);

    if (!create_implicit_receiver) {
      __ SmiTag(r6, r2);
      __ LoadAndTestP(r6, r6);
      __ Push(cp, r4, r6);
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    } else {
      __ SmiTag(r2);
      __ Push(cp, r4, r2);

      // Allocate the new receiver object.
      __ Push(r3, r5);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ LoadRR(r6, r2);
      __ Pop(r3, r5);

      // ----------- S t a t e -------------
      //  -- r3: constructor function
      //  -- r5: new target
      //  -- r6: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ LoadP(r2, MemOperand(sp));
      __ SmiUntag(r2);
      __ LoadAndTestP(r2, r2);

      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(r6, r6);
    }

    // Set up pointer to last argument.
    __ la(r4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r2: number of arguments
    // r3: constructor function
    // r4: address of last argument (caller sp)
    // r5: new target
    // cr0: condition indicating whether r2 is zero
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, no_args;
    __ beq(&no_args);
    __ ShiftLeftP(ip, r2, Operand(kPointerSizeLog2));
    __ SubP(sp, sp, ip);
    __ LoadRR(r1, r2);
    __ bind(&loop);
    __ lay(ip, MemOperand(ip, -kPointerSize));
    __ LoadP(r0, MemOperand(ip, r4));
    __ StoreP(r0, MemOperand(ip, sp));
    __ BranchOnCount(r1, &loop);
    __ bind(&no_args);
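    // Rough shape of the copy loop above:
    //   for (int i = argc - 1; i >= 0; --i) sp[i] = caller_sp[i];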

    // Call the function.
    // r2: number of arguments
    // r3: constructor function
    // r5: new target

    ParameterCount actual(r2);
    __ InvokeFunction(r3, r5, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r2: result
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // r2: result
      // sp[0]: receiver
      // sp[1]: new.target
      // sp[2]: number of arguments (smi-tagged)
      __ JumpIfSmi(r2, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CompareObjectType(r2, r3, r5, FIRST_JS_RECEIVER_TYPE);
      __ bge(&exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ LoadP(r2, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // r2: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ LoadP(r3, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2, step 13+: a Smi result means the constructor of a derived
  // class returned a value that is neither undefined nor an Object, so we
  // must throw.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(r2, &dont_throw);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  __ SmiToPtrArrayOffset(r3, r3);
  __ AddP(sp, sp, r3);
  __ AddP(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r3, r4);
  }
  __ Ret();
}

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}

void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : the value to pass to the generator
  //  -- r3 : the JSGeneratorObject to resume
  //  -- r4 : the resume mode (tagged)
  //  -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(r3);

  // Store input value into generator object.
  __ StoreP(r2, FieldMemOperand(r3, JSGeneratorObject::kInputOrDebugPosOffset),
            r0);
  __ RecordWriteField(r3, JSGeneratorObject::kInputOrDebugPosOffset, r2, r5,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ LoadP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset));
  __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(masm->isolate());
  STATIC_ASSERT(StepFrame > StepIn);
  __ mov(ip, Operand(last_step_action));
  __ LoadB(ip, MemOperand(ip));
  __ CmpP(ip, Operand(StepIn));
  __ bge(&prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.

  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());

  __ mov(ip, Operand(debug_suspended_generator));
  __ LoadP(ip, MemOperand(ip));
  __ CmpP(ip, r3);
  __ beq(&prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Push receiver.
  __ LoadP(ip, FieldMemOperand(r3, JSGeneratorObject::kReceiverOffset));
  __ Push(ip);

  // ----------- S t a t e -------------
  //  -- r3    : the JSGeneratorObject to resume
  //  -- r4    : the resume mode (tagged)
  //  -- r6    : generator function
  //  -- cp    : generator context
  //  -- lr    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ LoadP(r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset));
  __ LoadW(
      r2, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label loop, done_loop;
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
#if V8_TARGET_ARCH_S390X
    __ CmpP(r2, Operand::Zero());
    __ beq(&done_loop);
#else
    __ SmiUntag(r2);
    __ LoadAndTestP(r2, r2);
    __ beq(&done_loop);
#endif
    __ LoadRR(r1, r2);
    __ bind(&loop);
    __ push(ip);
    __ BranchOnCount(r1, &loop);
    __ bind(&done_loop);
  }
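  // The stack now holds the receiver plus one hole per formal parameter,
  // mirroring the argument layout of an ordinary call to the function.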

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset));
  __ CompareObjectType(r5, r5, r5, BYTECODE_ARRAY_TYPE);
  __ bne(&old_generator, Label::kNear);

  // New-style (ignition/turbofan) generator object
  {
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ LoadRR(r5, r3);
    __ LoadRR(r3, r6);
    __ LoadP(ip, FieldMemOperand(r3, JSFunction::kCodeEntryOffset));
    __ JumpToJSEntry(ip);
  }
  // Old-style (full-codegen) generator object
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PushStandardFrame(r6);

    // Restore the operand stack.
    __ LoadP(r2, FieldMemOperand(r3, JSGeneratorObject::kOperandStackOffset));
    __ LoadP(r5, FieldMemOperand(r2, FixedArray::kLengthOffset));
    __ AddP(r2, r2,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
    {
      Label loop, done_loop;
      __ SmiUntag(r5);
      __ LoadAndTestP(r5, r5);
      __ beq(&done_loop);
      __ LoadRR(r1, r5);
      __ bind(&loop);
      __ LoadP(ip, MemOperand(r2, kPointerSize));
      __ la(r2, MemOperand(r2, kPointerSize));
      __ Push(ip);
      __ BranchOnCount(r1, &loop);
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(ip, Heap::kEmptyFixedArrayRootIndex);
    __ StoreP(ip, FieldMemOperand(r3, JSGeneratorObject::kOperandStackOffset),
              r0);

    // Resume the generator function at the continuation.
    __ LoadP(r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset));
    __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kCodeOffset));
    __ AddP(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
    {
      ConstantPoolUnavailableScope constant_pool_unavailable(masm);
      __ LoadP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset));
      __ SmiUntag(r4);
      __ AddP(r5, r5, r4);
      __ LoadSmiLiteral(r4,
                        Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
      __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
                r0);
      __ LoadRR(r2, r3);  // Continuation expects generator object in r2.
      __ Jump(r5);
    }
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r3, r4, r6);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(r3, r4);
    __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r3, r4);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(r3, r4);
    __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  __ push(r3);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };

// Clobbers r4; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r4, Heap::kRealStackLimitRootIndex);
  // Make r4 the space we have left. The stack might already be overflowed
  // here which will cause r4 to become negative.
  __ SubP(r4, sp, r4);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ SmiToPtrArrayOffset(r0, argc);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ ShiftLeftP(r0, argc, Operand(kPointerSizeLog2));
  }
  __ CmpP(r4, r0);
  __ bgt(&okay);  // Signed comparison.
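  // If the stack is already overflowed, r4 is negative, the signed bgt above
  // is not taken, and we reach the runtime call regardless of argc.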

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r2: new.target
  // r3: function
  // r4: receiver
  // r5: argc
  // r6: argv
  // r0, r7-r9, cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    // FrameScope ends up calling MacroAssembler::EnterFrame here
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(cp, Operand(context_address));
    __ LoadP(cp, MemOperand(cp));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r3, r4);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r4.
    Generate_CheckStackOverflow(masm, r5, kArgcIsUntaggedInt);

    // Copy arguments from argv to the stack in a loop.
    // The arguments end up on sp in reverse order relative to argv
    // (i.e. arg1 sits at the highest stack address).
    // r3: function
    // r5: argc
    // r6: argv, i.e. points to first arg
    // r7: scratch reg to hold scaled argc
    // r8: scratch reg to hold arg handle
    // r9: scratch reg to hold index into argv
    Label argLoop, argExit;
    intptr_t zero = 0;
    __ ShiftLeftP(r7, r5, Operand(kPointerSizeLog2));
    __ SubRR(sp, r7);                // Buy the stack frame to fit args
    __ LoadImmP(r9, Operand(zero));  // Initialize argv index
    __ bind(&argLoop);
    __ CmpPH(r7, Operand(zero));
    __ beq(&argExit, Label::kNear);
    __ lay(r7, MemOperand(r7, -kPointerSize));
    __ LoadP(r8, MemOperand(r9, r6));         // read next parameter
    __ la(r9, MemOperand(r9, kPointerSize));  // r9++;
    __ LoadP(r0, MemOperand(r8));             // dereference handle
    __ StoreP(r0, MemOperand(r7, sp));        // push parameter
    __ b(&argLoop);
    __ bind(&argExit);
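    // Net effect of the loop, roughly:
    //   for (int i = 0; i < argc; i++) sp[argc - 1 - i] = *argv[i];
    // (each argv slot is a handle, hence the extra dereference).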

    // Setup new.target and argc.
    __ LoadRR(r6, r2);
    __ LoadRR(r2, r5);
    __ LoadRR(r5, r6);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    __ LoadRR(r7, r6);
    __ LoadRR(r8, r6);
    __ LoadRR(r9, r6);

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    958     // return.
    959   }
    960   __ b(r14);
    961 
    962   // r2: result
    963 }
    964 
    965 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
    966   Generate_JSEntryTrampolineHelper(masm, false);
    967 }
    968 
    969 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
    970   Generate_JSEntryTrampolineHelper(masm, true);
    971 }
    972 
    973 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
    974   Register args_count = scratch;
    975 
    976   // Get the arguments + receiver count.
    977   __ LoadP(args_count,
    978            MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
    979   __ LoadlW(args_count,
    980             FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));
    981 
    982   // Leave the frame (also dropping the register file).
    983   __ LeaveFrame(StackFrame::JAVA_SCRIPT);
    984 
    985   __ AddP(sp, sp, args_count);
    986 }
    987 
    988 // Generate code for entering a JS function with the interpreter.
    989 // On entry to the function the receiver and arguments have been pushed on the
    990 // stack left to right.  The actual argument count matches the formal parameter
    991 // count expected by the function.
    992 //
    993 // The live registers are:
    994 //   o r3: the JS function object being called.
    995 //   o r5: the new target
    996 //   o cp: our context
    997 //   o pp: the caller's constant pool pointer (if enabled)
    998 //   o fp: the caller's frame pointer
    999 //   o sp: stack pointer
   1000 //   o lr: return address
   1001 //
   1002 // The function builds an interpreter frame.  See InterpreterFrameConstants in
   1003 // frames.h for its layout.
   1004 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
   1005   ProfileEntryHookStub::MaybeCallEntryHook(masm);
   1006 
   1007   // Open a frame scope to indicate that there is a frame on the stack.  The
   1008   // MANUAL indicates that the scope shouldn't actually generate code to set up
   1009   // the frame (that is done below).
   1010   FrameScope frame_scope(masm, StackFrame::MANUAL);
   1011   __ PushStandardFrame(r3);
   1012 
   1013   // Get the bytecode array from the function object (or from the DebugInfo if
   1014   // it is present) and load it into kInterpreterBytecodeArrayRegister.
   1015   __ LoadP(r2, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
   1016   Label array_done;
   1017   Register debug_info = r4;
   1018   DCHECK(!debug_info.is(r2));
   1019   __ LoadP(debug_info,
   1020            FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset));
   1021   // Load original bytecode array or the debug copy.
   1022   __ LoadP(kInterpreterBytecodeArrayRegister,
   1023            FieldMemOperand(r2, SharedFunctionInfo::kFunctionDataOffset));
   1024   __ CmpSmiLiteral(debug_info, DebugInfo::uninitialized(), r0);
   1025   __ beq(&array_done);
   1026   __ LoadP(kInterpreterBytecodeArrayRegister,
   1027            FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
   1028   __ bind(&array_done);
   1029 
  // Check that the function data field is actually a BytecodeArray object.
  Label bytecode_array_not_present;
  __ CompareRoot(kInterpreterBytecodeArrayRegister,
                 Heap::kUndefinedValueRootIndex);
  __ beq(&bytecode_array_not_present);

  if (FLAG_debug_code) {
    __ TestIfSmi(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r2, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Load the initial bytecode offset.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(r4, kInterpreterBytecodeOffsetRegister);
  __ Push(r5, kInterpreterBytecodeArrayRegister, r4);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size (word) from the BytecodeArray object.
    __ LoadlW(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                  BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ SubP(r5, sp, r4);
    __ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
    __ CmpLogicalP(r5, r0);
    __ bge(&ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    Label loop, no_args;
    __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
    __ ShiftRightP(r4, r4, Operand(kPointerSizeLog2));
    __ LoadAndTestP(r4, r4);
    __ beq(&no_args);
    __ LoadRR(r1, r4);
    __ bind(&loop);
    __ push(r5);
    __ SubP(r1, Operand(1));
    __ bne(&loop);
    __ bind(&no_args);
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
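  // The handler address is dispatch_table[bytecode]; the byte loaded below is
  // scaled by kPointerSizeLog2 to index the pointer-sized table entries.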
  __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister,
                           kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2));
  __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Call(ip);

  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in r2.
  LeaveInterpreterFrame(masm, r4);
  __ Ret();

  // If the bytecode array is no longer present, then the underlying function
  // has been switched to a different kind of code and we heal the closure by
  // switching the code entry field over to the new code object as well.
  __ bind(&bytecode_array_not_present);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kCodeOffset));
  __ AddP(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ StoreP(r6, FieldMemOperand(r3, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(r3, r6, r7);
  __ JumpToJSEntry(r6);
}

void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
  // Save the function and context for call to CompileBaseline.
  __ LoadP(r3, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
  __ LoadP(kContextRegister,
           MemOperand(fp, StandardFrameConstants::kContextOffset));

  // Leave the frame before recompiling for baseline so that we don't count as
  // an activation on the stack.
  LeaveInterpreterFrame(masm, r4);

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Push return value.
    __ push(r2);

    // Push function as argument and compile for baseline.
    __ push(r3);
    __ CallRuntime(Runtime::kCompileBaseline);

    // Restore return value.
    __ pop(r2);
  }
  __ Ret();
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
                                         Register count, Register scratch) {
  Label loop;
  __ AddP(index, index, Operand(kPointerSize));  // Bias up for the pre-decrement load below.
  __ LoadRR(r0, count);
  __ bind(&loop);
  __ LoadP(scratch, MemOperand(index, -kPointerSize));
  __ lay(index, MemOperand(index, -kPointerSize));
  __ push(scratch);
  __ SubP(r0, Operand(1));
  __ bne(&loop);
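  // Overall, roughly: for (int i = 0; i < count; ++i) push(index[-i]);
  // i.e. push count values, starting at the first-argument address and
  // walking down in memory.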
}

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r4 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- r3 : the target to call (can be any Object).
  // -----------------------------------

  // Calculate the number of arguments (add one for the receiver).
  __ AddP(r5, r2, Operand(1));

  // Push the arguments.
  Generate_InterpreterPushArgs(masm, r4, r5, r6);

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r2 : argument count (not including receiver)
  // -- r5 : new target
  // -- r3 : constructor to call
  // -- r4 : address of the first argument
  // -----------------------------------

  // Push a slot for the receiver to be constructed.
  __ LoadImmP(r0, Operand::Zero());
  __ push(r0);

  // Push the arguments (skip if none).
  Label skip;
  __ CmpP(r2, Operand::Zero());
  __ beq(&skip);
  Generate_InterpreterPushArgs(masm, r4, r2, r6);
  __ bind(&skip);

  // Call the constructor with r2, r3, and r5 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
  __ Move(r4, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ AddP(r14, r4, Operand(interpreter_entry_return_pc_offset->value() +
                           Code::kHeaderSize - kHeapObjectTag));
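  // r14 is the return-address (link) register on s390, so the bytecode
  // handler jumped to below will return into the entry trampoline at this
  // offset.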

  // Initialize the dispatch table register.
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check that the function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister,
                           kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2));
  __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Jump(ip);
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : argument count (preserved for callee)
  //  -- r5 : new target (preserved for callee)
  //  -- r3 : target function (preserved for callee)
  // -----------------------------------
  // First look up code; maybe we don't need to compile!
  Label gotta_call_runtime;
  Label maybe_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = r3;
  Register map = r8;
  Register index = r4;
  __ LoadP(map,
           FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(map,
           FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ LoadP(index, FieldMemOperand(map, FixedArray::kLengthOffset));
  __ CmpSmiLiteral(index, Smi::FromInt(2), r0);
  __ blt(&gotta_call_runtime);
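  // The optimized code map is a FixedArray of fixed-size entry groups; the
  // SharedFunctionInfo::kOffsetToPrevious* constants used below select the
  // context, OSR ast id, literals and code slots of each group.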
   1262 
   1263   // Find literals.
   1264   // r9 : native context
   1265   // r4  : length / index
   1266   // r8  : optimized code map
   1267   // r5  : new target
   1268   // r3  : closure
   1269   Register native_context = r9;
   1270   __ LoadP(native_context, NativeContextMemOperand());
   1271 
   1272   __ bind(&loop_top);
   1273   Register temp = r1;
   1274   Register array_pointer = r7;
   1275 
   1276   // Does the native context match?
   1277   __ SmiToPtrArrayOffset(array_pointer, index);
   1278   __ AddP(array_pointer, map, array_pointer);
   1279   __ LoadP(temp, FieldMemOperand(array_pointer,
   1280                                  SharedFunctionInfo::kOffsetToPreviousContext));
   1281   __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
   1282   __ CmpP(temp, native_context);
   1283   __ bne(&loop_bottom, Label::kNear);
   1284   // OSR id set to none?
   1285   __ LoadP(temp,
   1286            FieldMemOperand(array_pointer,
   1287                            SharedFunctionInfo::kOffsetToPreviousOsrAstId));
   1288   const int bailout_id = BailoutId::None().ToInt();
   1289   __ CmpSmiLiteral(temp, Smi::FromInt(bailout_id), r0);
   1290   __ bne(&loop_bottom, Label::kNear);
   1291   // Literals available?
   1292   __ LoadP(temp,
   1293            FieldMemOperand(array_pointer,
   1294                            SharedFunctionInfo::kOffsetToPreviousLiterals));
   1295   __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
   1296   __ JumpIfSmi(temp, &gotta_call_runtime);
   1297 
   1298   // Save the literals in the closure.
   1299   __ StoreP(temp, FieldMemOperand(closure, JSFunction::kLiteralsOffset), r0);
   1300   __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, r6,
   1301                       kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
   1302                       OMIT_SMI_CHECK);
   1303 
   1304   // Code available?
   1305   Register entry = r6;
   1306   __ LoadP(entry,
   1307            FieldMemOperand(array_pointer,
   1308                            SharedFunctionInfo::kOffsetToPreviousCachedCode));
   1309   __ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
   1310   __ JumpIfSmi(entry, &maybe_call_runtime);
   1311 
   1312   // Found literals and code. Get them into the closure and return.
   1313   // Store code entry in the closure.
   1314   __ AddP(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
   1315 
   1316   Label install_optimized_code_and_tailcall;
   1317   __ bind(&install_optimized_code_and_tailcall);
   1318   __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
   1319   __ RecordWriteCodeEntryField(closure, entry, r7);
   1320 
   1321   // Link the closure into the optimized function list.
   1322   // r6 : code entry
   1323   // r9: native context
   1324   // r3 : closure
   1325   __ LoadP(
   1326       r7, ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
   1327   __ StoreP(r7, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset),
   1328             r0);
   1329   __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, r7, temp,
   1330                       kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
   1331                       OMIT_SMI_CHECK);
   1332   const int function_list_offset =
   1333       Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
   1334   __ StoreP(
   1335       closure,
   1336       ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST), r0);
   1337   // Save closure before the write barrier.
   1338   __ LoadRR(r7, closure);
   1339   __ RecordWriteContextSlot(native_context, function_list_offset, r7, temp,
   1340                             kLRHasNotBeenSaved, kDontSaveFPRegs);
   1341   __ JumpToJSEntry(entry);
   1342 
   1343   __ bind(&loop_bottom);
   1344   __ SubSmiLiteral(index, index, Smi::FromInt(SharedFunctionInfo::kEntryLength),
   1345                    r0);
   1346   __ CmpSmiLiteral(index, Smi::FromInt(1), r0);
   1347   __ bgt(&loop_top);
   1348 
   1349   // We found neither literals nor code.
   1350   __ b(&gotta_call_runtime);
   1351 
   1352   __ bind(&maybe_call_runtime);
   1353 
   1354   // Last possibility: check the context-independent optimized code map entry.
   1355   __ LoadP(entry,
   1356            FieldMemOperand(map, FixedArray::kHeaderSize +
   1357                                     SharedFunctionInfo::kSharedCodeIndex));
   1358   __ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
   1359   __ JumpIfSmi(entry, &try_shared);
   1360 
   1361   // Store code entry in the closure.
   1362   __ AddP(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
   1363   __ b(&install_optimized_code_and_tailcall);
   1364 
   1365   __ bind(&try_shared);
   1366   // Is the full code valid?
   1367   __ LoadP(entry,
   1368            FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
   1369   __ LoadP(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
   1370   __ LoadlW(r7, FieldMemOperand(entry, Code::kFlagsOffset));
   1371   __ DecodeField<Code::KindField>(r7);
   1372   __ CmpP(r7, Operand(Code::BUILTIN));
   1373   __ beq(&gotta_call_runtime);
   1374   // Yes, install the full code.
   1375   __ AddP(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
   1376   __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
   1377   __ RecordWriteCodeEntryField(closure, entry, r7);
   1378   __ JumpToJSEntry(entry);
   1379 
   1380   __ bind(&gotta_call_runtime);
   1381   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
   1382 }
   1383 
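        // In outline, the fast path above is roughly the following sketch (entry
        // layout as implied by the SharedFunctionInfo::kOffsetToPrevious*
        // constants used above; contexts, literals and code are read through
        // weak cells):
        //
        //   for (i = length; i > 1; i -= kEntryLength) {
        //     if (entry(i).context != native_context) continue;
        //     if (entry(i).osr_ast_id != BailoutId::None()) continue;
        //     if (entry(i).literals cleared) goto gotta_call_runtime;
        //     install literals; if (entry(i).code alive) install and tail-call it;
        //   }
        //   // Then: the context-independent entry, then the SFI's code unless
        //   // it is a builtin, and finally Runtime::kCompileLazy.
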
   1384 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
   1385   GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
   1386 }
   1387 
   1388 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
   1389   GenerateTailCallToReturnedCode(masm,
   1390                                  Runtime::kCompileOptimized_NotConcurrent);
   1391 }
   1392 
   1393 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
   1394   GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
   1395 }
   1396 
   1397 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
   1398   // For now, we are relying on the fact that make_code_young doesn't do any
   1399   // garbage collection which allows us to save/restore the registers without
   1400   // worrying about which of them contain pointers. We also don't build an
   1401   // internal frame to make the code faster, since we shouldn't have to do stack
   1402   // crawls in MakeCodeYoung. This seems a bit fragile.
   1403 
   1404   // Point r2 at the start of the PlatformCodeAge sequence.
   1405   __ CleanseP(r14);
   1406   __ SubP(r14, Operand(kCodeAgingSequenceLength));
   1407   __ LoadRR(r2, r14);
   1408 
   1409   __ pop(r14);
   1410 
   1411   // The following registers must be saved and restored when calling through to
   1412   // the runtime:
   1413   //   r2 - contains return address (beginning of patch sequence)
   1414   //   r3 - isolate
   1415   //   r5 - new target
   1416   //   lr - return address
   1417   FrameScope scope(masm, StackFrame::MANUAL);
   1418   __ MultiPush(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
   1419   __ PrepareCallCFunction(2, 0, r4);
   1420   __ mov(r3, Operand(ExternalReference::isolate_address(masm->isolate())));
   1421   __ CallCFunction(
   1422       ExternalReference::get_make_code_young_function(masm->isolate()), 2);
   1423   __ MultiPop(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
   1424   __ LoadRR(ip, r2);
   1425   __ Jump(ip);
   1426 }
   1427 
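        // The C call above passes exactly two arguments, as set up by
        // PrepareCallCFunction: r2 holds the start of the code-age sequence and
        // r3 the isolate. A rough sketch of the call (the callee is whatever
        // get_make_code_young_function resolves to):
        //
        //   make_code_young(sequence_start /* r2 */, isolate /* r3 */);
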
   1428 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
   1429   void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
   1430       MacroAssembler* masm) {                                 \
   1431     GenerateMakeCodeYoungAgainCommon(masm);                   \
   1432   }                                                           \
   1433   void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
   1434       MacroAssembler* masm) {                                 \
   1435     GenerateMakeCodeYoungAgainCommon(masm);                   \
   1436   }
   1437 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
   1438 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
   1439 
   1440 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
   1441   // For now, we are relying on the fact that make_code_young doesn't do any
   1442   // garbage collection which allows us to save/restore the registers without
   1443   // worrying about which of them contain pointers. We also don't build an
   1444   // internal frame to make the code faster, since we shouldn't have to do stack
   1445   // crawls in MakeCodeYoung. This seems a bit fragile.
   1446 
   1447   // Point r2 at the start of the PlatformCodeAge sequence.
   1448   __ CleanseP(r14);
   1449   __ SubP(r14, Operand(kCodeAgingSequenceLength));
   1450   __ LoadRR(r2, r14);
   1451 
   1452   __ pop(r14);
   1453 
   1454   // The following registers must be saved and restored when calling through to
   1455   // the runtime:
   1456   //   r2 - contains return address (beginning of patch sequence)
   1457   //   r3 - isolate
   1458   //   r5 - new target
   1459   //   lr - return address
   1460   FrameScope scope(masm, StackFrame::MANUAL);
   1461   __ MultiPush(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
   1462   __ PrepareCallCFunction(2, 0, r4);
   1463   __ mov(r3, Operand(ExternalReference::isolate_address(masm->isolate())));
   1464   __ CallCFunction(
   1465       ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
   1466       2);
   1467   __ MultiPop(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
   1468   __ LoadRR(ip, r2);
   1469 
   1470   // Perform prologue operations usually performed by the young code stub.
   1471   __ PushStandardFrame(r3);
   1472 
   1473   // Jump to point after the code-age stub.
   1474   __ AddP(r2, ip, Operand(kNoCodeAgeSequenceLength));
   1475   __ Jump(r2);
   1476 }
   1477 
   1478 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
   1479   GenerateMakeCodeYoungAgainCommon(masm);
   1480 }
   1481 
   1482 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
   1483   Generate_MarkCodeAsExecutedOnce(masm);
   1484 }
   1485 
   1486 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
   1487                                              SaveFPRegsMode save_doubles) {
   1488   {
   1489     FrameScope scope(masm, StackFrame::INTERNAL);
   1490 
   1491     // Preserve registers across notification, this is important for compiled
   1492     // stubs that tail call the runtime on deopts passing their parameters in
   1493     // registers.
   1494     __ MultiPush(kJSCallerSaved | kCalleeSaved);
   1495     // Call the runtime to notify it of the stub failure; no arguments
   1496     // need to be passed.
   1496     __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
   1497     __ MultiPop(kJSCallerSaved | kCalleeSaved);
   1498   }
   1499 
   1500   __ la(sp, MemOperand(sp, kPointerSize));  // Ignore state
   1501   __ Ret();                                 // Jump to miss handler
   1502 }
   1503 
   1504 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
   1505   Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
   1506 }
   1507 
   1508 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
   1509   Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
   1510 }
   1511 
   1512 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
   1513                                              Deoptimizer::BailoutType type) {
   1514   {
   1515     FrameScope scope(masm, StackFrame::INTERNAL);
   1516     // Pass the deoptimization type to the runtime system.
   1517     __ LoadSmiLiteral(r2, Smi::FromInt(static_cast<int>(type)));
   1518     __ push(r2);
   1519     __ CallRuntime(Runtime::kNotifyDeoptimized);
   1520   }
   1521 
   1522   // Get the full codegen state from the stack and untag it -> r8.
   1523   __ LoadP(r8, MemOperand(sp, 0 * kPointerSize));
   1524   __ SmiUntag(r8);
   1525   // Switch on the state.
   1526   Label with_tos_register, unknown_state;
   1527   __ CmpP(
   1528       r8,
   1529       Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::NO_REGISTERS)));
   1530   __ bne(&with_tos_register);
   1531   __ la(sp, MemOperand(sp, 1 * kPointerSize));  // Remove state.
   1532   __ Ret();
   1533 
   1534   __ bind(&with_tos_register);
   1535   DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r2.code());
   1536   __ LoadP(r2, MemOperand(sp, 1 * kPointerSize));
   1537   __ CmpP(
   1538       r8,
   1539       Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::TOS_REGISTER)));
   1540   __ bne(&unknown_state);
   1541   __ la(sp, MemOperand(sp, 2 * kPointerSize));  // Remove state.
   1542   __ Ret();
   1543 
   1544   __ bind(&unknown_state);
   1545   __ stop("no cases left");
   1546 }
   1547 
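        // The switch above handles the two remaining full-codegen bailout
        // states; in outline (state is the Smi on top of the stack):
        //
        //   if (state == NO_REGISTERS) { sp += 1 * kPointerSize; return; }
        //   if (state == TOS_REGISTER) { r2 = sp[1]; sp += 2 * kPointerSize; return; }
        //   stop("no cases left");
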
   1548 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
   1549   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
   1550 }
   1551 
   1552 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
   1553   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
   1554 }
   1555 
   1556 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
   1557   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
   1558 }
   1559 
   1560 // Clobbers registers {r6, r7, r8, r9}.
   1561 void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
   1562                              Register function_template_info,
   1563                              Label* receiver_check_failed) {
   1564   Register signature = r6;
   1565   Register map = r7;
   1566   Register constructor = r8;
   1567   Register scratch = r9;
   1568 
   1569   // If there is no signature, return the holder.
   1570   __ LoadP(signature, FieldMemOperand(function_template_info,
   1571                                       FunctionTemplateInfo::kSignatureOffset));
   1572   Label receiver_check_passed;
   1573   __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
   1574                 &receiver_check_passed);
   1575 
   1576   // Walk the prototype chain.
   1577   __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
   1578   Label prototype_loop_start;
   1579   __ bind(&prototype_loop_start);
   1580 
   1581   // Get the constructor, if any.
   1582   __ GetMapConstructor(constructor, map, scratch, scratch);
   1583   __ CmpP(scratch, Operand(JS_FUNCTION_TYPE));
   1584   Label next_prototype;
   1585   __ bne(&next_prototype);
   1586   Register type = constructor;
   1587   __ LoadP(type,
   1588            FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
   1589   __ LoadP(type,
   1590            FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));
   1591 
   1592   // Loop through the chain of inheriting function templates.
   1593   Label function_template_loop;
   1594   __ bind(&function_template_loop);
   1595 
   1596   // If the signatures match, we have a compatible receiver.
   1597   __ CmpP(signature, type);
   1598   __ beq(&receiver_check_passed);
   1599 
   1600   // If the current type is not a FunctionTemplateInfo, load the next prototype
   1601   // in the chain.
   1602   __ JumpIfSmi(type, &next_prototype);
   1603   __ CompareObjectType(type, scratch, scratch, FUNCTION_TEMPLATE_INFO_TYPE);
   1604   __ bne(&next_prototype);
   1605 
   1606   // Otherwise load the parent function template and iterate.
   1607   __ LoadP(type,
   1608            FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
   1609   __ b(&function_template_loop);
   1610 
   1611   // Load the next prototype.
   1612   __ bind(&next_prototype);
   1613   __ LoadlW(scratch, FieldMemOperand(map, Map::kBitField3Offset));
   1614   __ DecodeField<Map::HasHiddenPrototype>(scratch);
   1615   __ beq(receiver_check_failed);
   1616 
   1617   __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
   1618   __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
   1619   // Iterate.
   1620   __ b(&prototype_loop_start);
   1621 
   1622   __ bind(&receiver_check_passed);
   1623 }
   1624 
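        // A rough sketch of the walk above (hidden prototypes only; a map
        // without a hidden prototype ends the search):
        //
        //   for (map = receiver->map;; receiver = map->prototype, map = receiver->map) {
        //     type = function data of map's constructor (if it is a JSFunction);
        //     while (type is FunctionTemplateInfo) {
        //       if (type == signature) return;  // check passed
        //       type = type->parent_template;
        //     }
        //     if (!map->has_hidden_prototype) goto receiver_check_failed;
        //   }
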
   1625 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
   1626   // ----------- S t a t e -------------
   1627   //  -- r2                 : number of arguments excluding receiver
   1628   //  -- r3                 : callee
   1629   //  -- lr                 : return address
   1630   //  -- sp[0]              : last argument
   1631   //  -- ...
   1632   //  -- sp[4 * (argc - 1)] : first argument
   1633   //  -- sp[4 * argc]       : receiver
   1634   // -----------------------------------
   1635 
   1636   // Load the FunctionTemplateInfo.
   1637   __ LoadP(r5, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
   1638   __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset));
   1639 
   1640   // Do the compatible receiver check.
   1641   Label receiver_check_failed;
   1642   __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2));
   1643   __ LoadP(r4, MemOperand(sp, r1));
   1644   CompatibleReceiverCheck(masm, r4, r5, &receiver_check_failed);
   1645 
   1646   // Get the callback offset from the FunctionTemplateInfo, and jump to the
   1647   // beginning of the code.
   1648   __ LoadP(r6, FieldMemOperand(r5, FunctionTemplateInfo::kCallCodeOffset));
   1649   __ LoadP(r6, FieldMemOperand(r6, CallHandlerInfo::kFastHandlerOffset));
   1650   __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
   1651   __ JumpToJSEntry(ip);
   1652 
   1653   // Compatible receiver check failed: throw an Illegal Invocation exception.
   1654   __ bind(&receiver_check_failed);
   1655   // Drop the arguments (including the receiver).
   1656   __ AddP(r1, r1, Operand(kPointerSize));
   1657   __ AddP(sp, sp, r1);
   1658   __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
   1659 }
   1660 
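        // In outline, the builtin above does (a sketch; argc excludes the
        // receiver, which lives at sp[argc * kPointerSize]):
        //
        //   fti = callee->shared()->function_data();      // FunctionTemplateInfo
        //   CompatibleReceiverCheck(receiver, fti);       // may fail below
        //   tail-call fti->call_code()->fast_handler() at its code entry;
        //   // on check failure: sp += (argc + 1) * kPointerSize, then throw
        //   // Runtime::kThrowIllegalInvocation.
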
   1661 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   1662   // Lookup the function in the JavaScript frame.
   1663   __ LoadP(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   1664   {
   1665     FrameScope scope(masm, StackFrame::INTERNAL);
   1666     // Pass function as argument.
   1667     __ push(r2);
   1668     __ CallRuntime(Runtime::kCompileForOnStackReplacement);
   1669   }
   1670 
   1671   // If the code object is null, just return to the unoptimized code.
   1672   Label skip;
   1673   __ CmpSmiLiteral(r2, Smi::FromInt(0), r0);
   1674   __ bne(&skip);
   1675   __ Ret();
   1676 
   1677   __ bind(&skip);
   1678 
   1679   // Load deoptimization data from the code object.
   1680   // <deopt_data> = <code>[#deoptimization_data_offset]
   1681   __ LoadP(r3, FieldMemOperand(r2, Code::kDeoptimizationDataOffset));
   1682 
   1683   // Load the OSR entrypoint offset from the deoptimization data.
   1684   // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
   1685   __ LoadP(
   1686       r3, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(
   1687                                   DeoptimizationInputData::kOsrPcOffsetIndex)));
   1688   __ SmiUntag(r3);
   1689 
   1690   // Compute the target address = code_obj + header_size + osr_offset
   1691   // <entry_addr> = <code_obj> + #header_size + <osr_offset>
   1692   __ AddP(r2, r3);
   1693   __ AddP(r0, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
   1694   __ LoadRR(r14, r0);
   1695 
   1696   // And "return" to the OSR entry point of the function.
   1697   __ Ret();
   1698 }
   1699 
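        // The entry address computed above is, in effect:
        //
        //   entry = code_obj + Code::kHeaderSize - kHeapObjectTag + osr_pc_offset
        //
        // where osr_pc_offset is the untagged Smi read from the deoptimization
        // data at DeoptimizationInputData::kOsrPcOffsetIndex.
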
   1700 // static
   1701 void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
   1702                                                int field_index) {
   1703   // ----------- S t a t e -------------
   1704   //  -- r2    : number of arguments
   1705   //  -- r3    : function
   1706   //  -- cp    : context
   1708   //  -- lr    : return address
   1709   //  -- sp[0] : receiver
   1710   // -----------------------------------
   1711 
   1712   // 1. Pop receiver into r2 and check that it's actually a JSDate object.
   1713   Label receiver_not_date;
   1714   {
   1715     __ Pop(r2);
   1716     __ JumpIfSmi(r2, &receiver_not_date);
   1717     __ CompareObjectType(r2, r4, r5, JS_DATE_TYPE);
   1718     __ bne(&receiver_not_date);
   1719   }
   1720 
   1721   // 2. Load the specified date field, falling back to the runtime as necessary.
   1722   if (field_index == JSDate::kDateValue) {
   1723     __ LoadP(r2, FieldMemOperand(r2, JSDate::kValueOffset));
   1724   } else {
   1725     if (field_index < JSDate::kFirstUncachedField) {
   1726       Label stamp_mismatch;
   1727       __ mov(r3, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
   1728       __ LoadP(r3, MemOperand(r3));
   1729       __ LoadP(ip, FieldMemOperand(r2, JSDate::kCacheStampOffset));
   1730       __ CmpP(r3, ip);
   1731       __ bne(&stamp_mismatch);
   1732       __ LoadP(r2, FieldMemOperand(
   1733                        r2, JSDate::kValueOffset + field_index * kPointerSize));
   1734       __ Ret();
   1735       __ bind(&stamp_mismatch);
   1736     }
   1737     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
   1738     __ PrepareCallCFunction(2, r3);
   1739     __ LoadSmiLiteral(r3, Smi::FromInt(field_index));
   1740     __ CallCFunction(
   1741         ExternalReference::get_date_field_function(masm->isolate()), 2);
   1742   }
   1743   __ Ret();
   1744 
   1745   // 3. Raise a TypeError if the receiver is not a date.
   1746   __ bind(&receiver_not_date);
   1747   {
   1748     FrameScope scope(masm, StackFrame::MANUAL);
   1749     __ push(r2);
   1750     __ PushStandardFrame(r3);
   1751     __ LoadSmiLiteral(r6, Smi::FromInt(0));
   1752     __ push(r6);
   1753     __ CallRuntime(Runtime::kThrowNotDateError);
   1754   }
   1755 }
   1756 
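        // For the cached fields, the sequence above amounts to (a sketch; the
        // cached slots sit right after kValueOffset, field_index slots in):
        //
        //   if (date->cache_stamp == isolate->date_cache_stamp)
        //     return date->fields[field_index];                  // fast path
        //   return get_date_field_function(date, Smi(field_index));  // C call
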
   1757 // static
   1758 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   1759   // ----------- S t a t e -------------
   1760   //  -- r2    : argc
   1761   //  -- sp[0] : argArray
   1762   //  -- sp[4] : thisArg
   1763   //  -- sp[8] : receiver
   1764   // -----------------------------------
   1765 
   1766   // 1. Load receiver into r3, argArray into r2 (if present), remove all
   1767   // arguments from the stack (including the receiver), and push thisArg (if
   1768   // present) instead.
   1769   {
   1770     Label skip;
   1771     Register arg_size = r4;
   1772     Register new_sp = r5;
   1773     Register scratch = r6;
   1774     __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
   1775     __ AddP(new_sp, sp, arg_size);
   1776     __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
   1777     __ LoadRR(scratch, r2);
   1778     __ LoadP(r3, MemOperand(new_sp, 0));  // receiver
   1779     __ CmpP(arg_size, Operand(kPointerSize));
   1780     __ blt(&skip);
   1781     __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize));  // thisArg
   1782     __ beq(&skip);
   1783     __ LoadP(r2, MemOperand(new_sp, 2 * -kPointerSize));  // argArray
   1784     __ bind(&skip);
   1785     __ LoadRR(sp, new_sp);
   1786     __ StoreP(scratch, MemOperand(sp, 0));
   1787   }
   1788 
   1789   // ----------- S t a t e -------------
   1790   //  -- r2    : argArray
   1791   //  -- r3    : receiver
   1792   //  -- sp[0] : thisArg
   1793   // -----------------------------------
   1794 
   1795   // 2. Make sure the receiver is actually callable.
   1796   Label receiver_not_callable;
   1797   __ JumpIfSmi(r3, &receiver_not_callable);
   1798   __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
   1799   __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
   1800   __ TestBit(r6, Map::kIsCallable);
   1801   __ beq(&receiver_not_callable);
   1802 
   1803   // 3. Tail call with no arguments if argArray is null or undefined.
   1804   Label no_arguments;
   1805   __ JumpIfRoot(r2, Heap::kNullValueRootIndex, &no_arguments);
   1806   __ JumpIfRoot(r2, Heap::kUndefinedValueRootIndex, &no_arguments);
   1807 
   1808   // 4a. Apply the receiver to the given argArray (passing undefined for
   1809   // new.target).
   1810   __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
   1811   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
   1812 
   1813   // 4b. The argArray is either null or undefined, so we tail call without any
   1814   // arguments to the receiver.
   1815   __ bind(&no_arguments);
   1816   {
   1817     __ LoadImmP(r2, Operand::Zero());
   1818     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   1819   }
   1820 
   1821   // 4c. The receiver is not callable, throw an appropriate TypeError.
   1822   __ bind(&receiver_not_callable);
   1823   {
   1824     __ StoreP(r3, MemOperand(sp, 0));
   1825     __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
   1826   }
   1827 }
   1828 
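        // Taken together, the stub above implements Function.prototype.apply
        // roughly as the following sketch (argument defaulting as in step 1):
        //
        //   if (!IsCallable(receiver)) throw TypeError;             // step 2
        //   if (argArray === null || argArray === undefined)
        //     return Call(receiver, thisArg);                       // steps 3/4b
        //   return Apply(receiver, thisArg, argArray);              // step 4a
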
   1829 // static
   1830 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
   1831   // 1. Make sure we have at least one argument.
   1832   // r2: actual number of arguments
   1833   {
   1834     Label done;
   1835     __ CmpP(r2, Operand::Zero());
   1836     __ bne(&done, Label::kNear);
   1837     __ PushRoot(Heap::kUndefinedValueRootIndex);
   1838     __ AddP(r2, Operand(1));
   1839     __ bind(&done);
   1840   }
   1841 
   1842   // r2: actual number of arguments
   1843   // 2. Get the callable to call (passed as receiver) from the stack.
   1844   __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
   1845   __ LoadP(r3, MemOperand(sp, r4));
   1846 
   1847   // 3. Shift arguments and return address one slot down on the stack
   1848   //    (overwriting the original receiver).  Adjust argument count to make
   1849   //    the original first argument the new receiver.
   1850   // r2: actual number of arguments
   1851   // r3: callable
   1852   {
   1853     Label loop;
   1854     // Calculate the copy start address (destination). Copy end address is sp.
   1855     __ AddP(r4, sp, r4);
   1856 
   1857     __ bind(&loop);
   1858     __ LoadP(ip, MemOperand(r4, -kPointerSize));
   1859     __ StoreP(ip, MemOperand(r4));
   1860     __ SubP(r4, Operand(kPointerSize));
   1861     __ CmpP(r4, sp);
   1862     __ bne(&loop);
   1863     // Adjust the actual number of arguments and remove the top element
   1864     // (which is a copy of the last argument).
   1865     __ SubP(r2, Operand(1));
   1866     __ pop();
   1867   }
   1868 
   1869   // 4. Call the callable.
   1870   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   1871 }
   1872 
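        // The loop above removes the receiver slot by sliding each argument one
        // slot toward higher addresses; in outline:
        //
        //   for (dst = sp + argc * kPointerSize; dst != sp; dst -= kPointerSize)
        //     *dst = *(dst - kPointerSize);
        //   sp += kPointerSize;  // pop the duplicated top slot
        //   argc -= 1;           // original first argument is the new receiver
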
   1873 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
   1874   // ----------- S t a t e -------------
   1875   //  -- r2     : argc
   1876   //  -- sp[0]  : argumentsList
   1877   //  -- sp[4]  : thisArgument
   1878   //  -- sp[8]  : target
   1879   //  -- sp[12] : receiver
   1880   // -----------------------------------
   1881 
   1882   // 1. Load target into r3 (if present), argumentsList into r2 (if present),
   1883   // remove all arguments from the stack (including the receiver), and push
   1884   // thisArgument (if present) instead.
   1885   {
   1886     Label skip;
   1887     Register arg_size = r4;
   1888     Register new_sp = r5;
   1889     Register scratch = r6;
   1890     __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
   1891     __ AddP(new_sp, sp, arg_size);
   1892     __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
   1893     __ LoadRR(scratch, r3);
   1894     __ LoadRR(r2, r3);
   1895     __ CmpP(arg_size, Operand(kPointerSize));
   1896     __ blt(&skip);
   1897     __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize));  // target
   1898     __ beq(&skip);
   1899     __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize));  // thisArgument
   1900     __ CmpP(arg_size, Operand(2 * kPointerSize));
   1901     __ beq(&skip);
   1902     __ LoadP(r2, MemOperand(new_sp, 3 * -kPointerSize));  // argumentsList
   1903     __ bind(&skip);
   1904     __ LoadRR(sp, new_sp);
   1905     __ StoreP(scratch, MemOperand(sp, 0));
   1906   }
   1907 
   1908   // ----------- S t a t e -------------
   1909   //  -- r2    : argumentsList
   1910   //  -- r3    : target
   1911   //  -- sp[0] : thisArgument
   1912   // -----------------------------------
   1913 
   1914   // 2. Make sure the target is actually callable.
   1915   Label target_not_callable;
   1916   __ JumpIfSmi(r3, &target_not_callable);
   1917   __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
   1918   __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
   1919   __ TestBit(r6, Map::kIsCallable);
   1920   __ beq(&target_not_callable);
   1921 
   1922   // 3a. Apply the target to the given argumentsList (passing undefined for
   1923   // new.target).
   1924   __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
   1925   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
   1926 
   1927   // 3b. The target is not callable, throw an appropriate TypeError.
   1928   __ bind(&target_not_callable);
   1929   {
   1930     __ StoreP(r3, MemOperand(sp, 0));
   1931     __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
   1932   }
   1933 }
   1934 
   1935 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
   1936   // ----------- S t a t e -------------
   1937   //  -- r2     : argc
   1938   //  -- sp[0]  : new.target (optional)
   1939   //  -- sp[4]  : argumentsList
   1940   //  -- sp[8]  : target
   1941   //  -- sp[12] : receiver
   1942   // -----------------------------------
   1943 
   1944   // 1. Load target into r3 (if present), argumentsList into r2 (if present),
   1945   // new.target into r5 (if present, otherwise use target), remove all
   1946   // arguments from the stack (including the receiver), and leave undefined
   1947   // on the stack as the receiver.
   1948   {
   1949     Label skip;
   1950     Register arg_size = r4;
   1951     Register new_sp = r6;
   1952     __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
   1953     __ AddP(new_sp, sp, arg_size);
   1954     __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
   1955     __ LoadRR(r2, r3);
   1956     __ LoadRR(r5, r3);
   1957     __ StoreP(r3, MemOperand(new_sp, 0));  // receiver (undefined)
   1958     __ CmpP(arg_size, Operand(kPointerSize));
   1959     __ blt(&skip);
   1960     __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize));  // target
   1961     __ LoadRR(r5, r3);  // new.target defaults to target
   1962     __ beq(&skip);
   1963     __ LoadP(r2, MemOperand(new_sp, 2 * -kPointerSize));  // argumentsList
   1964     __ CmpP(arg_size, Operand(2 * kPointerSize));
   1965     __ beq(&skip);
   1966     __ LoadP(r5, MemOperand(new_sp, 3 * -kPointerSize));  // new.target
   1967     __ bind(&skip);
   1968     __ LoadRR(sp, new_sp);
   1969   }
   1970 
   1971   // ----------- S t a t e -------------
   1972   //  -- r2    : argumentsList
   1973   //  -- r5    : new.target
   1974   //  -- r3    : target
   1975   //  -- sp[0] : receiver (undefined)
   1976   // -----------------------------------
   1977 
   1978   // 2. Make sure the target is actually a constructor.
   1979   Label target_not_constructor;
   1980   __ JumpIfSmi(r3, &target_not_constructor);
   1981   __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
   1982   __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
   1983   __ TestBit(r6, Map::kIsConstructor);
   1984   __ beq(&target_not_constructor);
   1985 
   1986   // 3. Make sure the new.target is actually a constructor.
   1987   Label new_target_not_constructor;
   1988   __ JumpIfSmi(r5, &new_target_not_constructor);
   1989   __ LoadP(r6, FieldMemOperand(r5, HeapObject::kMapOffset));
   1990   __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
   1991   __ TestBit(r6, Map::kIsConstructor);
   1992   __ beq(&new_target_not_constructor);
   1993 
   1994   // 4a. Construct the target with the given new.target and argumentsList.
   1995   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
   1996 
   1997   // 4b. The target is not a constructor, throw an appropriate TypeError.
   1998   __ bind(&target_not_constructor);
   1999   {
   2000     __ StoreP(r3, MemOperand(sp, 0));
   2001     __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
   2002   }
   2003 
   2004   // 4c. The new.target is not a constructor, throw an appropriate TypeError.
   2005   __ bind(&new_target_not_constructor);
   2006   {
   2007     __ StoreP(r5, MemOperand(sp, 0));
   2008     __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
   2009   }
   2010 }
   2011 
   2012 static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
   2013                                       Label* stack_overflow) {
   2014   // ----------- S t a t e -------------
   2015   //  -- r2 : actual number of arguments
   2016   //  -- r3 : function (passed through to callee)
   2017   //  -- r4 : expected number of arguments
   2018   //  -- r5 : new target (passed through to callee)
   2019   // -----------------------------------
   2020   // Check the stack for overflow. We are not trying to catch
   2021   // interruptions (e.g. debug break and preemption) here, so the "real stack
   2022   // limit" is checked.
   2023   __ LoadRoot(r7, Heap::kRealStackLimitRootIndex);
   2024   // Make r7 the space we have left. The stack might already have overflowed
   2025   // here, which will make r7 negative.
   2026   __ SubP(r7, sp, r7);
   2027   // Check if the arguments will overflow the stack.
   2028   __ ShiftLeftP(r0, r4, Operand(kPointerSizeLog2));
   2029   __ CmpP(r7, r0);
   2030   __ ble(stack_overflow);  // Signed comparison.
   2031 }
   2032 
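        // The check above is plain pointer arithmetic: with the real stack
        // limit in r7, it branches to stack_overflow when
        //
        //   (sp - real_stack_limit) <= expected_argc * kPointerSize
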
   2033 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
   2034   __ SmiTag(r2);
   2035   __ LoadSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   2036   // Stack updated as such:
   2037   //    old SP --->
   2038   //                 R14 Return Addr
   2039   //                 Old FP                     <--- New FP
   2040   //                 Arguments Adaptor SMI (frame type)
   2041   //                 Function
   2042   //                 ArgC as SMI                <--- New SP
   2043   __ lay(sp, MemOperand(sp, -5 * kPointerSize));
   2044 
   2045   // Cleanse the top nibble of 31-bit pointers.
   2046   __ CleanseP(r14);
   2047   __ StoreP(r14, MemOperand(sp, 4 * kPointerSize));
   2048   __ StoreP(fp, MemOperand(sp, 3 * kPointerSize));
   2049   __ StoreP(r6, MemOperand(sp, 2 * kPointerSize));
   2050   __ StoreP(r3, MemOperand(sp, 1 * kPointerSize));
   2051   __ StoreP(r2, MemOperand(sp, 0 * kPointerSize));
   2052   __ la(fp, MemOperand(sp, StandardFrameConstants::kFixedFrameSizeFromFp +
   2053                                kPointerSize));
   2054 }
   2055 
   2056 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
   2057   // ----------- S t a t e -------------
   2058   //  -- r2 : result being passed through
   2059   // -----------------------------------
   2060   // Get the number of arguments passed (as a smi), tear down the frame and
   2061   // then tear down the parameters.
   2062   __ LoadP(r3, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
   2063                                 kPointerSize)));
   2064   int stack_adjustment = kPointerSize;  // adjust for receiver
   2065   __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
   2066   __ SmiToPtrArrayOffset(r3, r3);
   2067   __ lay(sp, MemOperand(sp, r3));
   2068 }
   2069 
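        // In outline: reload the saved (actual) argument count, drop the
        // adaptor frame plus the receiver, then drop the arguments themselves:
        //
        //   argc = untag(fp[-(kFixedFrameSizeFromFp + kPointerSize)]);
        //   LeaveFrame(ARGUMENTS_ADAPTOR, kPointerSize);  // frame + receiver
        //   sp += argc * kPointerSize;
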
   2070 // static
   2071 void Builtins::Generate_Apply(MacroAssembler* masm) {
   2072   // ----------- S t a t e -------------
   2073   //  -- r2    : argumentsList
   2074   //  -- r3    : target
   2075   //  -- r5    : new.target (checked to be constructor or undefined)
   2076   //  -- sp[0] : thisArgument
   2077   // -----------------------------------
   2078 
   2079   // Create the list of arguments from the array-like argumentsList.
   2080   {
   2081     Label create_arguments, create_array, create_runtime, done_create;
   2082     __ JumpIfSmi(r2, &create_runtime);
   2083 
   2084     // Load the map of argumentsList into r4.
   2085     __ LoadP(r4, FieldMemOperand(r2, HeapObject::kMapOffset));
   2086 
   2087     // Load native context into r6.
   2088     __ LoadP(r6, NativeContextMemOperand());
   2089 
   2090     // Check if argumentsList is an (unmodified) arguments object.
   2091     __ LoadP(ip, ContextMemOperand(r6, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
   2092     __ CmpP(ip, r4);
   2093     __ beq(&create_arguments);
   2094     __ LoadP(ip, ContextMemOperand(r6, Context::STRICT_ARGUMENTS_MAP_INDEX));
   2095     __ CmpP(ip, r4);
   2096     __ beq(&create_arguments);
   2097 
   2098     // Check if argumentsList is a fast JSArray.
   2099     __ CompareInstanceType(r4, ip, JS_ARRAY_TYPE);
   2100     __ beq(&create_array);
   2101 
   2102     // Ask the runtime to create the list (actually a FixedArray).
   2103     __ bind(&create_runtime);
   2104     {
   2105       FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
   2106       __ Push(r3, r5, r2);
   2107       __ CallRuntime(Runtime::kCreateListFromArrayLike);
   2108       __ Pop(r3, r5);
   2109       __ LoadP(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
   2110       __ SmiUntag(r4);
   2111     }
   2112     __ b(&done_create);
   2113 
   2114     // Try to create the list from an arguments object.
   2115     __ bind(&create_arguments);
   2116     __ LoadP(r4, FieldMemOperand(r2, JSArgumentsObject::kLengthOffset));
   2117     __ LoadP(r6, FieldMemOperand(r2, JSObject::kElementsOffset));
   2118     __ LoadP(ip, FieldMemOperand(r6, FixedArray::kLengthOffset));
   2119     __ CmpP(r4, ip);
   2120     __ bne(&create_runtime);
   2121     __ SmiUntag(r4);
   2122     __ LoadRR(r2, r6);
   2123     __ b(&done_create);
   2124 
   2125     // Try to create the list from a JSArray object.
   2126     __ bind(&create_array);
   2127     __ LoadlB(r4, FieldMemOperand(r4, Map::kBitField2Offset));
   2128     __ DecodeField<Map::ElementsKindBits>(r4);
   2129     STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
   2130     STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
   2131     STATIC_ASSERT(FAST_ELEMENTS == 2);
   2132     __ CmpP(r4, Operand(FAST_ELEMENTS));
   2133     __ bgt(&create_runtime);
   2134     __ CmpP(r4, Operand(FAST_HOLEY_SMI_ELEMENTS));
   2135     __ beq(&create_runtime);
   2136     __ LoadP(r4, FieldMemOperand(r2, JSArray::kLengthOffset));
   2137     __ LoadP(r2, FieldMemOperand(r2, JSArray::kElementsOffset));
   2138     __ SmiUntag(r4);
   2139 
   2140     __ bind(&done_create);
   2141   }
   2142 
   2143   // Check for stack overflow.
   2144   {
   2145     // Check the stack for overflow. We are not trying to catch interruptions
   2146     // (i.e. debug break and preemption) here, so check the "real stack limit".
   2147     Label done;
   2148     __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
   2149     // Make ip the space we have left. The stack might already have overflowed
   2150     // here, which will make ip negative.
   2151     __ SubP(ip, sp, ip);
   2152     // Check if the arguments will overflow the stack.
   2153     __ ShiftLeftP(r0, r4, Operand(kPointerSizeLog2));
   2154     __ CmpP(ip, r0);  // Signed comparison.
   2155     __ bgt(&done);
   2156     __ TailCallRuntime(Runtime::kThrowStackOverflow);
   2157     __ bind(&done);
   2158   }
   2159 
   2160   // ----------- S t a t e -------------
   2161   //  -- r3    : target
   2162   //  -- r2    : args (a FixedArray built from argumentsList)
   2163   //  -- r4    : len (number of elements to push from args)
   2164   //  -- r5    : new.target (checked to be constructor or undefined)
   2165   //  -- sp[0] : thisArgument
   2166   // -----------------------------------
   2167 
   2168   // Push arguments onto the stack (thisArgument is already on the stack).
   2169   {
   2170     Label loop, no_args;
   2171     __ CmpP(r4, Operand::Zero());
   2172     __ beq(&no_args);
   2173     __ AddP(r2, r2,
   2174             Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
   2175     __ LoadRR(r1, r4);
   2176     __ bind(&loop);
   2177     __ LoadP(r0, MemOperand(r2, kPointerSize));
   2178     __ la(r2, MemOperand(r2, kPointerSize));
   2179     __ push(r0);
   2180     __ BranchOnCount(r1, &loop);
   2181     __ bind(&no_args);
   2182     __ LoadRR(r2, r4);
   2183   }
   2184 
   2185   // Dispatch to Call or Construct depending on whether new.target is undefined.
   2186   {
   2187     __ CompareRoot(r5, Heap::kUndefinedValueRootIndex);
   2188     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
   2189     __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
   2190   }
   2191 }
   2192 
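        // Overall flow of Generate_Apply, roughly:
        //
        //   args = FixedArray from argumentsList;  // fast paths or the runtime
        //   check stack space for len slots;
        //   push args[0 .. len); r2 = len;
        //   (new.target == undefined) ? Call(target)
        //                             : Construct(target, new.target);
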
   2193 namespace {
   2194 
   2195 // Drops the top JavaScript frame and an arguments adaptor frame below it (if
   2196 // present), preserving all the arguments prepared for the current call.
   2197 // Does nothing if the debugger is currently active.
   2198 // ES6 14.6.3. PrepareForTailCall
   2199 //
   2200 // Stack structure for the function g() tail calling f():
   2201 //
   2202 // ------- Caller frame: -------
   2203 // |  ...
   2204 // |  g()'s arg M
   2205 // |  ...
   2206 // |  g()'s arg 1
   2207 // |  g()'s receiver arg
   2208 // |  g()'s caller pc
   2209 // ------- g()'s frame: -------
   2210 // |  g()'s caller fp      <- fp
   2211 // |  g()'s context
   2212 // |  function pointer: g
   2213 // |  -------------------------
   2214 // |  ...
   2215 // |  ...
   2216 // |  f()'s arg N
   2217 // |  ...
   2218 // |  f()'s arg 1
   2219 // |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
   2220 // ----------------------
   2221 //
   2222 void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
   2223                         Register scratch1, Register scratch2,
   2224                         Register scratch3) {
   2225   DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
   2226   Comment cmnt(masm, "[ PrepareForTailCall");
   2227 
   2228   // Prepare for tail call only if ES2015 tail call elimination is active.
   2229   Label done;
   2230   ExternalReference is_tail_call_elimination_enabled =
   2231       ExternalReference::is_tail_call_elimination_enabled_address(
   2232           masm->isolate());
   2233   __ mov(scratch1, Operand(is_tail_call_elimination_enabled));
   2234   __ LoadlB(scratch1, MemOperand(scratch1));
   2235   __ CmpP(scratch1, Operand::Zero());
   2236   __ beq(&done);
   2237 
   2238   // Drop possible interpreter handler/stub frame.
   2239   {
   2240     Label no_interpreter_frame;
   2241     __ LoadP(scratch3,
   2242              MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
   2243     __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::STUB), r0);
   2244     __ bne(&no_interpreter_frame);
   2245     __ LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   2246     __ bind(&no_interpreter_frame);
   2247   }
   2248 
   2249   // Check if next frame is an arguments adaptor frame.
   2250   Register caller_args_count_reg = scratch1;
   2251   Label no_arguments_adaptor, formal_parameter_count_loaded;
   2252   __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   2253   __ LoadP(
   2254       scratch3,
   2255       MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
   2256   __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
   2257   __ bne(&no_arguments_adaptor);
   2258 
   2259   // Drop current frame and load arguments count from arguments adaptor frame.
   2260   __ LoadRR(fp, scratch2);
   2261   __ LoadP(caller_args_count_reg,
   2262            MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
   2263   __ SmiUntag(caller_args_count_reg);
   2264   __ b(&formal_parameter_count_loaded);
   2265 
   2266   __ bind(&no_arguments_adaptor);
   2267   // Load the caller's formal parameter count.
   2268   __ LoadP(scratch1,
   2269            MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
   2270   __ LoadP(scratch1,
   2271            FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
   2272   __ LoadW(caller_args_count_reg,
   2273            FieldMemOperand(scratch1,
   2274                            SharedFunctionInfo::kFormalParameterCountOffset));
   2275 #if !V8_TARGET_ARCH_S390X
   2276   __ SmiUntag(caller_args_count_reg);
   2277 #endif
   2278 
   2279   __ bind(&formal_parameter_count_loaded);
   2280 
   2281   ParameterCount callee_args_count(args_reg);
   2282   __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
   2283                         scratch3);
   2284   __ bind(&done);
   2285 }
   2286 }  // namespace
   2287 
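        // Net effect of PrepareForTailCall above, when tail call elimination is
        // enabled (a sketch; frames are probed via their frame-type slots):
        //
        //   if (current frame is a STUB frame) fp = caller_fp;  // drop it
        //   n = caller sits in an adaptor frame ? adaptor's argument count
        //                                       : caller's formal parameter count;
        //   PrepareForTailCall(actual_argc, n);  // drops the frame(s) and slides
        //                                        // the new arguments into place
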
   2288 // static
   2289 void Builtins::Generate_CallFunction(MacroAssembler* masm,
   2290                                      ConvertReceiverMode mode,
   2291                                      TailCallMode tail_call_mode) {
   2292   // ----------- S t a t e -------------
   2293   //  -- r2 : the number of arguments (not including the receiver)
   2294   //  -- r3 : the function to call (checked to be a JSFunction)
   2295   // -----------------------------------
   2296   __ AssertFunction(r3);
   2297 
   2298   // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
   2299   // Check that the function is not a "classConstructor".
   2300   Label class_constructor;
   2301   __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
   2302   __ LoadlW(r5, FieldMemOperand(r4, SharedFunctionInfo::kCompilerHintsOffset));
   2303   __ TestBitMask(r5, SharedFunctionInfo::kClassConstructorBits, r0);
   2304   __ bne(&class_constructor);
   2305 
   2306   // Enter the context of the function; ToObject has to run in the function
   2307   // context, and we also need to take the global proxy from the function
   2308   // context in case of conversion.
   2309   __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));
   2310   // We need to convert the receiver for non-native sloppy mode functions.
   2311   Label done_convert;
   2312   __ AndP(r0, r5, Operand((1 << SharedFunctionInfo::kStrictModeBit) |
   2313                           (1 << SharedFunctionInfo::kNativeBit)));
   2314   __ bne(&done_convert);
   2315   {
   2316     // ----------- S t a t e -------------
   2317     //  -- r2 : the number of arguments (not including the receiver)
   2318     //  -- r3 : the function to call (checked to be a JSFunction)
   2319     //  -- r4 : the shared function info.
   2320     //  -- cp : the function context.
   2321     // -----------------------------------
   2322 
   2323     if (mode == ConvertReceiverMode::kNullOrUndefined) {
   2324       // Patch receiver to global proxy.
   2325       __ LoadGlobalProxy(r5);
   2326     } else {
   2327       Label convert_to_object, convert_receiver;
   2328       __ ShiftLeftP(r5, r2, Operand(kPointerSizeLog2));
   2329       __ LoadP(r5, MemOperand(sp, r5));
   2330       __ JumpIfSmi(r5, &convert_to_object);
   2331       STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
   2332       __ CompareObjectType(r5, r6, r6, FIRST_JS_RECEIVER_TYPE);
   2333       __ bge(&done_convert);
   2334       if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
   2335         Label convert_global_proxy;
   2336         __ JumpIfRoot(r5, Heap::kUndefinedValueRootIndex,
   2337                       &convert_global_proxy);
   2338         __ JumpIfNotRoot(r5, Heap::kNullValueRootIndex, &convert_to_object);
   2339         __ bind(&convert_global_proxy);
   2340         {
   2341           // Patch receiver to global proxy.
   2342           __ LoadGlobalProxy(r5);
   2343         }
   2344         __ b(&convert_receiver);
   2345       }
   2346       __ bind(&convert_to_object);
   2347       {
   2348         // Convert receiver using ToObject.
   2349         // TODO(bmeurer): Inline the allocation here to avoid building the frame
   2350         // in the fast case? (fall back to AllocateInNewSpace?)
   2351         FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
   2352         __ SmiTag(r2);
   2353         __ Push(r2, r3);
   2354         __ LoadRR(r2, r5);
   2355         ToObjectStub stub(masm->isolate());
   2356         __ CallStub(&stub);
   2357         __ LoadRR(r5, r2);
   2358         __ Pop(r2, r3);
   2359         __ SmiUntag(r2);
   2360       }
   2361       __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
   2362       __ bind(&convert_receiver);
   2363     }
   2364     __ ShiftLeftP(r6, r2, Operand(kPointerSizeLog2));
   2365     __ StoreP(r5, MemOperand(sp, r6));
   2366   }
   2367   __ bind(&done_convert);
   2368 
   2369   // ----------- S t a t e -------------
   2370   //  -- r2 : the number of arguments (not including the receiver)
   2371   //  -- r3 : the function to call (checked to be a JSFunction)
   2372   //  -- r4 : the shared function info.
   2373   //  -- cp : the function context.
   2374   // -----------------------------------
   2375 
   2376   if (tail_call_mode == TailCallMode::kAllow) {
   2377     PrepareForTailCall(masm, r2, r5, r6, r7);
   2378   }
   2379 
   2380   __ LoadW(
   2381       r4, FieldMemOperand(r4, SharedFunctionInfo::kFormalParameterCountOffset));
   2382 #if !V8_TARGET_ARCH_S390X
   2383   __ SmiUntag(r4);
   2384 #endif
   2385   ParameterCount actual(r2);
   2386   ParameterCount expected(r4);
   2387   __ InvokeFunctionCode(r3, no_reg, expected, actual, JUMP_FUNCTION,
   2388                         CheckDebugStepCallWrapper());
   2389 
   2390   // The function is a "classConstructor", need to raise an exception.
   2391   __ bind(&class_constructor);
   2392   {
   2393     FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
   2394     __ push(r3);
   2395     __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
   2396   }
   2397 }
   2398 
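        // The conversion block above is, in outline, the receiver binding of
        // ES6 9.2.1 for sloppy-mode, non-native callees (a sketch):
        //
        //   receiver = sp[argc * kPointerSize];
        //   if (receiver is undefined or null) receiver = global proxy;
        //   else if (!IsJSReceiver(receiver))  receiver = ToObject(receiver);
        //   sp[argc * kPointerSize] = receiver;
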
   2399 namespace {
   2400 
   2401 void Generate_PushBoundArguments(MacroAssembler* masm) {
   2402   // ----------- S t a t e -------------
   2403   //  -- r2 : the number of arguments (not including the receiver)
   2404   //  -- r3 : target (checked to be a JSBoundFunction)
   2405   //  -- r5 : new.target (only in case of [[Construct]])
   2406   // -----------------------------------
   2407 
   2408   // Load [[BoundArguments]] into r4 and length of that into r6.
   2409   Label no_bound_arguments;
   2410   __ LoadP(r4, FieldMemOperand(r3, JSBoundFunction::kBoundArgumentsOffset));
   2411   __ LoadP(r6, FieldMemOperand(r4, FixedArray::kLengthOffset));
   2412   __ SmiUntag(r6);
   2413   __ LoadAndTestP(r6, r6);
   2414   __ beq(&no_bound_arguments);
   2415   {
   2416     // ----------- S t a t e -------------
   2417     //  -- r2 : the number of arguments (not including the receiver)
   2418     //  -- r3 : target (checked to be a JSBoundFunction)
   2419     //  -- r4 : the [[BoundArguments]] (implemented as FixedArray)
   2420     //  -- r5 : new.target (only in case of [[Construct]])
   2421     //  -- r6 : the number of [[BoundArguments]]
   2422     // -----------------------------------
   2423 
   2424     // Reserve stack space for the [[BoundArguments]].
   2425     {
   2426       Label done;
   2427       __ LoadRR(r8, sp);  // preserve previous stack pointer
   2428       __ ShiftLeftP(r9, r6, Operand(kPointerSizeLog2));
   2429       __ SubP(sp, sp, r9);
   2430       // Check the stack for overflow. We are not trying to catch interruptions
   2431       // (i.e. debug break and preemption) here, so check the "real stack
   2432       // limit".
   2433       __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
   2434       __ bgt(&done);  // Signed comparison.
   2435       // Restore the stack pointer.
   2436       __ LoadRR(sp, r8);
   2437       {
   2438         FrameScope scope(masm, StackFrame::MANUAL);
   2439         __ EnterFrame(StackFrame::INTERNAL);
   2440         __ CallRuntime(Runtime::kThrowStackOverflow);
   2441       }
   2442       __ bind(&done);
   2443     }
   2444 
   2445     // Relocate arguments down the stack.
   2446     //  -- r2 : the number of arguments (not including the receiver)
   2447     //  -- r8 : the previous stack pointer
   2448     //  -- r9: the size of the [[BoundArguments]]
   2449     {
   2450       Label skip, loop;
   2451       __ LoadImmP(r7, Operand::Zero());
   2452       __ CmpP(r2, Operand::Zero());
   2453       __ beq(&skip);
   2454       __ LoadRR(r1, r2);
   2455       __ bind(&loop);
   2456       __ LoadP(r0, MemOperand(r8, r7));
   2457       __ StoreP(r0, MemOperand(sp, r7));
   2458       __ AddP(r7, r7, Operand(kPointerSize));
   2459       __ BranchOnCount(r1, &loop);
   2460       __ bind(&skip);
   2461     }
   2462 
   2463     // Copy [[BoundArguments]] to the stack (below the arguments).
   2464     {
   2465       Label loop;
   2466       __ AddP(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   2467       __ AddP(r4, r4, r9);
   2468       __ LoadRR(r1, r6);
   2469       __ bind(&loop);
   2470       __ LoadP(r0, MemOperand(r4, -kPointerSize));
   2471       __ lay(r4, MemOperand(r4, -kPointerSize));
   2472       __ StoreP(r0, MemOperand(sp, r7));
   2473       __ AddP(r7, r7, Operand(kPointerSize));
   2474       __ BranchOnCount(r1, &loop);
   2475       __ AddP(r2, r2, r6);
   2476     }
   2477   }
   2478   __ bind(&no_bound_arguments);
   2479 }
   2480 
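        // The three blocks above turn the stack layout
        //   [receiver, arg1 .. argN]                      (argc == N)
        // into
        //   [receiver, bound1 .. boundM, arg1 .. argN]    (argc == N + M)
        // so the eventual call behaves like
        //   target(...bound_arguments, ...original_arguments).
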
   2481 }  // namespace
   2482 
   2483 // static
   2484 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
   2485                                               TailCallMode tail_call_mode) {
   2486   // ----------- S t a t e -------------
   2487   //  -- r2 : the number of arguments (not including the receiver)
   2488   //  -- r3 : the function to call (checked to be a JSBoundFunction)
   2489   // -----------------------------------
   2490   __ AssertBoundFunction(r3);
   2491 
   2492   if (tail_call_mode == TailCallMode::kAllow) {
   2493     PrepareForTailCall(masm, r2, r5, r6, r7);
   2494   }
   2495 
   2496   // Patch the receiver to [[BoundThis]].
   2497   __ LoadP(ip, FieldMemOperand(r3, JSBoundFunction::kBoundThisOffset));
   2498   __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2));
   2499   __ StoreP(ip, MemOperand(sp, r1));
   2500 
   2501   // Push the [[BoundArguments]] onto the stack.
   2502   Generate_PushBoundArguments(masm);
   2503 
   2504   // Call the [[BoundTargetFunction]] via the Call builtin.
   2505   __ LoadP(r3,
   2506            FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
   2507   __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
   2508                                        masm->isolate())));
   2509   __ LoadP(ip, MemOperand(ip));
   2510   __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
   2511   __ JumpToJSEntry(ip);
   2512 }
   2513 
   2514 // static
   2515 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
   2516                              TailCallMode tail_call_mode) {
   2517   // ----------- S t a t e -------------
   2518   //  -- r2 : the number of arguments (not including the receiver)
   2519   //  -- r3 : the target to call (can be any Object).
   2520   // -----------------------------------
   2521 
   2522   Label non_callable, non_function, non_smi;
   2523   __ JumpIfSmi(r3, &non_callable);
   2524   __ bind(&non_smi);
   2525   __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE);
   2526   __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
   2527           RelocInfo::CODE_TARGET, eq);
   2528   __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE));
   2529   __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
   2530           RelocInfo::CODE_TARGET, eq);
   2531 
   2532   // Check if target has a [[Call]] internal method.
   2533   __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
   2534   __ TestBit(r6, Map::kIsCallable);
   2535   __ beq(&non_callable);
   2536 
   2537   __ CmpP(r7, Operand(JS_PROXY_TYPE));
   2538   __ bne(&non_function);
   2539 
   2540   // 0. Prepare for tail call if necessary.
   2541   if (tail_call_mode == TailCallMode::kAllow) {
   2542     PrepareForTailCall(masm, r2, r5, r6, r7);
   2543   }
   2544 
   2545   // 1. Runtime fallback for Proxy [[Call]].
   2546   __ Push(r3);
   2547   // Increase the arguments size to include the pushed function and the
   2548   // existing receiver on the stack.
   2549   __ AddP(r2, r2, Operand(2));
   2550   // Tail-call to the runtime.
   2551   __ JumpToExternalReference(
   2552       ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
   2553 
   2554   // 2. Call to something else, which might have a [[Call]] internal method (if
   2555   // not we raise an exception).
   2556   __ bind(&non_function);
   2557   // Overwrite the original receiver with the (original) target.
   2558   __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2));
   2559   __ StoreP(r3, MemOperand(sp, r7));
   2560   // Let the "call_as_function_delegate" take care of the rest.
   2561   __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r3);
   2562   __ Jump(masm->isolate()->builtins()->CallFunction(
   2563               ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
   2564           RelocInfo::CODE_TARGET);
   2565 
   2566   // 3. Call to something that is not callable.
   2567   __ bind(&non_callable);
   2568   {
   2569     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
   2570     __ Push(r3);
   2571     __ CallRuntime(Runtime::kThrowCalledNonCallable);
   2572   }
   2573 }

// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the constructor to call (checked to be a JSFunction)
  //  -- r5 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(r3);

  // The calling convention for function-specific ConstructStubs requires
  // r4 to contain either an AllocationSite or undefined.
  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kConstructStubOffset));
  __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}
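
// Note: r4 is cleared to undefined above because this generic entry point has
// no AllocationSite at hand; construct stubs that consult r4 (e.g. the Array
// stubs) treat undefined as "no allocation-site feedback available".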

// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the function to call (checked to be a JSBoundFunction)
  //  -- r5 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(r3);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  Label skip;
  __ CmpP(r3, r5);
  __ bne(&skip);
  __ LoadP(r5,
           FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
  __ bind(&skip);

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ LoadP(r3,
           FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ LoadP(ip, MemOperand(ip));
  __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}
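
// For illustration only (JS semantics, not part of the generated code):
//
//   function Point(x, y) { this.x = x; this.y = y; }
//   var BoundPoint = Point.bind(null, 1);
//   new BoundPoint(2);  // constructs Point(1, 2); inside Point,
//                       // new.target === Point (patched above)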

// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the constructor to call (checked to be a JSProxy)
  //  -- r5 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(r3, r5);
  // Include the pushed new_target, constructor, and the receiver.
  __ AddP(r2, r2, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
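
// For illustration only (JS semantics, not part of the generated code): the
// runtime invokes the proxy's "construct" trap if one is installed, e.g.
//
//   var P = new Proxy(function () {}, {
//     construct(target, args, newTarget) { return {args: args}; }
//   });
//   new P(1, 2);  // => {args: [1, 2]}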

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the constructor to call (can be any Object)
  //  -- r5 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(r3, &non_constructor);

  // Dispatch based on instance type.
  __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method.
  __ LoadlB(r4, FieldMemOperand(r6, Map::kBitFieldOffset));
  __ TestBit(r4, Map::kIsConstructor);
  __ beq(&non_constructor);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ CmpP(r7, Operand(JS_PROXY_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2));
    __ StoreP(r3, MemOperand(sp, r7));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r3);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
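
// For illustration only, the dispatch order implemented above:
//   1. Smi                       -> not a constructor, throw TypeError
//   2. JSFunction                -> ConstructFunction builtin
//   3. no [[Construct]] method   -> throw TypeError
//                                   (ConstructedNonConstructable)
//   4. JSBoundFunction           -> ConstructBoundFunction builtin
//   5. JSProxy                   -> ConstructProxy builtin
//   6. other constructor object  -> CALL_AS_CONSTRUCTOR_DELEGATE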

// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : requested object size (untagged)
  //  -- lr : return address
  // -----------------------------------
  __ SmiTag(r3);
  __ Push(r3);
  __ LoadSmiLiteral(cp, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : requested object size (untagged)
  //  -- lr : return address
  // -----------------------------------
  __ SmiTag(r3);
  __ LoadSmiLiteral(r4, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(r3, r4);
  __ LoadSmiLiteral(cp, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
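
// Note on the two allocation builtins above: the untagged size is Smi-tagged
// so the runtime can consume it, and cp is loaded with Smi zero, which marks
// the runtime call as having no JavaScript context.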

// static
void Builtins::Generate_StringToNumber(MacroAssembler* masm) {
  // The StringToNumber stub takes one argument in r2.
  __ AssertString(r2);

  // Check if string has a cached array index.
  Label runtime;
  __ LoadlW(r4, FieldMemOperand(r2, String::kHashFieldOffset));
  __ And(r0, r4, Operand(String::kContainsCachedArrayIndexMask));
  __ bne(&runtime);
  __ IndexFromHash(r4, r2);
  __ Ret();

  __ bind(&runtime);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ push(r2);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kStringToNumber);
  }
  __ Ret();
}
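
// For illustration only: strings that denote small array indices cache the
// index in their hash field, so e.g. Number("123") is answered directly via
// IndexFromHash, while e.g. Number("1.5") takes the runtime path.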

// static
void Builtins::Generate_ToNumber(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in r2.
  STATIC_ASSERT(kSmiTag == 0);
  __ TestIfSmi(r2);
  __ Ret(eq);

  __ CompareObjectType(r2, r3, r3, HEAP_NUMBER_TYPE);
  // r2: receiver
  // r3: receiver instance type
  __ Ret(eq);

  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) {
  // The NonNumberToNumber stub takes one argument in r2.
  __ AssertNotNumber(r2);

  __ CompareObjectType(r2, r3, r3, FIRST_NONSTRING_TYPE);
  // r2: receiver
  // r3: receiver instance type
  __ Jump(masm->isolate()->builtins()->StringToNumber(), RelocInfo::CODE_TARGET,
          lt);

  Label not_oddball;
  __ CmpP(r3, Operand(ODDBALL_TYPE));
  __ bne(&not_oddball);
  __ LoadP(r2, FieldMemOperand(r2, Oddball::kToNumberOffset));
  __ Ret();
  __ bind(&not_oddball);

  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ push(r2);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kToNumber);
  }
  __ Ret();
}
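
// For illustration only, the ToNumber fast paths above:
//   ToNumber(42)    -> 42    (Smi, handled in Generate_ToNumber)
//   ToNumber(1.5)   -> 1.5   (HeapNumber, handled in Generate_ToNumber)
//   ToNumber("12")  -> 12    (String, via Generate_StringToNumber)
//   ToNumber(true)  -> 1     (Oddball, read from Oddball::kToNumberOffset)
//   ToNumber({})    -> NaN   (everything else via Runtime::kToNumber)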

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : actual number of arguments
  //  -- r3 : function (passed through to callee)
  //  -- r4 : expected number of arguments
  //  -- r5 : new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ LoadP(ip, FieldMemOperand(r3, JSFunction::kCodeEntryOffset));
  __ CmpP(r2, r4);
  __ blt(&too_few);
  __ CmpP(r4, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ beq(&dont_adapt_arguments);
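  // Note: functions compiled to cope with any argument count (e.g. many
  // builtins) advertise kDontAdaptArgumentsSentinel as their expected count,
  // so the trampoline jumps straight to their code entry without building an
  // adaptor frame.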

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into r2 and copy end address into r6.
    // r2: actual number of arguments as a smi
    // r3: function
    // r4: expected number of arguments
    // r5: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r2, r2);
    __ AddP(r2, fp);
    // Adjust for the return address and receiver.
    __ AddP(r2, r2, Operand(2 * kPointerSize));
    __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2));
    __ SubP(r6, r2, r6);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r2: copy start address
    // r3: function
    // r4: expected number of arguments
    // r5: new target (passed through to callee)
    // r6: copy end address
    // ip: code entry to call

    Label copy;
    __ bind(&copy);
    __ LoadP(r0, MemOperand(r2, 0));
    __ push(r0);
    __ CmpP(r2, r6);  // Compare before moving to next argument.
    __ lay(r2, MemOperand(r2, -kPointerSize));
    __ bne(&copy);

    __ b(&invoke);
  }
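
  // For illustration only, with actual = 3 and expected = 2: the copy above
  // starts at the receiver (the highest address) and pushes expected + 1 = 3
  // slots (receiver, arg1, arg2) into the adaptor frame; the surplus third
  // argument is not copied and is dropped with the adaptor frame on return.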

  {  // Too few parameters: actual < expected
    __ bind(&too_few);

    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate the copy start address into r2; the copy end address is fp.
    // r2: actual number of arguments as a smi
    // r3: function
    // r4: expected number of arguments
    // r5: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r2, r2);
    __ lay(r2, MemOperand(r2, fp));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r2: copy start address
    // r3: function
    // r4: expected number of arguments
    // r5: new target (passed through to callee)
    // ip: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ LoadP(r0, MemOperand(r2, 2 * kPointerSize));
    __ push(r0);
    __ CmpP(r2, fp);  // Compare before moving to next argument.
    __ lay(r2, MemOperand(r2, -kPointerSize));
    __ bne(&copy);

    // Fill the remaining expected arguments with undefined.
    // r3: function
    // r4: expected number of arguments
    // ip: code entry to call
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2));
    __ SubP(r6, fp, r6);
    // Adjust for frame.
    __ SubP(r6, r6, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));
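    // For illustration only: the adaptor frame must end up holding
    // expected + 1 argument slots (receiver + expected arguments). The copy
    // above pushed actual + 1 slots, so r6 marks the sp at which exactly
    // expected - actual undefined values have been pushed by the loop below.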

    Label fill;
    __ bind(&fill);
    __ push(r0);
    __ CmpP(sp, r6);
    __ bne(&fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ LoadRR(r2, r4);
  // r2 : expected number of arguments
  // r3 : function (passed through to callee)
  // r5 : new target (passed through to callee)
  __ CallJSEntry(ip);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ JumpToJSEntry(ip);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);
  }
}
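
// For illustration only (JS view of the adaptor):
//
//   function f(a, b, c) { return [a, b, c]; }
//   f(1);           // too few: b and c are filled with undefined
//   f(1, 2, 3, 4);  // enough: 4 is not bound to a formal parameter, but it
//                   // remains visible through the arguments object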

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_S390