// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS64

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : target
  //  -- a3                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument
  //  -- sp[8 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(a1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // JumpToExternalReference expects a0 to contain the number of arguments
  // including the receiver and the extra arguments.
  const int num_extra_args = 3;
  __ Daddu(a0, a0, num_extra_args + 1);

  // Insert extra arguments.
  __ SmiTag(a0);
  __ Push(a0, a1, a3);
  __ SmiUntag(a0);

  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
                             PROTECT, exit_frame_type == BUILTIN_EXIT);
}
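
// Illustrative sketch (not part of the build): for a JS-level call with two
// arguments, the adaptor above pushes three extra values (the smi-tagged
// argument count, the target and new.target) and reports a count that also
// includes the receiver:
//   a0 = 2 (arguments) + 3 (num_extra_args) + 1 (receiver) = 6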

// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}

// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}

void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray function should be a map.
    __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, a4);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, a4,
              Operand(zero_reg));
    __ GetObjectType(a2, a3, a4);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction, a4,
              Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // The initial map for the builtin Array function should be a map.
    __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, a4);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1, a4,
              Operand(zero_reg));
    __ GetObjectType(a2, a3, a4);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2, a4,
              Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ mov(a3, a1);
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : function
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero-based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in t1 and the double value in f0.
  __ LoadRoot(t1, root_index);
  __ ldc1(f0, FieldMemOperand(t1, HeapNumber::kValueOffset));

  Label done_loop, loop, done;
  __ mov(a3, a0);
  __ bind(&loop);
  {
    // Check if all parameters are done.
    __ Dsubu(a3, a3, Operand(1));
    __ Branch(&done_loop, lt, a3, Operand(zero_reg));

    // Load the next parameter tagged value into a2.
    __ Dlsa(at, sp, a3, kPointerSizeLog2);
    __ ld(a2, MemOperand(at));

    // Load the double value of the parameter into f2, converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(a2, &convert_smi);
    __ ld(a4, FieldMemOperand(a2, HeapObject::kMapOffset));
    __ JumpIfRoot(a4, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(a0);
      __ SmiTag(a3);
      __ EnterBuiltinFrame(cp, a1, a0);
      __ Push(t1, a3);
      __ mov(a0, a2);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mov(a2, v0);
      __ Pop(t1, a3);
      __ LeaveBuiltinFrame(cp, a1, a0);
      __ SmiUntag(a3);
      __ SmiUntag(a0);
      {
        // Restore the double accumulator value (f0).
        Label restore_smi, done_restore;
        __ JumpIfSmi(t1, &restore_smi);
        __ ldc1(f0, FieldMemOperand(t1, HeapNumber::kValueOffset));
        __ jmp(&done_restore);
        __ bind(&restore_smi);
        __ SmiToDoubleFPURegister(t1, f0, a4);
        __ bind(&done_restore);
      }
    }
    __ jmp(&convert);
    __ bind(&convert_number);
    __ ldc1(f2, FieldMemOperand(a2, HeapNumber::kValueOffset));
    __ jmp(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDoubleFPURegister(a2, f2, a4);
    __ bind(&done_convert);

    // Perform the actual comparison using the Min/Max macro instructions, with
    // the accumulator value on the left-hand side (f0) and the next parameter
    // value on the right-hand side (f2).
    // We need to work out which HeapNumber (or smi) the result came from.
    Label compare_nan, ool_min, ool_max;
    __ BranchF(nullptr, &compare_nan, eq, f0, f2);
    __ Move(a4, f0);
    if (kind == MathMaxMinKind::kMin) {
      __ Float64Min(f0, f0, f2, &ool_min);
    } else {
      DCHECK(kind == MathMaxMinKind::kMax);
      __ Float64Max(f0, f0, f2, &ool_max);
    }
    __ jmp(&done);

    __ bind(&ool_min);
    __ Float64MinOutOfLine(f0, f0, f2);
    __ jmp(&done);

    __ bind(&ool_max);
    __ Float64MaxOutOfLine(f0, f0, f2);

    __ bind(&done);
    __ Move(at, f0);
    __ Branch(&loop, eq, a4, Operand(at));
    __ mov(t1, a2);
    __ jmp(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    __ bind(&compare_nan);
    __ LoadRoot(t1, Heap::kNanValueRootIndex);
    __ ldc1(f0, FieldMemOperand(t1, HeapNumber::kValueOffset));
    __ jmp(&loop);
  }

  __ bind(&done_loop);
  // Drop all slots, including the receiver.
  __ Daddu(a0, a0, Operand(1));
  __ Dlsa(sp, sp, a0, kPointerSizeLog2);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, t1);  // In delay slot.
}
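
// Illustrative sketch (not part of the build) of the JS-level semantics the
// loop above implements, depending on which kind was requested:
//   Math.max()           // -Infinity, the accumulator's default value
//   Math.min()           // +Infinity
//   Math.max(1, NaN, 3)  // NaN: compare_nan loads the NaN root and loops on
//   Math.max('2', 1)     // 2: '2' goes through the ToNumber builtin first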

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------

  // 1. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Dsubu(t1, a0, Operand(1));  // In delay slot.
    __ mov(t0, a0);                // Store argc in t0.
    __ Dlsa(at, sp, t1, kPointerSizeLog2);
    __ ld(a0, MemOperand(at));
  }

  // 2a. Convert first argument to number.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(t0);
    __ EnterBuiltinFrame(cp, a1, t0);
    __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, a1, t0);
    __ SmiUntag(t0);
  }

  {
    // Drop all arguments including the receiver.
    __ Dlsa(sp, sp, t0, kPointerSizeLog2);
    __ DropAndRet(1);
  }

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ Move(v0, Smi::kZero);
  __ DropAndRet(1);
}
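
// Illustrative sketch (not part of the build) of the [[Call]] behaviour
// generated above:
//   Number()         // +0, via the no_arguments path (Smi::kZero)
//   Number('1.5')    // 1.5, the first argument converted with ToNumber
//   Number(1, 2, 3)  // 1, only the first argument is used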

void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- a3                     : new target
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // 2. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ mov(t0, a0);  // Store argc in t0.
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Dsubu(a0, a0, Operand(1));  // In delay slot.
    __ Dlsa(at, sp, a0, kPointerSizeLog2);
    __ ld(a0, MemOperand(at));
    __ jmp(&done);
    __ bind(&no_arguments);
    __ Move(a0, Smi::kZero);
    __ bind(&done);
  }

  // 3. Make sure a0 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(a0, &done_convert);
    __ GetObjectType(a0, a2, a2);
    __ Branch(&done_convert, eq, a2, Operand(HEAP_NUMBER_TYPE));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(t0);
      __ EnterBuiltinFrame(cp, a1, t0);
      __ Push(a3);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Move(a0, v0);
      __ Pop(a3);
      __ LeaveBuiltinFrame(cp, a1, t0);
      __ SmiUntag(t0);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ Branch(&new_object, ne, a1, Operand(a3));

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(v0, a1, a0, a2, t1, &new_object);
  __ jmp(&drop_frame_and_ret);

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(t0);
    __ EnterBuiltinFrame(cp, a1, t0);
    __ Push(a0);
    __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
            RelocInfo::CODE_TARGET);
    __ Pop(a0);
    __ LeaveBuiltinFrame(cp, a1, t0);
    __ SmiUntag(t0);
  }
  __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&drop_frame_and_ret);
  {
    __ Dlsa(sp, sp, t0, kPointerSizeLog2);
    __ DropAndRet(1);
  }
}
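
// Illustrative sketch (not part of the build) of the [[Construct]] behaviour
// generated above:
//   typeof new Number(42)     // 'object', a JSValue wrapper is allocated
//   new Number(42).valueOf()  // 42, stored at JSValue::kValueOffset
// If new.target differs from the constructor (e.g. via Reflect.construct),
// the new_object path defers to the FastNewObject stub instead.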

// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------

  // 1. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Dsubu(t1, a0, Operand(1));  // In delay slot.
    __ mov(t0, a0);                // Store argc in t0.
    __ Dlsa(at, sp, t1, kPointerSizeLog2);
    __ ld(a0, MemOperand(at));
  }

  // 2a. At least one argument; return a0 if it's a string, otherwise
  // dispatch to the appropriate conversion.
  Label drop_frame_and_ret, to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(a0, &to_string);
    __ GetObjectType(a0, t1, t1);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ Subu(t1, t1, Operand(FIRST_NONSTRING_TYPE));
    __ Branch(&symbol_descriptive_string, eq, t1, Operand(zero_reg));
    __ Branch(&to_string, gt, t1, Operand(zero_reg));
    __ mov(v0, a0);
    __ jmp(&drop_frame_and_ret);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(v0, Heap::kempty_stringRootIndex);
    __ DropAndRet(1);
  }

  // 3a. Convert a0 to a string.
  __ bind(&to_string);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(t0);
    __ EnterBuiltinFrame(cp, a1, t0);
    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, a1, t0);
    __ SmiUntag(t0);
  }
  __ jmp(&drop_frame_and_ret);

  // 3b. Convert symbol in a0 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Dlsa(sp, sp, t0, kPointerSizeLog2);
    __ Drop(1);
    __ Push(a0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }

  __ bind(&drop_frame_and_ret);
  {
    __ Dlsa(sp, sp, t0, kPointerSizeLog2);
    __ DropAndRet(1);
  }
}
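
// Illustrative sketch (not part of the build) of the [[Call]] behaviour
// generated above:
//   String()             // '', the empty-string root
//   String('x')          // 'x', strings are returned unchanged
//   String(1 + 1)        // '2', via the ToString builtin
//   String(Symbol('a'))  // 'Symbol(a)', via Runtime::kSymbolDescriptiveString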

void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- a3                     : new target
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // 2. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ mov(t0, a0);  // Store argc in t0.
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Dsubu(a0, a0, Operand(1));  // In delay slot.
    __ Dlsa(at, sp, a0, kPointerSizeLog2);
    __ ld(a0, MemOperand(at));
    __ jmp(&done);
    __ bind(&no_arguments);
    __ LoadRoot(a0, Heap::kempty_stringRootIndex);
    __ bind(&done);
  }

  // 3. Make sure a0 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(a0, &convert);
    __ GetObjectType(a0, a2, a2);
    __ And(t1, a2, Operand(kIsNotStringMask));
    __ Branch(&done_convert, eq, t1, Operand(zero_reg));
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(t0);
      __ EnterBuiltinFrame(cp, a1, t0);
      __ Push(a3);
      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
      __ Move(a0, v0);
      __ Pop(a3);
      __ LeaveBuiltinFrame(cp, a1, t0);
      __ SmiUntag(t0);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ Branch(&new_object, ne, a1, Operand(a3));

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(v0, a1, a0, a2, t1, &new_object);
  __ jmp(&drop_frame_and_ret);

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(t0);
    __ EnterBuiltinFrame(cp, a1, t0);
    __ Push(a0);
    __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
            RelocInfo::CODE_TARGET);
    __ Pop(a0);
    __ LeaveBuiltinFrame(cp, a1, t0);
    __ SmiUntag(t0);
  }
  __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&drop_frame_and_ret);
  {
    __ Dlsa(sp, sp, t0, kPointerSizeLog2);
    __ DropAndRet(1);
  }
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ ld(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
  __ Daddu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- a0 : argument count (preserved for callee)
  //  -- a1 : target function (preserved for callee)
  //  -- a3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push the argument count, the target function and the new target, then
    // the target function again as the argument to the runtime call.
    __ SmiTag(a0);
    __ Push(a0, a1, a3, a1);

    __ CallRuntime(function_id, 1);
    // Restore target function and new target.
    __ Pop(a0, a1, a3);
    __ SmiUntag(a0);
  }

  __ Daddu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}
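
// Illustrative sketch (not part of the build): the SmiTag/SmiUntag pair above
// keeps the raw argument count valid across the GC-visible runtime call. On
// this 64-bit port a Smi stores its 32-bit payload in the upper word, so the
// operations are effectively:
//   int64_t tagged = static_cast<int64_t>(argc) << 32;      // SmiTag
//   int32_t untagged = static_cast<int32_t>(tagged >> 32);  // SmiUntag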

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against the
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(a4, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(a4));

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

namespace {

void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
                                    bool create_implicit_receiver,
                                    bool check_derived_construct) {
  Label post_instantiation_deopt_entry;

  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a3     : new target
  //  -- cp     : context
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ SmiTag(a0);
    __ Push(cp, a0);

    if (create_implicit_receiver) {
      __ Push(a1, a3);
      __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
              RelocInfo::CODE_TARGET);
      __ mov(t0, v0);
      __ Pop(a1, a3);

      // ----------- S t a t e -------------
      // -- a1: constructor function
      // -- a3: new target
      // -- t0: newly allocated object
      // -----------------------------------
      __ ld(a0, MemOperand(sp));
    }
    __ SmiUntag(a0);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(t0, t0);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Deoptimizer re-enters stub code here.
    __ bind(&post_instantiation_deopt_entry);

    // Set up pointer to last argument.
    __ Daddu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: new target
    // t0: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
    __ mov(t0, a0);
    __ jmp(&entry);
    __ bind(&loop);
    __ Dlsa(a4, a2, t0, kPointerSizeLog2);
    __ ld(a5, MemOperand(a4));
    __ push(a5);
    __ bind(&entry);
    __ Daddu(t0, t0, Operand(-1));
    __ Branch(&loop, greater_equal, t0, Operand(zero_reg));

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    // a3: new target
    ParameterCount actual(a0);
    __ InvokeFunction(a1, a3, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
          masm->pc_offset());
    }

    // Restore context from the frame.
    __ ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(v0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ GetObjectType(v0, a1, a3);
      __ Branch(&exit, greater_equal, a3, Operand(FIRST_JS_RECEIVER_TYPE));

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ ld(v0, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ ld(a1, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ ld(a1, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(v0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  __ SmiScale(a4, a1, kPointerSizeLog2);
  __ Daddu(sp, sp, a4);
  __ Daddu(sp, sp, kPointerSize);
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  }
  __ Ret();

  // Store offset of trampoline address for deoptimizer. This is the bailout
  // point after the receiver instantiation but before the function invocation.
  // We need to restore some registers in order to continue the above code.
  if (create_implicit_receiver && !is_api_function) {
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());

    // ----------- S t a t e -------------
    //  -- a0    : newly allocated object
    //  -- sp[0] : constructor function
    // -----------------------------------

    __ Pop(a1);
    __ Push(a0, a0);

    // Retrieve smi-tagged arguments count from the stack.
    __ ld(a0, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    __ SmiUntag(a0);

    // Retrieve the new target value from the stack. This was placed into the
    // frame description in place of the receiver by the optimizing compiler.
    __ Daddu(a3, fp, Operand(StandardFrameConstants::kCallerSPOffset));
    __ Dlsa(a3, a3, a0, kPointerSizeLog2);
    __ ld(a3, MemOperand(a3));

    // Continue with constructor function invocation.
    __ jmp(&post_instantiation_deopt_entry);
  }
}

}  // namespace

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}

void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}
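
// Illustrative summary (not part of the build) of the flag triples passed to
// Generate_JSConstructStubHelper above, in the order (is_api_function,
// create_implicit_receiver, check_derived_construct):
//   Generic:            (false, true,  false)
//   Api:                (true,  false, false)
//   Builtins:           (false, false, false)
//   BuiltinsForDerived: (false, false, true)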

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : the value to pass to the generator
  //  -- a1 : the JSGeneratorObject to resume
  //  -- a2 : the resume mode (tagged)
  //  -- ra : return address
  // -----------------------------------
  __ AssertGeneratorObject(a1);

  // Store input value into generator object.
  __ sd(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOrDebugPosOffset));
  __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, v0, a3,
                      kRAHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
  __ ld(cp, FieldMemOperand(a4, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ li(a5, Operand(debug_hook));
  __ lb(a5, MemOperand(a5));
  __ Branch(&prepare_step_in_if_stepping, ne, a5, Operand(zero_reg));

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ li(a5, Operand(debug_suspended_generator));
  __ ld(a5, MemOperand(a5));
  __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(a5));
  __ bind(&stepping_prepared);

  // Push receiver.
  __ ld(a5, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
  __ Push(a5);

  // ----------- S t a t e -------------
  //  -- a1    : the JSGeneratorObject to resume
  //  -- a2    : the resume mode (tagged)
  //  -- a4    : generator function
  //  -- cp    : generator context
  //  -- ra    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label done_loop, loop;
    __ bind(&loop);
    __ Dsubu(a3, a3, Operand(1));
    __ Branch(&done_loop, lt, a3, Operand(zero_reg));
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
    __ ld(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset));
    __ GetObjectType(a3, a3, a3);
    __ Assert(eq, kMissingBytecodeArray, a3, Operand(BYTECODE_ARRAY_TYPE));
  }

  // Resume (Ignition/TurboFan) generator object.
  {
    __ ld(a0, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a0,
          FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(a3, a1);
    __ Move(a1, a4);
    __ ld(a2, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
    __ Jump(a2);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1, a2, a4);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(a1, a2);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1, a2);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(a1, a2);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
}
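
// Illustrative sketch (not part of the build) of the resume protocol above:
// for
//   function* g(a) { yield a; }
//   const it = g(1); it.next(); it.next(2);
// each resume stores the sent value into the generator object, pushes the
// receiver plus one hole per formal parameter (here: one, for 'a'), and jumps
// to the generator's code with the generator object passed in new.target (a3).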

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(a1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };

// Clobbers a2 and a7; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
  // Make a2 the space we have left. The stack might already be overflowed
  // here, which will cause a2 to become negative.
  __ dsubu(a2, sp, a2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ SmiScale(a7, v0, kPointerSizeLog2);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ dsll(a7, argc, kPointerSizeLog2);
  }
  __ Branch(&okay, gt, a2, Operand(a7));  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
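
// Illustrative sketch (not part of the build) of the check above in C terms:
//   intptr_t headroom = sp - real_stack_limit;  // may already be negative
//   intptr_t needed = argc * kPointerSize;      // via dsll, or SmiScale when
//                                               // argc is smi-tagged
//   if (headroom <= needed) ThrowStackOverflow();  // signed comparison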

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: new.target
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ li(cp, Operand(context_address));
    __ ld(cp, MemOperand(cp));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Check if we have enough stack space to push all arguments.
    // Clobbers a2 and a7.
    Generate_CheckStackOverflow(masm, a3, kArgcIsUntaggedInt);

    // Remember new.target.
    __ mov(a5, a0);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ Dlsa(a6, s0, a3, kPointerSizeLog2);
    __ b(&entry);
    __ nop();  // Branch delay slot nop.
    // a6 points past last arg.
    __ bind(&loop);
    __ ld(a4, MemOperand(s0));  // Read next parameter.
    __ daddiu(s0, s0, kPointerSize);
    __ ld(a4, MemOperand(a4));  // Dereference handle.
    __ push(a4);                // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(a6));

    // Set up new.target and argc.
    __ mov(a0, a3);
    __ mov(a3, a5);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(a4, Heap::kUndefinedValueRootIndex);
    __ mov(s1, a4);
    __ mov(s2, a4);
    __ mov(s3, a4);
    __ mov(s4, a4);
    __ mov(s5, a4);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }
  __ Jump(ra);
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ ld(args_count,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ lw(args_count,
        FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments.
  __ Daddu(sp, sp, args_count);
}
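
// Illustrative sketch (not part of the build): BytecodeArray's parameter size
// field is measured in bytes and already includes the receiver slot, so the
// epilogue above amounts to
//   sp += bytecode_array->parameter_size();  // drop receiver + arguments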

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o a1: the JS function object being called.
//   o a3: the new target
//   o cp: our context
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(a1);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ ld(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  Label load_debug_bytecode_array, bytecode_array_loaded;
  Register debug_info = kInterpreterBytecodeArrayRegister;
  DCHECK(!debug_info.is(a0));
  __ ld(debug_info, FieldMemOperand(a0, SharedFunctionInfo::kDebugInfoOffset));
  __ JumpIfNotSmi(debug_info, &load_debug_bytecode_array);
  __ ld(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));
  __ bind(&bytecode_array_loaded);

  // Check whether we should continue to use the interpreter.
  Label switch_to_different_code_kind;
  __ ld(a0, FieldMemOperand(a0, SharedFunctionInfo::kCodeOffset));
  __ Branch(&switch_to_different_code_kind, ne, a0,
            Operand(masm->CodeObject()));  // Self-reference to this code.

  // Increment invocation count for the function.
  __ ld(a0, FieldMemOperand(a1, JSFunction::kFeedbackVectorOffset));
  __ ld(a0, FieldMemOperand(a0, Cell::kValueOffset));
  __ ld(a4, FieldMemOperand(
                a0, FeedbackVector::kInvocationCountIndex * kPointerSize +
                        FeedbackVector::kHeaderSize));
  __ Daddu(a4, a4, Operand(Smi::FromInt(1)));
  __ sd(a4, FieldMemOperand(
                a0, FeedbackVector::kInvocationCountIndex * kPointerSize +
                        FeedbackVector::kHeaderSize));

  // Check function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ SmiTst(kInterpreterBytecodeArrayRegister, a4);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a4,
              Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, a4, a4);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a4,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Reset code age.
  DCHECK_EQ(0, BytecodeArray::kNoAgeBytecodeAge);
  __ sb(zero_reg, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                  BytecodeArray::kBytecodeAgeOffset));

  // Load initial bytecode offset.
  __ li(kInterpreterBytecodeOffsetRegister,
        Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(a4, kInterpreterBytecodeOffsetRegister);
  __ Push(a3, kInterpreterBytecodeArrayRegister, a4);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size (word) from the BytecodeArray object.
    __ lw(a4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ Dsubu(a5, sp, Operand(a4));
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    __ Branch(&ok, hs, a5, Operand(a2));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
    __ Branch(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(a5);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ Dsubu(a4, a4, Operand(kPointerSize));
    __ Branch(&loop_header, ge, a4, Operand(zero_reg));
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ li(kInterpreterDispatchTableRegister,
        Operand(ExternalReference::interpreter_dispatch_table_address(
            masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ Daddu(a0, kInterpreterBytecodeArrayRegister,
           kInterpreterBytecodeOffsetRegister);
  __ lbu(a0, MemOperand(a0));
  __ Dlsa(at, kInterpreterDispatchTableRegister, a0, kPointerSizeLog2);
  __ ld(at, MemOperand(at));
  __ Call(at);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in v0.
  LeaveInterpreterFrame(masm, t0);
  __ Jump(ra);

  // Load debug copy of the bytecode array.
  __ bind(&load_debug_bytecode_array);
  __ ld(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex));
  __ Branch(&bytecode_array_loaded);

  // If the shared code is no longer this entry trampoline, then the underlying
  // function has been switched to a different kind of code and we heal the
  // closure by switching the code entry field over to the new code as well.
  __ bind(&switch_to_different_code_kind);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kCodeOffset));
  __ Daddu(a4, a4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ sd(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(a1, a4, a5);
  __ Jump(a4);
}

static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                        Register scratch1, Register scratch2,
                                        Label* stack_overflow) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex);
  // Make scratch1 the space we have left. The stack might already be
  // overflowed here, which will cause scratch1 to become negative.
  __ dsubu(scratch1, sp, scratch1);
  // Check if the arguments will overflow the stack.
  __ dsll(scratch2, num_args, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(stack_overflow, le, scratch1, Operand(scratch2));
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args, Register index,
                                         Register scratch, Register scratch2,
                                         Label* stack_overflow) {
  // Check for stack overflow before pushing the arguments; the callers bind
  // the stack_overflow label and expect it to be reachable from here.
  Generate_StackOverflowCheck(masm, num_args, scratch, scratch2,
                              stack_overflow);

  // Find the address of the last argument.
  __ mov(scratch2, num_args);
  __ dsll(scratch2, scratch2, kPointerSizeLog2);
  __ Dsubu(scratch2, index, Operand(scratch2));

  // Push the arguments.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ ld(scratch, MemOperand(index));
  __ Daddu(index, index, Operand(-kPointerSize));
  __ push(scratch);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, index, Operand(scratch2));
}
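
// Illustrative sketch (not part of the build) of the push loop above:
//   end = index - num_args * kPointerSize;  // one slot past the last argument
//   while (index > end) { push(*index); index -= kPointerSize; }
// i.e. num_args values are copied onto the stack, walking the source area
// downwards from the address initially held in `index`.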

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode,
    InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;

  __ Daddu(a3, a0, Operand(1));  // Add one for receiver.

  // This function modifies a2, t0 and a4.
  Generate_InterpreterPushArgs(masm, a3, a2, a4, t0, &stack_overflow);

  // Call the target.
  if (mode == InterpreterPushArgsMode::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                      tail_call_mode),
            RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Jump(masm->isolate()->builtins()->CallWithSpread(),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                              tail_call_mode),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ break_(0xCC);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  // -- a0 : argument count (not including receiver)
  // -- a3 : new target
  // -- a1 : constructor to call
  // -- a2 : allocation site feedback if available, undefined otherwise.
  // -- a4 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  // Push a slot for the receiver.
  __ push(zero_reg);

  // This function modifies t0, a4 and a5.
  Generate_InterpreterPushArgs(masm, a0, a4, a5, t0, &stack_overflow);

  __ AssertUndefinedOrAllocationSite(a2, t0);
  if (mode == InterpreterPushArgsMode::kJSFunction) {
    __ AssertFunction(a1);

    // Tail call to the function-specific construct stub (still in the caller
    // context at this point).
    __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset));
    __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ Jump(at);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // Call the constructor with a0, a1, and a3 unmodified.
    __ Jump(masm->isolate()->builtins()->ConstructWithSpread(),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    // Call the constructor with a0, a1, and a3 unmodified.
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ break_(0xCC);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstructArray(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the target to call, checked to be the Array function.
   1276   //  -- a2 : allocation site feedback.
   1277   //  -- a3 : the address of the first argument to be pushed. Subsequent
   1278   //          arguments should be consecutive above this, in the same order as
   1279   //          they are to be pushed onto the stack.
   1280   // -----------------------------------
   1281   Label stack_overflow;
   1282 
   1283   __ Daddu(a4, a0, Operand(1));  // Add one for receiver.
   1284 
   1285   // This function modifies a3, a5 and a6.
   1286   Generate_InterpreterPushArgs(masm, a4, a3, a5, a6, &stack_overflow);
   1287 
   1288   // ArrayConstructor stub expects constructor in a3. Set it here.
   1289   __ mov(a3, a1);
   1290 
   1291   ArrayConstructorStub stub(masm->isolate());
   1292   __ TailCallStub(&stub);
   1293 
   1294   __ bind(&stack_overflow);
   1295   {
   1296     __ TailCallRuntime(Runtime::kThrowStackOverflow);
   1297     // Unreachable code.
   1298     __ break_(0xCC);
   1299   }
   1300 }
   1301 
   1302 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
   1303   // Set the return address to the correct point in the interpreter entry
   1304   // trampoline.
   1305   Smi* interpreter_entry_return_pc_offset(
   1306       masm->isolate()->heap()->interpreter_entry_return_pc_offset());
   1307   DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
   1308   __ li(t0, Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline()));
   1309   __ Daddu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() +
   1310                            Code::kHeaderSize - kHeapObjectTag));
   1311 
   1312   // Initialize the dispatch table register.
   1313   __ li(kInterpreterDispatchTableRegister,
   1314         Operand(ExternalReference::interpreter_dispatch_table_address(
   1315             masm->isolate())));
   1316 
   1317   // Get the bytecode array pointer from the frame.
   1318   __ ld(kInterpreterBytecodeArrayRegister,
   1319         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
   1320 
   1321   if (FLAG_debug_code) {
   1322     // Check that the function data field is actually a BytecodeArray object.

   1323     __ SmiTst(kInterpreterBytecodeArrayRegister, at);
   1324     __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at,
   1325               Operand(zero_reg));
   1326     __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1);
   1327     __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1,
   1328               Operand(BYTECODE_ARRAY_TYPE));
   1329   }
   1330 
   1331   // Get the target bytecode offset from the frame.
   1332   __ lw(
   1333       kInterpreterBytecodeOffsetRegister,
   1334       UntagSmiMemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
   1335 
   1336   // Dispatch to the target bytecode.
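             // The dispatch table is a flat array with one handler entry point
             // per bytecode: fetch the byte at bytecode_array + offset, scale
             // it by kPointerSizeLog2 with Dlsa, and jump to the handler code.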
   1337   __ Daddu(a1, kInterpreterBytecodeArrayRegister,
   1338            kInterpreterBytecodeOffsetRegister);
   1339   __ lbu(a1, MemOperand(a1));
   1340   __ Dlsa(a1, kInterpreterDispatchTableRegister, a1, kPointerSizeLog2);
   1341   __ ld(a1, MemOperand(a1));
   1342   __ Jump(a1);
   1343 }
   1344 
   1345 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
   1346   // Advance the current bytecode offset stored within the given interpreter
   1347   // stack frame. This simulates what all bytecode handlers do upon completion
   1348   // of the underlying operation.
   1349   __ ld(a1, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
   1350   __ ld(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
   1351   __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   1352   {
   1353     FrameScope scope(masm, StackFrame::INTERNAL);
   1354     __ Push(kInterpreterAccumulatorRegister, a1, a2);
   1355     __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset);
   1356     __ mov(a2, v0);  // Result is the new bytecode offset.
   1357     __ Pop(kInterpreterAccumulatorRegister);
   1358   }
   1359   __ sd(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
   1360 
   1361   Generate_InterpreterEnterBytecode(masm);
   1362 }
   1363 
   1364 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
   1365   Generate_InterpreterEnterBytecode(masm);
   1366 }
   1367 
   1368 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
   1369   // ----------- S t a t e -------------
   1370   //  -- a0 : argument count (preserved for callee)
   1371   //  -- a3 : new target (preserved for callee)
   1372   //  -- a1 : target function (preserved for callee)
   1373   // -----------------------------------
   1374   // First look up code; maybe we don't need to compile!
   1375   Label gotta_call_runtime, gotta_call_runtime_no_stack;
   1376   Label try_shared;
   1377   Label loop_top, loop_bottom;
   1378 
   1379   Register argument_count = a0;
   1380   Register closure = a1;
   1381   Register new_target = a3;
   1382   Register map = a0;
   1383   Register index = a2;
   1384 
   1385   // Do we have a valid feedback vector?
   1386   __ ld(index, FieldMemOperand(closure, JSFunction::kFeedbackVectorOffset));
   1387   __ ld(index, FieldMemOperand(index, Cell::kValueOffset));
   1388   __ JumpIfRoot(index, Heap::kUndefinedValueRootIndex,
   1389                 &gotta_call_runtime_no_stack);
   1390 
   1391   __ push(argument_count);
   1392   __ push(new_target);
   1393   __ push(closure);
   1394 
   1395   __ ld(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
   1396   __ ld(map, FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
   1397   __ ld(index, FieldMemOperand(map, FixedArray::kLengthOffset));
   1398   __ Branch(&try_shared, lt, index, Operand(Smi::FromInt(2)));
   1399 
   1400   // a3  : native context
   1401   // a2  : length / index
   1402   // a0  : optimized code map
   1403   // stack[0] : new target
   1404   // stack[8] : closure
   1405   Register native_context = a3;
   1406   __ ld(native_context, NativeContextMemOperand());
   1407 
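             // Walk the optimized code map from the end; each entry holds weak
             // cells for a native context and its cached code. Search for an
             // entry whose context matches ours and whose code cell is still
             // live.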
   1408   __ bind(&loop_top);
   1409   Register temp = a1;
   1410   Register array_pointer = a5;
   1411 
   1412   // Does the native context match?
   1413   __ SmiScale(at, index, kPointerSizeLog2);
   1414   __ Daddu(array_pointer, map, Operand(at));
   1415   __ ld(temp, FieldMemOperand(array_pointer,
   1416                               SharedFunctionInfo::kOffsetToPreviousContext));
   1417   __ ld(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
   1418   __ Branch(&loop_bottom, ne, temp, Operand(native_context));
   1419 
   1420   // Code available?
   1421   Register entry = a4;
   1422   __ ld(entry,
   1423         FieldMemOperand(array_pointer,
   1424                         SharedFunctionInfo::kOffsetToPreviousCachedCode));
   1425   __ ld(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
   1426   __ JumpIfSmi(entry, &try_shared);
   1427 
   1428   // Found code. Get it into the closure and return.
   1429   __ pop(closure);
   1430   // Store code entry in the closure.
   1431   __ Daddu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
   1432   __ sd(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
   1433   __ RecordWriteCodeEntryField(closure, entry, a5);
   1434 
   1435   // Link the closure into the optimized function list.
   1436   // a4 : code entry
   1437   // a3 : native context
   1438   // a1 : closure
   1439   __ ld(a5,
   1440         ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
   1441   __ sd(a5, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
   1442   __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, a5, a0,
   1443                       kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
   1444                       OMIT_SMI_CHECK);
   1445   const int function_list_offset =
   1446       Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
   1447   __ sd(closure,
   1448         ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
   1449   // Save closure before the write barrier.
   1450   __ mov(a5, closure);
   1451   __ RecordWriteContextSlot(native_context, function_list_offset, closure, a0,
   1452                             kRAHasNotBeenSaved, kDontSaveFPRegs);
   1453   __ mov(closure, a5);
   1454   __ pop(new_target);
   1455   __ pop(argument_count);
   1456   __ Jump(entry);
   1457 
   1458   __ bind(&loop_bottom);
   1459   __ Dsubu(index, index,
   1460            Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
   1461   __ Branch(&loop_top, gt, index, Operand(Smi::FromInt(1)));
   1462 
   1463   // We found no code.
   1464   __ bind(&try_shared);
   1465   __ pop(closure);
   1466   __ pop(new_target);
   1467   __ pop(argument_count);
   1468   __ ld(entry, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
   1469   // Is the shared function marked for tier up?
   1470   __ lbu(a5, FieldMemOperand(entry,
   1471                              SharedFunctionInfo::kMarkedForTierUpByteOffset));
   1472   __ And(a5, a5,
   1473          Operand(1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte));
   1474   __ Branch(&gotta_call_runtime_no_stack, ne, a5, Operand(zero_reg));
   1475 
   1476   // If SFI points to anything other than CompileLazy, install that.
   1477   __ ld(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
   1478   __ Move(t1, masm->CodeObject());
   1479   __ Branch(&gotta_call_runtime_no_stack, eq, entry, Operand(t1));
   1480 
   1481   // Install the SFI's code entry.
   1482   __ Daddu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
   1483   __ sd(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
   1484   __ RecordWriteCodeEntryField(closure, entry, a5);
   1485   __ Jump(entry);
   1486 
   1487   __ bind(&gotta_call_runtime);
   1488   __ pop(closure);
   1489   __ pop(new_target);
   1490   __ pop(argument_count);
   1491   __ bind(&gotta_call_runtime_no_stack);
   1492   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
   1493 }
   1494 
   1495 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
   1496   GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
   1497 }
   1498 
   1499 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
   1500   GenerateTailCallToReturnedCode(masm,
   1501                                  Runtime::kCompileOptimized_NotConcurrent);
   1502 }
   1503 
   1504 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
   1505   GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
   1506 }
   1507 
   1508 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
   1509   // ----------- S t a t e -------------
   1510   //  -- a0 : argument count (preserved for callee)
   1511   //  -- a1 : new target (preserved for callee)
   1512   //  -- a3 : target function (preserved for callee)
   1513   // -----------------------------------
   1514   Label failed;
   1515   {
   1516     FrameScope scope(masm, StackFrame::INTERNAL);
   1517     // Push a copy of the target function and the new target.
   1518     // Push function as parameter to the runtime call.
   1519     __ Move(t2, a0);
   1520     __ SmiTag(a0);
   1521     __ Push(a0, a1, a3, a1);
   1522 
   1523     // Copy arguments from caller (stdlib, foreign, heap).
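                // There are at most three optional arguments. For the actual
                // argument count j (tested below), copy those j arguments from
                // the caller's frame and pad with undefined up to three, so
                // the runtime call always receives a fixed set of parameters.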
   1524     Label args_done;
   1525     for (int j = 0; j < 4; ++j) {
   1526       Label over;
   1527       if (j < 3) {
   1528         __ Branch(&over, ne, t2, Operand(j));
   1529       }
   1530       for (int i = j - 1; i >= 0; --i) {
   1531         __ ld(t2, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
   1532                                      i * kPointerSize));
   1533         __ push(t2);
   1534       }
   1535       for (int i = 0; i < 3 - j; ++i) {
   1536         __ PushRoot(Heap::kUndefinedValueRootIndex);
   1537       }
   1538       if (j < 3) {
   1539         __ jmp(&args_done);
   1540         __ bind(&over);
   1541       }
   1542     }
   1543     __ bind(&args_done);
   1544 
   1545     // Call the runtime; on success, unwind this frame and the parent frame.
   1546     __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
   1547     // A smi 0 is returned on failure, an object on success.
   1548     __ JumpIfSmi(v0, &failed);
   1549 
   1550     __ Drop(2);
   1551     __ pop(t2);
   1552     __ SmiUntag(t2);
   1553     scope.GenerateLeaveFrame();
   1554 
   1555     __ Daddu(t2, t2, Operand(1));
   1556     __ Dlsa(sp, sp, t2, kPointerSizeLog2);
   1557     __ Ret();
   1558 
   1559     __ bind(&failed);
   1560     // Restore target function and new target.
   1561     __ Pop(a0, a1, a3);
   1562     __ SmiUntag(a0);
   1563   }
   1564   // On failure, tail call back to regular js.
   1565   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
   1566 }
   1567 
   1568 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
   1569   // For now, we are relying on the fact that make_code_young doesn't do any
   1570   // garbage collection which allows us to save/restore the registers without
   1571   // worrying about which of them contain pointers. We also don't build an
   1572   // internal frame to make the code faster, since we shouldn't have to do stack
   1573   // crawls in MakeCodeYoung. This seems a bit fragile.
   1574 
   1575   // Set a0 to point to the head of the PlatformCodeAge sequence.
   1576   __ Dsubu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
   1577 
   1578   // The following registers must be saved and restored when calling through to
   1579   // the runtime:
   1580   //   a0 - contains return address (beginning of patch sequence)
   1581   //   a1 - isolate
   1582   //   a3 - new target
   1583   RegList saved_regs =
   1584       (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
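             // ra and fp must survive the C call (ra still points back into the
             // patched code sequence); sp is explicitly excluded because
             // MultiPush/MultiPop adjust it themselves.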
   1585   FrameScope scope(masm, StackFrame::MANUAL);
   1586   __ MultiPush(saved_regs);
   1587   __ PrepareCallCFunction(2, 0, a2);
   1588   __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
   1589   __ CallCFunction(
   1590       ExternalReference::get_make_code_young_function(masm->isolate()), 2);
   1591   __ MultiPop(saved_regs);
   1592   __ Jump(a0);
   1593 }
   1594 
   1595 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                              \
   1596   void Builtins::Generate_Make##C##CodeYoungAgain(MacroAssembler* masm) { \
   1597     GenerateMakeCodeYoungAgainCommon(masm);                               \
   1598   }
   1599 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
   1600 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
   1601 
   1602 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
   1603   // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
   1604   // that make_code_young doesn't do any garbage collection which allows us to
   1605   // save/restore the registers without worrying about which of them contain
   1606   // pointers.
   1607 
   1608   // Set a0 to point to the head of the PlatformCodeAge sequence.
   1609   __ Dsubu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
   1610 
   1611   // The following registers must be saved and restored when calling through to
   1612   // the runtime:
   1613   //   a0 - contains return address (beginning of patch sequence)
   1614   //   a1 - isolate
   1615   //   a3 - new target
   1616   RegList saved_regs =
   1617       (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
   1618   FrameScope scope(masm, StackFrame::MANUAL);
   1619   __ MultiPush(saved_regs);
   1620   __ PrepareCallCFunction(2, 0, a2);
   1621   __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
   1622   __ CallCFunction(
   1623       ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
   1624       2);
   1625   __ MultiPop(saved_regs);
   1626 
   1627   // Perform prologue operations usually performed by the young code stub.
   1628   __ PushStandardFrame(a1);
   1629 
   1630   // Jump to point after the code-age stub.
   1631   __ Daddu(a0, a0, Operand(kNoCodeAgeSequenceLength));
   1632   __ Jump(a0);
   1633 }
   1634 
   1635 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
   1636   GenerateMakeCodeYoungAgainCommon(masm);
   1637 }
   1638 
   1639 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
   1640   Generate_MarkCodeAsExecutedOnce(masm);
   1641 }
   1642 
   1643 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
   1644                                              SaveFPRegsMode save_doubles) {
   1645   {
   1646     FrameScope scope(masm, StackFrame::INTERNAL);
   1647 
   1648     // Preserve registers across the notification; this is important for compiled
   1649     // stubs that tail call the runtime on deopts passing their parameters in
   1650     // registers.
   1651     __ MultiPush(kJSCallerSaved | kCalleeSaved);
   1652     // Pass the function and deoptimization type to the runtime system.
   1653     __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
   1654     __ MultiPop(kJSCallerSaved | kCalleeSaved);
   1655   }
   1656 
   1657   __ Daddu(sp, sp, Operand(kPointerSize));  // Ignore state
   1658   __ Jump(ra);                              // Jump to miss handler
   1659 }
   1660 
   1661 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
   1662   Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
   1663 }
   1664 
   1665 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
   1666   Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
   1667 }
   1668 
   1669 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
   1670                                              Deoptimizer::BailoutType type) {
   1671   {
   1672     FrameScope scope(masm, StackFrame::INTERNAL);
   1673     // Pass the function and deoptimization type to the runtime system.
   1674     __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
   1675     __ push(a0);
   1676     __ CallRuntime(Runtime::kNotifyDeoptimized);
   1677   }
   1678 
   1679   // Get the full codegen state from the stack and untag it -> a6.
   1680   __ lw(a6, UntagSmiMemOperand(sp, 0 * kPointerSize));
   1681   // Switch on the state.
   1682   Label with_tos_register, unknown_state;
   1683   __ Branch(
   1684       &with_tos_register, ne, a6,
   1685       Operand(static_cast<int64_t>(Deoptimizer::BailoutState::NO_REGISTERS)));
   1686   __ Ret(USE_DELAY_SLOT);
   1687   // Safe to fill the delay slot; Daddu will emit one instruction.
   1688   __ Daddu(sp, sp, Operand(1 * kPointerSize));  // Remove state.
   1689 
   1690   __ bind(&with_tos_register);
   1691   DCHECK_EQ(kInterpreterAccumulatorRegister.code(), v0.code());
   1692   __ ld(v0, MemOperand(sp, 1 * kPointerSize));
   1693   __ Branch(
   1694       &unknown_state, ne, a6,
   1695       Operand(static_cast<int64_t>(Deoptimizer::BailoutState::TOS_REGISTER)));
   1696 
   1697   __ Ret(USE_DELAY_SLOT);
   1698   // Safe to fill the delay slot; Daddu will emit one instruction.
   1699   __ Daddu(sp, sp, Operand(2 * kPointerSize));  // Remove state.
   1700 
   1701   __ bind(&unknown_state);
   1702   __ stop("no cases left");
   1703 }
   1704 
   1705 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
   1706   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
   1707 }
   1708 
   1709 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
   1710   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
   1711 }
   1712 
   1713 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
   1714   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
   1715 }
   1716 
   1717 // Clobbers {t2, t3, a4, a5}.
   1718 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
   1719                                     Register function_template_info,
   1720                                     Label* receiver_check_failed) {
   1721   Register signature = t2;
   1722   Register map = t3;
   1723   Register constructor = a4;
   1724   Register scratch = a5;
   1725 
   1726   // If there is no signature, return the holder.
   1727   __ ld(signature, FieldMemOperand(function_template_info,
   1728                                    FunctionTemplateInfo::kSignatureOffset));
   1729   Label receiver_check_passed;
   1730   __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
   1731                 &receiver_check_passed);
   1732 
   1733   // Walk the prototype chain.
   1734   __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
   1735   Label prototype_loop_start;
   1736   __ bind(&prototype_loop_start);
   1737 
   1738   // Get the constructor, if any.
   1739   __ GetMapConstructor(constructor, map, scratch, scratch);
   1740   Label next_prototype;
   1741   __ Branch(&next_prototype, ne, scratch, Operand(JS_FUNCTION_TYPE));
   1742   Register type = constructor;
   1743   __ ld(type,
   1744         FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
   1745   __ ld(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));
   1746 
   1747   // Loop through the chain of inheriting function templates.
   1748   Label function_template_loop;
   1749   __ bind(&function_template_loop);
   1750 
   1751   // If the signatures match, we have a compatible receiver.
   1752   __ Branch(&receiver_check_passed, eq, signature, Operand(type),
   1753             USE_DELAY_SLOT);
   1754 
   1755   // If the current type is not a FunctionTemplateInfo, load the next prototype
   1756   // in the chain.
   1757   __ JumpIfSmi(type, &next_prototype);
   1758   __ GetObjectType(type, scratch, scratch);
   1759   __ Branch(&next_prototype, ne, scratch, Operand(FUNCTION_TEMPLATE_INFO_TYPE));
   1760 
   1761   // Otherwise load the parent function template and iterate.
   1762   __ ld(type,
   1763         FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
   1764   __ Branch(&function_template_loop);
   1765 
   1766   // Load the next prototype.
   1767   __ bind(&next_prototype);
   1768   __ lwu(scratch, FieldMemOperand(map, Map::kBitField3Offset));
   1769   __ DecodeField<Map::HasHiddenPrototype>(scratch);
   1770   __ Branch(receiver_check_failed, eq, scratch, Operand(zero_reg));
   1771 
   1772   __ ld(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
   1773   __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
   1774   // Iterate.
   1775   __ Branch(&prototype_loop_start);
   1776 
   1777   __ bind(&receiver_check_passed);
   1778 }
   1779 
   1780 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
   1781   // ----------- S t a t e -------------
   1782   //  -- a0                 : number of arguments excluding receiver
   1783   //  -- a1                 : callee
   1784   //  -- ra                 : return address
   1785   //  -- sp[0]              : last argument
   1786   //  -- ...
   1787   //  -- sp[8 * (argc - 1)] : first argument
   1788   //  -- sp[8 * argc]       : receiver
   1789   // -----------------------------------
   1790 
   1791   // Load the FunctionTemplateInfo.
   1792   __ ld(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
   1793   __ ld(t1, FieldMemOperand(t1, SharedFunctionInfo::kFunctionDataOffset));
   1794 
   1795   // Do the compatible receiver check.
   1796   Label receiver_check_failed;
   1797   __ Dlsa(t8, sp, a0, kPointerSizeLog2);
   1798   __ ld(t0, MemOperand(t8));
   1799   CompatibleReceiverCheck(masm, t0, t1, &receiver_check_failed);
   1800 
   1801   // Get the callback offset from the FunctionTemplateInfo, and jump to the
   1802   // beginning of the code.
   1803   __ ld(t2, FieldMemOperand(t1, FunctionTemplateInfo::kCallCodeOffset));
   1804   __ ld(t2, FieldMemOperand(t2, CallHandlerInfo::kFastHandlerOffset));
   1805   __ Daddu(t2, t2, Operand(Code::kHeaderSize - kHeapObjectTag));
   1806   __ Jump(t2);
   1807 
   1808   // Compatible receiver check failed: throw an Illegal Invocation exception.
   1809   __ bind(&receiver_check_failed);
   1810   // Drop the arguments (including the receiver).
   1811   __ Daddu(t8, t8, Operand(kPointerSize));
   1812   __ daddu(sp, t8, zero_reg);
   1813   __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
   1814 }
   1815 
   1816 static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
   1817                                               bool has_handler_frame) {
   1818   // Lookup the function in the JavaScript frame.
   1819   if (has_handler_frame) {
   1820     __ ld(a0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   1821     __ ld(a0, MemOperand(a0, JavaScriptFrameConstants::kFunctionOffset));
   1822   } else {
   1823     __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   1824   }
   1825 
   1826   {
   1827     FrameScope scope(masm, StackFrame::INTERNAL);
   1828     // Pass function as argument.
   1829     __ push(a0);
   1830     __ CallRuntime(Runtime::kCompileForOnStackReplacement);
   1831   }
   1832 
   1833   // If the code object is null, just return to the caller.
   1834   __ Ret(eq, v0, Operand(Smi::kZero));
   1835 
   1836   // Drop any potential handler frame that may be sitting on top of the actual
   1837   // JavaScript frame. This is the case when OSR is triggered from bytecode.
   1838   if (has_handler_frame) {
   1839     __ LeaveFrame(StackFrame::STUB);
   1840   }
   1841 
   1842   // Load deoptimization data from the code object.
   1843   // <deopt_data> = <code>[#deoptimization_data_offset]
   1844   __ ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
   1845 
   1846   // Load the OSR entrypoint offset from the deoptimization data.
   1847   // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
   1848   __ lw(a1,
   1849         UntagSmiMemOperand(a1, FixedArray::OffsetOfElementAt(
   1850                                    DeoptimizationInputData::kOsrPcOffsetIndex) -
   1851                                    kHeapObjectTag));
   1852 
   1853   // Compute the target address = code_obj + header_size + osr_offset
   1854   // <entry_addr> = <code_obj> + #header_size + <osr_offset>
   1855   __ daddu(v0, v0, a1);
   1856   __ daddiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);
   1857 
   1858   // And "return" to the OSR entry point of the function.
   1859   __ Ret();
   1860 }
   1861 
   1862 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   1863   Generate_OnStackReplacementHelper(masm, false);
   1864 }
   1865 
   1866 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
   1867   Generate_OnStackReplacementHelper(masm, true);
   1868 }
   1869 
   1870 // static
   1871 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   1872   // ----------- S t a t e -------------
   1873   //  -- a0    : argc
   1874   //  -- sp[0] : argArray
   1875   //  -- sp[8]  : thisArg
   1876   //  -- sp[16] : receiver
   1877   // -----------------------------------
   1878 
   1879   Register argc = a0;
   1880   Register arg_array = a0;
   1881   Register receiver = a1;
   1882   Register this_arg = a2;
   1883   Register undefined_value = a3;
   1884   Register scratch = a4;
   1885 
   1886   __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
   1887   // 1. Load receiver into a1, argArray into a0 (if present), remove all
   1888   // arguments from the stack (including the receiver), and push thisArg (if
   1889   // present) instead.
   1890   {
   1891     // Claim (2 - argc) dummy arguments from the stack, to put the stack in a
   1892     // consistent state for a simple pop operation.
   1893 
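               // Movz(rd, rs, rt) moves rs into rd only when rt is zero. Copy
               // argc into scratch and decrement it between the Movz pairs, so
               // each missing trailing argument defaults to undefined.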
   1894     __ Dsubu(sp, sp, Operand(2 * kPointerSize));
   1895     __ Dlsa(sp, sp, argc, kPointerSizeLog2);
   1896     __ mov(scratch, argc);
   1897     __ Pop(this_arg, arg_array);                   // Overwrite argc
   1898     __ Movz(arg_array, undefined_value, scratch);  // if argc == 0
   1899     __ Movz(this_arg, undefined_value, scratch);   // if argc == 0
   1900     __ Dsubu(scratch, scratch, Operand(1));
   1901     __ Movz(arg_array, undefined_value, scratch);  // if argc == 1
   1902     __ ld(receiver, MemOperand(sp));
   1903     __ sd(this_arg, MemOperand(sp));
   1904   }
   1905 
   1906   // ----------- S t a t e -------------
   1907   //  -- a0    : argArray
   1908   //  -- a1    : receiver
   1909   //  -- a3    : undefined root value
   1910   //  -- sp[0] : thisArg
   1911   // -----------------------------------
   1912 
   1913   // 2. Make sure the receiver is actually callable.
   1914   Label receiver_not_callable;
   1915   __ JumpIfSmi(receiver, &receiver_not_callable);
   1916   __ ld(a4, FieldMemOperand(receiver, HeapObject::kMapOffset));
   1917   __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
   1918   __ And(a4, a4, Operand(1 << Map::kIsCallable));
   1919   __ Branch(&receiver_not_callable, eq, a4, Operand(zero_reg));
   1920 
   1921   // 3. Tail call with no arguments if argArray is null or undefined.
   1922   Label no_arguments;
   1923   __ JumpIfRoot(arg_array, Heap::kNullValueRootIndex, &no_arguments);
   1924   __ Branch(&no_arguments, eq, arg_array, Operand(undefined_value));
   1925 
   1926   // 4a. Apply the receiver to the given argArray (passing undefined for
   1927   // new.target).
   1928   DCHECK(undefined_value.is(a3));
   1929   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
   1930 
   1931   // 4b. The argArray is either null or undefined, so we tail call without any
   1932   // arguments to the receiver.
   1933   __ bind(&no_arguments);
   1934   {
   1935     __ mov(a0, zero_reg);
   1936     DCHECK(receiver.is(a1));
   1937     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   1938   }
   1939 
   1940   // 4c. The receiver is not callable, throw an appropriate TypeError.
   1941   __ bind(&receiver_not_callable);
   1942   {
   1943     __ sd(receiver, MemOperand(sp));
   1944     __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
   1945   }
   1946 }
   1947 
   1948 // static
   1949 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
   1950   // 1. Make sure we have at least one argument.
   1951   // a0: actual number of arguments
   1952   {
   1953     Label done;
   1954     __ Branch(&done, ne, a0, Operand(zero_reg));
   1955     __ PushRoot(Heap::kUndefinedValueRootIndex);
   1956     __ Daddu(a0, a0, Operand(1));
   1957     __ bind(&done);
   1958   }
   1959 
   1960   // 2. Get the function to call (passed as receiver) from the stack.
   1961   // a0: actual number of arguments
   1962   __ Dlsa(at, sp, a0, kPointerSizeLog2);
   1963   __ ld(a1, MemOperand(at));
   1964 
   1965   // 3. Shift arguments and return address one slot down on the stack
   1966   //    (overwriting the original receiver).  Adjust argument count to make
   1967   //    the original first argument the new receiver.
   1968   // a0: actual number of arguments
   1969   // a1: function
   1970   {
   1971     Label loop;
   1972     // Calculate the copy start address (destination). Copy end address is sp.
   1973     __ Dlsa(a2, sp, a0, kPointerSizeLog2);
   1974 
   1975     __ bind(&loop);
   1976     __ ld(at, MemOperand(a2, -kPointerSize));
   1977     __ sd(at, MemOperand(a2));
   1978     __ Dsubu(a2, a2, Operand(kPointerSize));
   1979     __ Branch(&loop, ne, a2, Operand(sp));
   1980     // Adjust the actual number of arguments and remove the top element
   1981     // (which is a copy of the last argument).
   1982     __ Dsubu(a0, a0, Operand(1));
   1983     __ Pop();
   1984   }
   1985 
   1986   // 4. Call the callable.
   1987   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   1988 }
   1989 
   1990 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
   1991   // ----------- S t a t e -------------
   1992   //  -- a0     : argc
   1993   //  -- sp[0]  : argumentsList  (if argc == 3)
   1994   //  -- sp[8]  : thisArgument   (if argc >= 2)
   1995   //  -- sp[16] : target         (if argc >= 1)
   1996   //  -- sp[24] : receiver
   1997   // -----------------------------------
   1998 
   1999   Register argc = a0;
   2000   Register arguments_list = a0;
   2001   Register target = a1;
   2002   Register this_argument = a2;
   2003   Register undefined_value = a3;
   2004   Register scratch = a4;
   2005 
   2006   __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
   2007   // 1. Load target into a1 (if present), argumentsList into a0 (if present),
   2008   // remove all arguments from the stack (including the receiver), and push
   2009   // thisArgument (if present) instead.
   2010   {
   2011     // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
   2012     // consistent state for a simple pop operation.
   2013 
   2014     __ Dsubu(sp, sp, Operand(3 * kPointerSize));
   2015     __ Dlsa(sp, sp, argc, kPointerSizeLog2);
   2016     __ mov(scratch, argc);
   2017     __ Pop(target, this_argument, arguments_list);
   2018     __ Movz(arguments_list, undefined_value, scratch);  // if argc == 0
   2019     __ Movz(this_argument, undefined_value, scratch);   // if argc == 0
   2020     __ Movz(target, undefined_value, scratch);          // if argc == 0
   2021     __ Dsubu(scratch, scratch, Operand(1));
   2022     __ Movz(arguments_list, undefined_value, scratch);  // if argc == 1
   2023     __ Movz(this_argument, undefined_value, scratch);   // if argc == 1
   2024     __ Dsubu(scratch, scratch, Operand(1));
   2025     __ Movz(arguments_list, undefined_value, scratch);  // if argc == 2
   2026 
   2027     __ sd(this_argument, MemOperand(sp, 0));  // Overwrite receiver
   2028   }
   2029 
   2030   // ----------- S t a t e -------------
   2031   //  -- a0    : argumentsList
   2032   //  -- a1    : target
   2033   //  -- a3    : undefined root value
   2034   //  -- sp[0] : thisArgument
   2035   // -----------------------------------
   2036 
   2037   // 2. Make sure the target is actually callable.
   2038   Label target_not_callable;
   2039   __ JumpIfSmi(target, &target_not_callable);
   2040   __ ld(a4, FieldMemOperand(target, HeapObject::kMapOffset));
   2041   __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
   2042   __ And(a4, a4, Operand(1 << Map::kIsCallable));
   2043   __ Branch(&target_not_callable, eq, a4, Operand(zero_reg));
   2044 
   2045   // 3a. Apply the target to the given argumentsList (passing undefined for
   2046   // new.target).
   2047   DCHECK(undefined_value.is(a3));
   2048   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
   2049 
   2050   // 3b. The target is not callable, throw an appropriate TypeError.
   2051   __ bind(&target_not_callable);
   2052   {
   2053     __ sd(target, MemOperand(sp));
   2054     __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
   2055   }
   2056 }
   2057 
   2058 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
   2059   // ----------- S t a t e -------------
   2060   //  -- a0     : argc
   2061   //  -- sp[0]  : new.target (optional) (dummy value if argc <= 2)
   2062   //  -- sp[8]  : argumentsList         (dummy value if argc <= 1)
   2063   //  -- sp[16] : target                (dummy value if argc == 0)
   2064   //  -- sp[24] : receiver
   2065   // -----------------------------------
   2066   Register argc = a0;
   2067   Register arguments_list = a0;
   2068   Register target = a1;
   2069   Register new_target = a3;
   2070   Register undefined_value = a4;
   2071   Register scratch = a5;
   2072 
   2073   // 1. Load target into a1 (if present), argumentsList into a0 (if present),
   2074   // new.target into a3 (if present, otherwise use target), remove all
   2075   // arguments from the stack (including the receiver), and push undefined as
   2076   // the receiver on the stack instead.
   2077   {
   2078     // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
   2079     // consistent state for a simple pop operation.
   2080 
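               // As in Function.prototype.apply above, Movz defaults missing
               // arguments via the decremented scratch count; note that
               // new.target falls back to target (not undefined) when only one
               // or two arguments were passed.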
   2081     __ Dsubu(sp, sp, Operand(3 * kPointerSize));
   2082     __ Dlsa(sp, sp, argc, kPointerSizeLog2);
   2083     __ mov(scratch, argc);
   2084     __ Pop(target, arguments_list, new_target);
   2085     __ Movz(arguments_list, undefined_value, scratch);  // if argc == 0
   2086     __ Movz(new_target, undefined_value, scratch);      // if argc == 0
   2087     __ Movz(target, undefined_value, scratch);          // if argc == 0
   2088     __ Dsubu(scratch, scratch, Operand(1));
   2089     __ Movz(arguments_list, undefined_value, scratch);  // if argc == 1
   2090     __ Movz(new_target, target, scratch);               // if argc == 1
   2091     __ Dsubu(scratch, scratch, Operand(1));
   2092     __ Movz(new_target, target, scratch);  // if argc == 2
   2093 
   2094     __ sd(undefined_value, MemOperand(sp, 0));  // Overwrite receiver
   2095   }
   2096 
   2097   // ----------- S t a t e -------------
   2098   //  -- a0    : argumentsList
   2099   //  -- a1    : target
   2100   //  -- a3    : new.target
   2101   //  -- sp[0] : receiver (undefined)
   2102   // -----------------------------------
   2103 
   2104   // 2. Make sure the target is actually a constructor.
   2105   Label target_not_constructor;
   2106   __ JumpIfSmi(target, &target_not_constructor);
   2107   __ ld(a4, FieldMemOperand(target, HeapObject::kMapOffset));
   2108   __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
   2109   __ And(a4, a4, Operand(1 << Map::kIsConstructor));
   2110   __ Branch(&target_not_constructor, eq, a4, Operand(zero_reg));
   2111 
   2112   // 3. Make sure the new.target is actually a constructor.
   2113   Label new_target_not_constructor;
   2114   __ JumpIfSmi(new_target, &new_target_not_constructor);
   2115   __ ld(a4, FieldMemOperand(new_target, HeapObject::kMapOffset));
   2116   __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
   2117   __ And(a4, a4, Operand(1 << Map::kIsConstructor));
   2118   __ Branch(&new_target_not_constructor, eq, a4, Operand(zero_reg));
   2119 
   2120   // 4a. Construct the target with the given new.target and argumentsList.
   2121   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
   2122 
   2123   // 4b. The target is not a constructor, throw an appropriate TypeError.
   2124   __ bind(&target_not_constructor);
   2125   {
   2126     __ sd(target, MemOperand(sp));
   2127     __ TailCallRuntime(Runtime::kThrowNotConstructor);
   2128   }
   2129 
   2130   // 4c. The new.target is not a constructor, throw an appropriate TypeError.
   2131   __ bind(&new_target_not_constructor);
   2132   {
   2133     __ sd(new_target, MemOperand(sp));
   2134     __ TailCallRuntime(Runtime::kThrowNotConstructor);
   2135   }
   2136 }
   2137 
   2138 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
   2139   // Smi-tag the argument count; on MIPS64 a smi lives in the upper 32 bits.
   2140   __ dsll32(a0, a0, 0);
   2141   __ li(a4, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
   2142   __ MultiPush(a0.bit() | a1.bit() | a4.bit() | fp.bit() | ra.bit());
   2143   __ Daddu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
   2144                            kPointerSize));
   2145 }
   2146 
   2147 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
   2148   // ----------- S t a t e -------------
   2149   //  -- v0 : result being passed through
   2150   // -----------------------------------
   2151   // Get the number of arguments passed (as a smi), tear down the frame and
   2152   // then drop the parameters from the stack.
   2153   __ ld(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
   2154                              kPointerSize)));
   2155   __ mov(sp, fp);
   2156   __ MultiPop(fp.bit() | ra.bit());
   2157   __ SmiScale(a4, a1, kPointerSizeLog2);
   2158   __ Daddu(sp, sp, a4);
   2159   // Adjust for the receiver.
   2160   __ Daddu(sp, sp, Operand(kPointerSize));
   2161 }
   2162 
   2163 // static
   2164 void Builtins::Generate_Apply(MacroAssembler* masm) {
   2165   // ----------- S t a t e -------------
   2166   //  -- a0    : argumentsList
   2167   //  -- a1    : target
   2168   //  -- a3    : new.target (checked to be constructor or undefined)
   2169   //  -- sp[0] : thisArgument
   2170   // -----------------------------------
   2171 
   2172   Register arguments_list = a0;
   2173   Register target = a1;
   2174   Register new_target = a3;
   2175 
   2176   Register args = a0;
   2177   Register len = a2;
   2178 
   2179   // Create the list of arguments from the array-like argumentsList.
   2180   {
   2181     Label create_arguments, create_array, create_holey_array, create_runtime,
   2182         done_create;
   2183     __ JumpIfSmi(arguments_list, &create_runtime);
   2184 
   2185     // Load the map of argumentsList into a2.
   2186     Register arguments_list_map = a2;
   2187     __ ld(arguments_list_map,
   2188           FieldMemOperand(arguments_list, HeapObject::kMapOffset));
   2189 
   2190     // Load native context into a4.
   2191     Register native_context = a4;
   2192     __ ld(native_context, NativeContextMemOperand());
   2193 
   2194     // Check if argumentsList is an (unmodified) arguments object.
   2195     __ ld(at, ContextMemOperand(native_context,
   2196                                 Context::SLOPPY_ARGUMENTS_MAP_INDEX));
   2197     __ Branch(&create_arguments, eq, arguments_list_map, Operand(at));
   2198     __ ld(at, ContextMemOperand(native_context,
   2199                                 Context::STRICT_ARGUMENTS_MAP_INDEX));
   2200     __ Branch(&create_arguments, eq, arguments_list_map, Operand(at));
   2201 
   2202     // Check if argumentsList is a fast JSArray.
   2203     __ lbu(v0, FieldMemOperand(a2, Map::kInstanceTypeOffset));
   2204     __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE));
   2205 
   2206     // Ask the runtime to create the list (actually a FixedArray).
   2207     __ bind(&create_runtime);
   2208     {
   2209       FrameScope scope(masm, StackFrame::INTERNAL);
   2210       __ Push(target, new_target, arguments_list);
   2211       __ CallRuntime(Runtime::kCreateListFromArrayLike);
   2212       __ mov(arguments_list, v0);
   2213       __ Pop(target, new_target);
   2214       __ lw(len, UntagSmiFieldMemOperand(v0, FixedArray::kLengthOffset));
   2215     }
   2216     __ Branch(&done_create);
   2217 
   2218     // Try to create the list from an arguments object.
   2219     __ bind(&create_arguments);
   2220     __ lw(len, UntagSmiFieldMemOperand(arguments_list,
   2221                                        JSArgumentsObject::kLengthOffset));
   2222     __ ld(a4, FieldMemOperand(arguments_list, JSObject::kElementsOffset));
   2223     __ lw(at, UntagSmiFieldMemOperand(a4, FixedArray::kLengthOffset));
   2224     __ Branch(&create_runtime, ne, len, Operand(at));
   2225     __ mov(args, a4);
   2226 
   2227     __ Branch(&done_create);
   2228 
   2229     // For holey JSArrays we need to check that the array prototype chain
   2230     // protector is intact and that our prototype actually is Array.prototype.
   2231     __ bind(&create_holey_array);
   2232     __ ld(a2, FieldMemOperand(a2, Map::kPrototypeOffset));
   2233     __ ld(at, ContextMemOperand(native_context,
   2234                                 Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
   2235     __ Branch(&create_runtime, ne, a2, Operand(at));
   2236     __ LoadRoot(at, Heap::kArrayProtectorRootIndex);
   2237     __ lw(a2, FieldMemOperand(at, PropertyCell::kValueOffset));
   2238     __ Branch(&create_runtime, ne, a2,
   2239               Operand(Smi::FromInt(Isolate::kProtectorValid)));
   2240     __ lw(a2, UntagSmiFieldMemOperand(a0, JSArray::kLengthOffset));
   2241     __ ld(a0, FieldMemOperand(a0, JSArray::kElementsOffset));
   2242     __ Branch(&done_create);
   2243 
   2244     // Try to create the list from a JSArray object.
   2245     __ bind(&create_array);
   2246     __ lbu(t1, FieldMemOperand(a2, Map::kBitField2Offset));
   2247     __ DecodeField<Map::ElementsKindBits>(t1);
   2248     STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
   2249     STATIC_ASSERT(FAST_ELEMENTS == 2);
   2250     STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
   2251     __ Branch(&create_holey_array, eq, t1, Operand(FAST_HOLEY_SMI_ELEMENTS));
   2252     __ Branch(&create_holey_array, eq, t1, Operand(FAST_HOLEY_ELEMENTS));
   2253     __ Branch(&create_runtime, hi, t1, Operand(FAST_ELEMENTS));
   2254     __ lw(a2, UntagSmiFieldMemOperand(arguments_list, JSArray::kLengthOffset));
   2255     __ ld(a0, FieldMemOperand(arguments_list, JSArray::kElementsOffset));
   2256 
   2257     __ bind(&done_create);
   2258   }
   2259 
   2260   // Check for stack overflow.
   2261   {
   2262     // Check the stack for overflow. We are not trying to catch interruptions
   2263     // (i.e. debug break and preemption) here, so check the "real stack limit".
   2264     Label done;
   2265     __ LoadRoot(a4, Heap::kRealStackLimitRootIndex);
   2266     // Make a4 the space we have left. The stack might already be overflowed
   2267     // here, which will cause a4 to become negative.
   2268     __ Dsubu(a4, sp, a4);
   2269     // Check if the arguments will overflow the stack.
   2270     __ dsll(at, len, kPointerSizeLog2);
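               // 'at' now holds the number of bytes the arguments will occupy.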
   2271     __ Branch(&done, gt, a4, Operand(at));  // Signed comparison.
   2272     __ TailCallRuntime(Runtime::kThrowStackOverflow);
   2273     __ bind(&done);
   2274   }
   2275 
   2276   // ----------- S t a t e -------------
   2277   //  -- a1    : target
   2278   //  -- a0    : args (a FixedArray built from argumentsList)
   2279   //  -- a2    : len (number of elements to push from args)
   2280   //  -- a3    : new.target (checked to be constructor or undefined)
   2281   //  -- sp[0] : thisArgument
   2282   // -----------------------------------
   2283 
   2284   // Push arguments onto the stack (thisArgument is already on the stack).
   2285   {
   2286     Label done, push, loop;
   2287     Register src = a4;
   2288     Register scratch = len;
   2289 
   2290     __ daddiu(src, args, FixedArray::kHeaderSize - kHeapObjectTag);
   2291     __ Branch(&done, eq, len, Operand(zero_reg), i::USE_DELAY_SLOT);
   2292     __ mov(a0, len);  // The 'len' argument for Call() or Construct().
   2293     __ dsll(scratch, len, kPointerSizeLog2);
   2294     __ Dsubu(scratch, sp, Operand(scratch));
   2295     __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
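               // Elements of a holey array may be the_hole; push undefined in
               // that case, which is safe because the protector check above
               // ensured the prototype chain contains no elements to look up.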
   2296     __ bind(&loop);
   2297     __ ld(a5, MemOperand(src));
   2298     __ Branch(&push, ne, a5, Operand(t1));
   2299     __ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
   2300     __ bind(&push);
   2301     __ daddiu(src, src, kPointerSize);
   2302     __ Push(a5);
   2303     __ Branch(&loop, ne, scratch, Operand(sp));
   2304     __ bind(&done);
   2305   }
   2306 
   2307   // ----------- S t a t e -------------
   2308   //  -- a0             : argument count (len)
   2309   //  -- a1             : target
   2310   //  -- a3             : new.target (checked to be constructor or undefined)
   2311   //  -- sp[0]          : args[len-1]
   2312   //  -- sp[8]          : args[len-2]
   2313   //     ...            : ...
   2314   //  -- sp[8*(len-2)]  : args[1]
   2315   //  -- sp[8*(len-1)]  : args[0]
   2316   //  ----------------------------------
   2317 
   2318   // Dispatch to Call or Construct depending on whether new.target is undefined.
   2319   {
   2320     Label construct;
   2321     __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
   2322     __ Branch(&construct, ne, a3, Operand(at));
   2323     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   2324     __ bind(&construct);
   2325     __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
   2326   }
   2327 }
   2328 
   2329 // static
   2330 void Builtins::Generate_CallForwardVarargs(MacroAssembler* masm,
   2331                                            Handle<Code> code) {
   2332   // ----------- S t a t e -------------
   2333   //  -- a1    : the target to call (can be any Object)
   2334   //  -- a2    : start index (to support rest parameters)
   2335   //  -- ra    : return address.
   2336   //  -- sp[0] : thisArgument
   2337   // -----------------------------------
   2338 
   2339   // Check if we have an arguments adaptor frame below the function frame.
   2340   Label arguments_adaptor, arguments_done;
   2341   __ ld(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   2342   __ ld(a0, MemOperand(a3, CommonFrameConstants::kContextOrFrameTypeOffset));
   2343   __ Branch(&arguments_adaptor, eq, a0,
   2344             Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
   2345   {
   2346     __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   2347     __ ld(a0, FieldMemOperand(a0, JSFunction::kSharedFunctionInfoOffset));
   2348     __ lw(a0,
   2349           FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
   2350     __ mov(a3, fp);
   2351   }
   2352   __ Branch(&arguments_done);
   2353   __ bind(&arguments_adaptor);
   2354   {
   2355     // Just get the length from the ArgumentsAdaptorFrame.
   2356     __ lw(a0, UntagSmiMemOperand(
   2357                   a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
   2358   }
   2359   __ bind(&arguments_done);
   2360 
   2361   Label stack_empty, stack_done, stack_overflow;
   2362   __ Subu(a0, a0, a2);
   2363   __ Branch(&stack_empty, le, a0, Operand(zero_reg));
   2364   {
   2365     // Check for stack overflow.
   2366     Generate_StackOverflowCheck(masm, a0, a4, a5, &stack_overflow);
   2367 
   2368     // Forward the arguments from the caller frame.
   2369     {
   2370       Label loop;
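                 // a3 points at the frame holding the arguments (the caller's
                 // frame or the adaptor frame). Iterate from the highest slot
                 // downwards so the forwarded arguments keep their original
                 // stack order.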
   2371       __ mov(a2, a0);
   2372       __ bind(&loop);
   2373       {
   2374         __ Dlsa(at, a3, a2, kPointerSizeLog2);
   2375         __ ld(at, MemOperand(at, 1 * kPointerSize));
   2376         __ push(at);
   2377         __ Subu(a2, a2, Operand(1));
   2378         __ Branch(&loop, ne, a2, Operand(zero_reg));
   2379       }
   2380     }
   2381   }
   2382   __ Branch(&stack_done);
   2383   __ bind(&stack_overflow);
   2384   __ TailCallRuntime(Runtime::kThrowStackOverflow);
   2385   __ bind(&stack_empty);
   2386   {
   2387     // We just pass the receiver, which is already on the stack.
   2388     __ mov(a0, zero_reg);
   2389   }
   2390   __ bind(&stack_done);
   2391 
   2392   __ Jump(code, RelocInfo::CODE_TARGET);
   2393 }
   2394 
   2395 namespace {
   2396 
   2397   // Drops the top JavaScript frame and an arguments adaptor frame below it (if
   2398   // present), preserving all the arguments prepared for the current call.
   2399   // Does nothing if the debugger is currently active.
   2400 // ES6 14.6.3. PrepareForTailCall
   2401 //
   2402 // Stack structure for the function g() tail calling f():
   2403 //
   2404 // ------- Caller frame: -------
   2405 // |  ...
   2406 // |  g()'s arg M
   2407 // |  ...
   2408 // |  g()'s arg 1
   2409 // |  g()'s receiver arg
   2410 // |  g()'s caller pc
   2411 // ------- g()'s frame: -------
   2412 // |  g()'s caller fp      <- fp
   2413 // |  g()'s context
   2414 // |  function pointer: g
   2415 // |  -------------------------
   2416 // |  ...
   2417 // |  ...
   2418 // |  f()'s arg N
   2419 // |  ...
   2420 // |  f()'s arg 1
   2421 // |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
   2422 // ----------------------
   2423 //
   2424 void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
   2425                         Register scratch1, Register scratch2,
   2426                         Register scratch3) {
   2427   DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
   2428   Comment cmnt(masm, "[ PrepareForTailCall");
   2429 
   2430   // Prepare for tail call only if ES2015 tail call elimination is enabled.
   2431   Label done;
   2432   ExternalReference is_tail_call_elimination_enabled =
   2433       ExternalReference::is_tail_call_elimination_enabled_address(
   2434           masm->isolate());
   2435   __ li(at, Operand(is_tail_call_elimination_enabled));
   2436   __ lb(scratch1, MemOperand(at));
   2437   __ Branch(&done, eq, scratch1, Operand(zero_reg));
   2438 
   2439   // Drop possible interpreter handler/stub frame.
   2440   {
   2441     Label no_interpreter_frame;
   2442     __ ld(scratch3,
   2443           MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
   2444     __ Branch(&no_interpreter_frame, ne, scratch3,
   2445               Operand(StackFrame::TypeToMarker(StackFrame::STUB)));
   2446     __ ld(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   2447     __ bind(&no_interpreter_frame);
   2448   }
   2449 
   2450   // Check if next frame is an arguments adaptor frame.
   2451   Register caller_args_count_reg = scratch1;
   2452   Label no_arguments_adaptor, formal_parameter_count_loaded;
   2453   __ ld(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   2454   __ ld(scratch3,
   2455         MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
   2456   __ Branch(&no_arguments_adaptor, ne, scratch3,
   2457             Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
   2458 
   2459   // Drop current frame and load arguments count from arguments adaptor frame.
   2460   __ mov(fp, scratch2);
   2461   __ lw(caller_args_count_reg,
   2462         UntagSmiMemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
   2463   __ Branch(&formal_parameter_count_loaded);
   2464 
   2465   __ bind(&no_arguments_adaptor);
   2466   // Load the caller's formal parameter count.
   2467   __ ld(scratch1,
   2468         MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
   2469   __ ld(scratch1,
   2470         FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
   2471   __ lw(caller_args_count_reg,
   2472         FieldMemOperand(scratch1,
   2473                         SharedFunctionInfo::kFormalParameterCountOffset));
   2474 
   2475   __ bind(&formal_parameter_count_loaded);
   2476 
   2477   ParameterCount callee_args_count(args_reg);
   2478   __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
   2479                         scratch3);
   2480   __ bind(&done);
   2481 }
   2482 }  // namespace
   2483 
   2484 // static
   2485 void Builtins::Generate_CallFunction(MacroAssembler* masm,
   2486                                      ConvertReceiverMode mode,
   2487                                      TailCallMode tail_call_mode) {
   2488   // ----------- S t a t e -------------
   2489   //  -- a0 : the number of arguments (not including the receiver)
   2490   //  -- a1 : the function to call (checked to be a JSFunction)
   2491   // -----------------------------------
   2492   __ AssertFunction(a1);
   2493 
   2494   // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
   2495   // Check that function is not a "classConstructor".
   2496   Label class_constructor;
   2497   __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
   2498   __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFunctionKindByteOffset));
   2499   __ And(at, a3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
   2500   __ Branch(&class_constructor, ne, at, Operand(zero_reg));
   2501 
   2502   // Enter the context of the function; ToObject has to run in the function
   2503   // context, and we also need to take the global proxy from the function
   2504   // context in case of conversion.
   2505   STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
   2506                 SharedFunctionInfo::kStrictModeByteOffset);
   2507   __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
   2508   // We need to convert the receiver for non-native sloppy mode functions.
   2509   Label done_convert;
   2510   __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset));
   2511   __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
   2512                          (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
   2513   __ Branch(&done_convert, ne, at, Operand(zero_reg));
   2514   {
   2515     // ----------- S t a t e -------------
   2516     //  -- a0 : the number of arguments (not including the receiver)
   2517     //  -- a1 : the function to call (checked to be a JSFunction)
   2518     //  -- a2 : the shared function info.
   2519     //  -- cp : the function context.
   2520     // -----------------------------------
   2521 
   2522     if (mode == ConvertReceiverMode::kNullOrUndefined) {
   2523       // Patch receiver to global proxy.
   2524       __ LoadGlobalProxy(a3);
   2525     } else {
   2526       Label convert_to_object, convert_receiver;
      __ Dlsa(at, sp, a0, kPointerSizeLog2);
      __ ld(a3, MemOperand(at));
      __ JumpIfSmi(a3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ GetObjectType(a3, a4, a4);
      __ Branch(&done_convert, hs, a4, Operand(FIRST_JS_RECEIVER_TYPE));
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(a3);
        }
        __ Branch(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(a0);
        __ Push(a0, a1);
        __ mov(a0, a3);
        __ Push(cp);
        __ Call(masm->isolate()->builtins()->ToObject(),
                RelocInfo::CODE_TARGET);
        __ Pop(cp);
        __ mov(a3, v0);
        __ Pop(a0, a1);
        __ SmiUntag(a0);
      }
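      // The call above may have clobbered a2, so reload the shared function
      // info before continuing.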
      __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ Dlsa(at, sp, a0, kPointerSizeLog2);
    __ sd(a3, MemOperand(at));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  //  -- a2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, a0, t0, t1, t2);
  }

  __ lw(a2,
        FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(a0);
  ParameterCount expected(a2);
  __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor"; we need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}

// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, a0, t0, t1, t2);
  }

  // Patch the receiver to [[BoundThis]].
  {
    __ ld(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
    __ Dlsa(a4, sp, a0, kPointerSizeLog2);
    __ sd(at, MemOperand(a4));
  }

  // Load [[BoundArguments]] into a2 and its length into a4.
  __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset));
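  // On 64-bit V8 a smi keeps its 32-bit payload in the upper word, so
  // UntagSmiFieldMemOperand reads the payload with a 32-bit load, yielding
  // the untagged length without a separate shift.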

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- a4 : the number of [[BoundArguments]]
  // -----------------------------------

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ dsll(a5, a4, kPointerSizeLog2);
    __ Dsubu(sp, sp, Operand(a5));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
    __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
    // Restore the stack pointer.
    __ Daddu(sp, sp, Operand(a5));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(a5, zero_reg);
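    // a5 is the destination slot index (counting up from the new stack top)
    // and a4, still holding the number of [[BoundArguments]], doubles as
    // the source slot index. a0 + 1 slots (the arguments plus the receiver
    // slot) are moved down, hence the "gt" exit condition.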
    __ bind(&loop);
    __ Branch(&done_loop, gt, a5, Operand(a0));
    __ Dlsa(a6, sp, a4, kPointerSizeLog2);
    __ ld(at, MemOperand(a6));
    __ Dlsa(a6, sp, a5, kPointerSizeLog2);
    __ sd(at, MemOperand(a6));
    __ Daddu(a4, a4, Operand(1));
    __ Daddu(a5, a5, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset));
    __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
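    // Copy in reverse: a4 counts down from length - 1, each element is
    // stored at sp[a0 * kPointerSize], and a0 is bumped so that it ends up
    // holding the combined argument count.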
    __ bind(&loop);
    __ Dsubu(a4, a4, Operand(1));
    __ Branch(&done_loop, lt, a4, Operand(zero_reg));
    __ Dlsa(a5, a2, a4, kPointerSizeLog2);
    __ ld(at, MemOperand(a5));
    __ Dlsa(a5, sp, a0, kPointerSizeLog2);
    __ sd(at, MemOperand(a5));
    __ Daddu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
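  // The Call builtin is reached through an ExternalReference cell holding
  // its Code object; the entry point is the Code object plus the header
  // size (minus the heap object tag).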
  __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                      masm->isolate())));
  __ ld(at, MemOperand(at));
  __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}

// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(a1, &non_callable);
  __ bind(&non_smi);
  __ GetObjectType(a1, t1, t2);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Check if target has a [[Call]] internal method.
  __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t1, t1, Operand(1 << Map::kIsCallable));
  __ Branch(&non_callable, eq, t1, Operand(zero_reg));

  __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE));

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, a0, t0, t1, t2);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(a1);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ Daddu(a0, a0, 2);
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ Dlsa(at, sp, a0, kPointerSizeLog2);
  __ sd(a1, MemOperand(at));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}

static void CheckSpreadAndPushToStack(MacroAssembler* masm) {
  Register argc = a0;
  Register constructor = a1;
  Register new_target = a3;

  Register scratch = t0;
  Register scratch2 = t1;

  Register spread = a2;
  Register spread_map = a4;

  Register spread_len = a4;

  Register native_context = a5;

  Label runtime_call, push_args;
  __ ld(spread, MemOperand(sp, 0));
  __ JumpIfSmi(spread, &runtime_call);
  __ ld(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset));
  __ ld(native_context, NativeContextMemOperand());

  // Check that the spread is an array.
  __ lbu(scratch, FieldMemOperand(spread_map, Map::kInstanceTypeOffset));
  __ Branch(&runtime_call, ne, scratch, Operand(JS_ARRAY_TYPE));

  // Check that we have the original ArrayPrototype.
  __ ld(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset));
  __ ld(scratch2, ContextMemOperand(native_context,
                                    Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
  __ Branch(&runtime_call, ne, scratch, Operand(scratch2));

  // Check that the ArrayPrototype hasn't been modified in a way that would
  // affect iteration.
  __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
  __ ld(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ Branch(&runtime_call, ne, scratch,
            Operand(Smi::FromInt(Isolate::kProtectorValid)));
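  // A protector cell is invalidated by the runtime the first time user code
  // changes the behavior it guards; once the cell no longer holds
  // kProtectorValid, the fast path must be abandoned.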

  // Check that the map of the initial array iterator hasn't changed.
  __ ld(scratch,
        ContextMemOperand(native_context,
                          Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
  __ ld(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
  __ ld(scratch2,
        ContextMemOperand(native_context,
                          Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
  __ Branch(&runtime_call, ne, scratch, Operand(scratch2));

  // For FastPacked kinds, iteration will have the same effect as simply
  // accessing each property in order.
  Label no_protector_check;
  __ lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset));
  __ DecodeField<Map::ElementsKindBits>(scratch);
  __ Branch(&runtime_call, hi, scratch, Operand(FAST_HOLEY_ELEMENTS));
  // For non-FastHoley kinds, we can skip the protector check.
  __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS));
  __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS));
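  // Packed kinds cannot contain holes, so only the holey kinds need the
  // ArrayProtector: reading a hole would otherwise consult the prototype
  // chain for the element.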
  // Check the ArrayProtector cell.
  __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
  __ ld(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ Branch(&runtime_call, ne, scratch,
            Operand(Smi::FromInt(Isolate::kProtectorValid)));

  __ bind(&no_protector_check);
  // Load the FixedArray backing store, but use the length from the array.
  __ lw(spread_len, UntagSmiFieldMemOperand(spread, JSArray::kLengthOffset));
  __ ld(spread, FieldMemOperand(spread, JSArray::kElementsOffset));
  __ Branch(&push_args);

  __ bind(&runtime_call);
  {
    // Call the builtin for the result of the spread.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ SmiTag(argc);
    __ Push(constructor, new_target, argc, spread);
    __ CallRuntime(Runtime::kSpreadIterableFixed);
    __ mov(spread, v0);
    __ Pop(constructor, new_target, argc);
    __ SmiUntag(argc);
  }

  {
    // Calculate the new nargs including the result of the spread.
    __ lw(spread_len,
          UntagSmiFieldMemOperand(spread, FixedArray::kLengthOffset));

    __ bind(&push_args);
    // argc += spread_len - 1. Subtract 1 for the spread itself.
    __ Daddu(argc, argc, spread_len);
    __ Dsubu(argc, argc, Operand(1));
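    // For example, f(a, ...xs) with xs of length 3 enters with argc == 2
    // (a plus the spread itself) and leaves with argc == 4.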

    // Pop the spread argument off the stack.
    __ Pop(scratch);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
    // Make scratch the space we have left. The stack might already be
    // overflowed here which will cause scratch to become negative.
    __ Dsubu(scratch, sp, scratch);
    // Check if the arguments will overflow the stack.
    __ dsll(at, spread_len, kPointerSizeLog2);
    __ Branch(&done, gt, scratch, Operand(at));  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // Put the evaluated spread onto the stack as additional arguments.
  {
    __ mov(scratch, zero_reg);
    Label done, push, loop;
    __ bind(&loop);
    __ Branch(&done, eq, scratch, Operand(spread_len));
    __ Dlsa(scratch2, spread, scratch, kPointerSizeLog2);
    __ ld(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize));
    __ JumpIfNotRoot(scratch2, Heap::kTheHoleValueRootIndex, &push);
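    // Holes read out of a holey backing store are replaced with undefined,
    // matching what iteration would have produced given the protector
    // checks above.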
    __ LoadRoot(scratch2, Heap::kUndefinedValueRootIndex);
    __ bind(&push);
    __ Push(scratch2);
    __ Daddu(scratch, scratch, Operand(1));
    __ Branch(&loop);
    __ bind(&done);
  }
}

// static
void Builtins::Generate_CallWithSpread(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------

  // CheckSpreadAndPushToStack will push a3 to save it.
  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
  CheckSpreadAndPushToStack(masm);
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            TailCallMode::kDisallow),
          RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(a1);

  // The calling convention for function specific ConstructStubs requires
  // a2 to contain either an AllocationSite or undefined.
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset));
  __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}

// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  // Load [[BoundArguments]] into a2 and its length into a4.
  __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset));

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- a3 : the new target (checked to be a constructor)
  //  -- a4 : the number of [[BoundArguments]]
  // -----------------------------------

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ dsll(a5, a4, kPointerSizeLog2);
    __ Dsubu(sp, sp, Operand(a5));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
    __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
    // Restore the stack pointer.
    __ Daddu(sp, sp, Operand(a5));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(a5, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, ge, a5, Operand(a0));
    __ Dlsa(a6, sp, a4, kPointerSizeLog2);
    __ ld(at, MemOperand(a6));
    __ Dlsa(a6, sp, a5, kPointerSizeLog2);
    __ sd(at, MemOperand(a6));
    __ Daddu(a4, a4, Operand(1));
    __ Daddu(a5, a5, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset));
    __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Dsubu(a4, a4, Operand(1));
    __ Branch(&done_loop, lt, a4, Operand(zero_reg));
    __ Dlsa(a5, a2, a4, kPointerSizeLog2);
    __ ld(at, MemOperand(a5));
    __ Dlsa(a5, sp, a0, kPointerSizeLog2);
    __ sd(at, MemOperand(a5));
    __ Daddu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label skip_load;
    __ Branch(&skip_load, ne, a1, Operand(a3));
    __ ld(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&skip_load);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ ld(at, MemOperand(at));
  __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}

// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSProxy)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(a1, a3);
  // Include the pushed new_target, constructor and the receiver.
  __ Daddu(a0, a0, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (can be any Object)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(a1, &non_constructor);

  // Dispatch based on instance type.
  __ ld(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset));
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));

  // Check if target has a [[Construct]] internal method.
  __ lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t3, t3, Operand(1 << Map::kIsConstructor));
  __ Branch(&non_constructor, eq, t3, Operand(zero_reg));

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Only dispatch to proxies after checking whether they are constructors.
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq, t2, Operand(JS_PROXY_TYPE));

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ Dlsa(at, sp, a0, kPointerSizeLog2);
    __ sd(a1, MemOperand(at));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (can be any Object)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  CheckSpreadAndPushToStack(masm);
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : requested object size (untagged)
  //  -- ra : return address
  // -----------------------------------
  __ SmiTag(a0);
  __ Push(a0);
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : requested object size (untagged)
  //  -- ra : return address
  // -----------------------------------
  __ SmiTag(a0);
  __ Move(a1, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(a0, a1);
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}

// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : message_id as Smi
  //  -- ra : return address
  // -----------------------------------
  __ Push(a0);
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAbort);
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  //  -- a3: new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ Branch(&dont_adapt_arguments, eq, a2,
            Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use Uless as the number of arguments should always be greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, a2, a5, at, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into a4.
    __ SmiScale(a0, a0, kPointerSizeLog2);
    __ Daddu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Daddu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ dsll(a4, a2, kPointerSizeLog2);
    __ dsubu(a4, a0, a4);
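    // The loop below copies a2 + 1 slots (the receiver plus the expected
    // number of arguments), walking from a0 down to a4 inclusive; any
    // surplus actual arguments are simply left behind.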

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // a4: copy end address

    Label copy;
    __ bind(&copy);
    __ ld(a5, MemOperand(a0));
    __ push(a5);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a4));
    __ daddiu(a0, a0, -kPointerSize);  // In delay slot.
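    // On MIPS the instruction in the branch delay slot executes regardless
    // of whether the branch is taken, so a0 is decremented on every
    // iteration, including the last.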

    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, a2, a5, at, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into a7.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ SmiScale(a0, a0, kPointerSizeLog2);
    __ Daddu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Daddu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Daddu(a7, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // a7: copy end address
    Label copy;
    __ bind(&copy);
    __ ld(a4, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Dsubu(sp, sp, kPointerSize);
    __ Dsubu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a7));
    __ sd(a4, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
    __ dsll(a6, a2, kPointerSizeLog2);
    __ Dsubu(a4, fp, Operand(a6));
    // Adjust for frame.
    __ Dsubu(a4, a4, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                             2 * kPointerSize));
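    // a4 is now the lowest slot of the new frame; the fill loop below
    // pushes undefined until sp reaches it.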

    Label fill;
    __ bind(&fill);
    __ Dsubu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a4));
    __ sd(a5, MemOperand(sp));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mov(a0, a2);
  // a0 : expected number of arguments
  // a1 : function (passed through to callee)
  // a3 : new target (passed through to callee)
  __ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Call(a4);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
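  // Note: the recorded pc offset lets the deoptimizer recognize returns
  // into this adaptor when it walks and materializes frames.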

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Jump(a4);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ break_(0xCC);
  }
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS64