      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #if V8_TARGET_ARCH_MIPS64
      6 
      7 #include "src/code-factory.h"
      8 #include "src/code-stubs.h"
      9 #include "src/counters.h"
     10 #include "src/debug/debug.h"
     11 #include "src/deoptimizer.h"
     12 #include "src/frame-constants.h"
     13 #include "src/frames.h"
     14 #include "src/mips64/constants-mips64.h"
     15 #include "src/objects-inl.h"
     16 #include "src/objects/js-generator.h"
     17 #include "src/runtime/runtime.h"
     18 #include "src/wasm/wasm-objects.h"
     19 
     20 namespace v8 {
     21 namespace internal {
     22 
     23 #define __ ACCESS_MASM(masm)
     24 
     25 void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
     26                                 ExitFrameType exit_frame_type) {
     27   __ li(kJavaScriptCallExtraArg1Register, ExternalReference::Create(address));
     28   if (exit_frame_type == BUILTIN_EXIT) {
     29     __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
     30             RelocInfo::CODE_TARGET);
     31   } else {
     32     DCHECK(exit_frame_type == EXIT);
     33     __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
     34             RelocInfo::CODE_TARGET);
     35   }
     36 }
     37 
     38 void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
     39   // ----------- S t a t e -------------
     40   //  -- a0     : number of arguments
     41   //  -- ra     : return address
     42   //  -- sp[...]: constructor arguments
     43   // -----------------------------------
     44   Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
     45 
     46   if (FLAG_debug_code) {
      47     // The initial map for the builtin InternalArray function should be a map.
     48     __ Ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
     49     __ SmiTst(a2, a4);
     50     __ Assert(ne, AbortReason::kUnexpectedInitialMapForInternalArrayFunction,
     51               a4, Operand(zero_reg));
     52     __ GetObjectType(a2, a3, a4);
     53     __ Assert(eq, AbortReason::kUnexpectedInitialMapForInternalArrayFunction,
     54               a4, Operand(MAP_TYPE));
     55   }
     56 
     57   // Run the native code for the InternalArray function called as a normal
     58   // function.
     59   __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
     60   __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
     61           RelocInfo::CODE_TARGET);
     62 }
     63 
     64 static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
     65                                            Runtime::FunctionId function_id) {
     66   // ----------- S t a t e -------------
     67   //  -- a0 : argument count (preserved for callee)
     68   //  -- a1 : target function (preserved for callee)
     69   //  -- a3 : new target (preserved for callee)
     70   // -----------------------------------
     71   {
     72     FrameScope scope(masm, StackFrame::INTERNAL);
      73     // Push the argument count, the target function and the new target, plus
      74     // the target function again as the argument to the runtime call.
     75     __ SmiTag(a0);
     76     __ Push(a0, a1, a3, a1);
     77 
     78     __ CallRuntime(function_id, 1);
     79     // Restore target function and new target.
     80     __ Pop(a0, a1, a3);
     81     __ SmiUntag(a0);
     82   }
     83 
     84   static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
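           // The runtime call above returns the Code object to continue with in v0;
           // compute its entry address and tail-call it.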
     85   __ Daddu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
     86   __ Jump(a2);
     87 }
     88 
     89 namespace {
     90 
     91 void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
     92   // ----------- S t a t e -------------
     93   //  -- a0     : number of arguments
     94   //  -- a1     : constructor function
     95   //  -- a3     : new target
     96   //  -- cp     : context
     97   //  -- ra     : return address
     98   //  -- sp[...]: constructor arguments
     99   // -----------------------------------
    100 
    101   // Enter a construct frame.
    102   {
    103     FrameScope scope(masm, StackFrame::CONSTRUCT);
    104 
    105     // Preserve the incoming parameters on the stack.
    106     __ SmiTag(a0);
    107     __ Push(cp, a0);
    108     __ SmiUntag(a0);
    109 
    110     // The receiver for the builtin/api call.
    111     __ PushRoot(Heap::kTheHoleValueRootIndex);
    112 
    113     // Set up pointer to last argument.
    114     __ Daddu(t2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
    115 
    116     // Copy arguments and receiver to the expression stack.
    117     Label loop, entry;
    118     __ mov(t3, a0);
    119     // ----------- S t a t e -------------
    120     //  --                        a0: number of arguments (untagged)
    121     //  --                        a3: new target
    122     //  --                        t2: pointer to last argument
    123     //  --                        t3: counter
    124     //  --        sp[0*kPointerSize]: the hole (receiver)
    125     //  --        sp[1*kPointerSize]: number of arguments (tagged)
    126     //  --        sp[2*kPointerSize]: context
    127     // -----------------------------------
    128     __ jmp(&entry);
    129     __ bind(&loop);
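             // t0 = t2 + (t3 << kPointerSizeLog2): the address of argument t3.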
    130     __ Dlsa(t0, t2, t3, kPointerSizeLog2);
    131     __ Ld(t1, MemOperand(t0));
    132     __ push(t1);
    133     __ bind(&entry);
    134     __ Daddu(t3, t3, Operand(-1));
    135     __ Branch(&loop, greater_equal, t3, Operand(zero_reg));
    136 
    137     // Call the function.
    138     // a0: number of arguments (untagged)
    139     // a1: constructor function
    140     // a3: new target
    141     ParameterCount actual(a0);
    142     __ InvokeFunction(a1, a3, actual, CALL_FUNCTION);
    143 
    144     // Restore context from the frame.
    145     __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
    146     // Restore smi-tagged arguments count from the frame.
    147     __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    148     // Leave construct frame.
    149   }
    150 
    151   // Remove caller arguments from the stack and return.
    152   __ SmiScale(a4, a1, kPointerSizeLog2);
    153   __ Daddu(sp, sp, a4);
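           // Drop one extra slot for the receiver, which is not included in a1.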
    154   __ Daddu(sp, sp, kPointerSize);
    155   __ Ret();
    156 }
    157 
    158 }  // namespace
    159 
    160 // The construct stub for ES5 constructor functions and ES6 class constructors.
    161 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
    162   // ----------- S t a t e -------------
    163   //  --      a0: number of arguments (untagged)
    164   //  --      a1: constructor function
    165   //  --      a3: new target
    166   //  --      cp: context
    167   //  --      ra: return address
    168   //  -- sp[...]: constructor arguments
    169   // -----------------------------------
    170 
    171   // Enter a construct frame.
    172   {
    173     FrameScope scope(masm, StackFrame::CONSTRUCT);
    174     Label post_instantiation_deopt_entry, not_create_implicit_receiver;
    175 
    176     // Preserve the incoming parameters on the stack.
    177     __ SmiTag(a0);
    178     __ Push(cp, a0, a1);
    179     __ PushRoot(Heap::kTheHoleValueRootIndex);
    180     __ Push(a3);
    181 
    182     // ----------- S t a t e -------------
    183     //  --        sp[0*kPointerSize]: new target
    184     //  --        sp[1*kPointerSize]: padding
    185     //  -- a1 and sp[2*kPointerSize]: constructor function
    186     //  --        sp[3*kPointerSize]: number of arguments (tagged)
    187     //  --        sp[4*kPointerSize]: context
    188     // -----------------------------------
    189 
    190     __ Ld(t2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    191     __ lwu(t2, FieldMemOperand(t2, SharedFunctionInfo::kFlagsOffset));
    192     __ And(t2, t2, Operand(SharedFunctionInfo::IsDerivedConstructorBit::kMask));
    193     __ Branch(&not_create_implicit_receiver, ne, t2, Operand(zero_reg));
    194 
    195     // If not derived class constructor: Allocate the new receiver object.
    196     __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
    197                         t2, t3);
    198     __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
    199             RelocInfo::CODE_TARGET);
    200     __ Branch(&post_instantiation_deopt_entry);
    201 
    202     // Else: use TheHoleValue as receiver for constructor call
    203     __ bind(&not_create_implicit_receiver);
    204     __ LoadRoot(v0, Heap::kTheHoleValueRootIndex);
    205 
    206     // ----------- S t a t e -------------
    207     //  --                          v0: receiver
    208     //  -- Slot 4 / sp[0*kPointerSize]: new target
    209     //  -- Slot 3 / sp[1*kPointerSize]: padding
    210     //  -- Slot 2 / sp[2*kPointerSize]: constructor function
    211     //  -- Slot 1 / sp[3*kPointerSize]: number of arguments (tagged)
    212     //  -- Slot 0 / sp[4*kPointerSize]: context
    213     // -----------------------------------
    214     // Deoptimizer enters here.
    215     masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
    216         masm->pc_offset());
    217     __ bind(&post_instantiation_deopt_entry);
    218 
    219     // Restore new target.
    220     __ Pop(a3);
    221     // Push the allocated receiver to the stack. We need two copies
    222     // because we may have to return the original one and the calling
    223     // conventions dictate that the called function pops the receiver.
    224     __ Push(v0, v0);
    225 
    226     // ----------- S t a t e -------------
     227     //  --                 a3: new target
    228     //  -- sp[0*kPointerSize]: implicit receiver
    229     //  -- sp[1*kPointerSize]: implicit receiver
    230     //  -- sp[2*kPointerSize]: padding
    231     //  -- sp[3*kPointerSize]: constructor function
    232     //  -- sp[4*kPointerSize]: number of arguments (tagged)
    233     //  -- sp[5*kPointerSize]: context
    234     // -----------------------------------
    235 
    236     // Restore constructor function and argument count.
    237     __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
    238     __ Ld(a0, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    239     __ SmiUntag(a0);
    240 
    241     // Set up pointer to last argument.
    242     __ Daddu(t2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
    243 
    244     // Copy arguments and receiver to the expression stack.
    245     Label loop, entry;
    246     __ mov(t3, a0);
    247     // ----------- S t a t e -------------
    248     //  --                        a0: number of arguments (untagged)
    249     //  --                        a3: new target
    250     //  --                        t2: pointer to last argument
    251     //  --                        t3: counter
    252     //  --        sp[0*kPointerSize]: implicit receiver
    253     //  --        sp[1*kPointerSize]: implicit receiver
    254     //  --        sp[2*kPointerSize]: padding
    255     //  -- a1 and sp[3*kPointerSize]: constructor function
    256     //  --        sp[4*kPointerSize]: number of arguments (tagged)
    257     //  --        sp[5*kPointerSize]: context
    258     // -----------------------------------
    259     __ jmp(&entry);
    260     __ bind(&loop);
    261     __ Dlsa(t0, t2, t3, kPointerSizeLog2);
    262     __ Ld(t1, MemOperand(t0));
    263     __ push(t1);
    264     __ bind(&entry);
    265     __ Daddu(t3, t3, Operand(-1));
    266     __ Branch(&loop, greater_equal, t3, Operand(zero_reg));
    267 
    268     // Call the function.
    269     ParameterCount actual(a0);
    270     __ InvokeFunction(a1, a3, actual, CALL_FUNCTION);
    271 
    272     // ----------- S t a t e -------------
    273     //  --                 v0: constructor result
    274     //  -- sp[0*kPointerSize]: implicit receiver
    275     //  -- sp[1*kPointerSize]: padding
    276     //  -- sp[2*kPointerSize]: constructor function
    277     //  -- sp[3*kPointerSize]: number of arguments
    278     //  -- sp[4*kPointerSize]: context
    279     // -----------------------------------
    280 
    281     // Store offset of return address for deoptimizer.
    282     masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
    283         masm->pc_offset());
    284 
    285     // Restore the context from the frame.
    286     __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
    287 
    288     // If the result is an object (in the ECMA sense), we should get rid
    289     // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    290     // on page 74.
    291     Label use_receiver, do_throw, leave_frame;
    292 
    293     // If the result is undefined, we jump out to using the implicit receiver.
    294     __ JumpIfRoot(v0, Heap::kUndefinedValueRootIndex, &use_receiver);
    295 
    296     // Otherwise we do a smi check and fall through to check if the return value
    297     // is a valid receiver.
    298 
    299     // If the result is a smi, it is *not* an object in the ECMA sense.
    300     __ JumpIfSmi(v0, &use_receiver);
    301 
    302     // If the type of the result (stored in its map) is less than
    303     // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
    304     __ GetObjectType(v0, t2, t2);
    305     STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    306     __ Branch(&leave_frame, greater_equal, t2, Operand(FIRST_JS_RECEIVER_TYPE));
    307     __ Branch(&use_receiver);
    308 
    309     __ bind(&do_throw);
    310     __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
    311 
    312     // Throw away the result of the constructor invocation and use the
    313     // on-stack receiver as the result.
    314     __ bind(&use_receiver);
    315     __ Ld(v0, MemOperand(sp, 0 * kPointerSize));
    316     __ JumpIfRoot(v0, Heap::kTheHoleValueRootIndex, &do_throw);
    317 
    318     __ bind(&leave_frame);
    319     // Restore smi-tagged arguments count from the frame.
    320     __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    321     // Leave construct frame.
    322   }
    323   // Remove caller arguments from the stack and return.
    324   __ SmiScale(a4, a1, kPointerSizeLog2);
    325   __ Daddu(sp, sp, a4);
    326   __ Daddu(sp, sp, kPointerSize);
    327   __ Ret();
    328 }
    329 
    330 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
    331   Generate_JSBuiltinsConstructStubHelper(masm);
    332 }
    333 
    334 static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
    335                                           Register sfi_data,
    336                                           Register scratch1) {
    337   Label done;
    338 
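           // If |sfi_data| is an InterpreterData, replace it with the BytecodeArray
           // it wraps; otherwise it is expected to already be a BytecodeArray.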
    339   __ GetObjectType(sfi_data, scratch1, scratch1);
    340   __ Branch(&done, ne, scratch1, Operand(INTERPRETER_DATA_TYPE));
    341   __ Ld(sfi_data,
    342         FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
    343 
    344   __ bind(&done);
    345 }
    346 
    347 // static
    348 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
    349   // ----------- S t a t e -------------
    350   //  -- v0 : the value to pass to the generator
    351   //  -- a1 : the JSGeneratorObject to resume
    352   //  -- ra : return address
    353   // -----------------------------------
    354   __ AssertGeneratorObject(a1);
    355 
    356   // Store input value into generator object.
    357   __ Sd(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOrDebugPosOffset));
    358   __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, v0, a3,
    359                       kRAHasNotBeenSaved, kDontSaveFPRegs);
    360 
    361   // Load suspended function and context.
    362   __ Ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
    363   __ Ld(cp, FieldMemOperand(a4, JSFunction::kContextOffset));
    364 
    365   // Flood function if we are stepping.
    366   Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
    367   Label stepping_prepared;
    368   ExternalReference debug_hook =
    369       ExternalReference::debug_hook_on_function_call_address(masm->isolate());
    370   __ li(a5, debug_hook);
    371   __ Lb(a5, MemOperand(a5));
    372   __ Branch(&prepare_step_in_if_stepping, ne, a5, Operand(zero_reg));
    373 
    374   // Flood function if we need to continue stepping in the suspended generator.
    375   ExternalReference debug_suspended_generator =
    376       ExternalReference::debug_suspended_generator_address(masm->isolate());
    377   __ li(a5, debug_suspended_generator);
    378   __ Ld(a5, MemOperand(a5));
    379   __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(a5));
    380   __ bind(&stepping_prepared);
    381 
    382   // Check the stack for overflow. We are not trying to catch interruptions
    383   // (i.e. debug break and preemption) here, so check the "real stack limit".
    384   Label stack_overflow;
    385   __ LoadRoot(kScratchReg, Heap::kRealStackLimitRootIndex);
    386   __ Branch(&stack_overflow, lo, sp, Operand(kScratchReg));
    387 
    388   // Push receiver.
    389   __ Ld(a5, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
    390   __ Push(a5);
    391 
    392   // ----------- S t a t e -------------
    393   //  -- a1    : the JSGeneratorObject to resume
    394   //  -- a4    : generator function
    395   //  -- cp    : generator context
    396   //  -- ra    : return address
    397   //  -- sp[0] : generator receiver
    398   // -----------------------------------
    399 
     400   // Copy the function arguments from the generator object's parameters-and-
     401   // registers array. Since the parser forced context allocation for any
     402   // variables in generators, the actual argument values have already been
     403   // copied into the context, so the values pushed here are never read.
    404   __ Ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
    405   __ Lhu(a3,
    406          FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
    407   __ Ld(t1,
    408         FieldMemOperand(a1, JSGeneratorObject::kParametersAndRegistersOffset));
    409   {
    410     Label done_loop, loop;
    411     __ Move(t2, zero_reg);
    412     __ bind(&loop);
    413     __ Dsubu(a3, a3, Operand(1));
    414     __ Branch(&done_loop, lt, a3, Operand(zero_reg));
    415     __ Dlsa(kScratchReg, t1, t2, kPointerSizeLog2);
    416     __ Ld(kScratchReg, FieldMemOperand(kScratchReg, FixedArray::kHeaderSize));
    417     __ Push(kScratchReg);
    418     __ Daddu(t2, t2, Operand(1));
    419     __ Branch(&loop);
    420     __ bind(&done_loop);
    421   }
    422 
    423   // Underlying function needs to have bytecode available.
    424   if (FLAG_debug_code) {
    425     __ Ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
    426     __ Ld(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset));
    427     GetSharedFunctionInfoBytecode(masm, a3, a0);
    428     __ GetObjectType(a3, a3, a3);
    429     __ Assert(eq, AbortReason::kMissingBytecodeArray, a3,
    430               Operand(BYTECODE_ARRAY_TYPE));
    431   }
    432 
    433   // Resume (Ignition/TurboFan) generator object.
    434   {
    435     __ Ld(a0, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
    436     __ Lhu(a0, FieldMemOperand(
    437                    a0, SharedFunctionInfo::kFormalParameterCountOffset));
    438     // We abuse new.target both to indicate that this is a resume call and to
    439     // pass in the generator object.  In ordinary calls, new.target is always
    440     // undefined because generator functions are non-constructable.
    441     __ Move(a3, a1);
    442     __ Move(a1, a4);
    443     static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
    444     __ Ld(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
    445     __ Daddu(a2, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
    446     __ Jump(a2);
    447   }
    448 
    449   __ bind(&prepare_step_in_if_stepping);
    450   {
    451     FrameScope scope(masm, StackFrame::INTERNAL);
    452     __ Push(a1, a4);
    453     // Push hole as receiver since we do not use it for stepping.
    454     __ PushRoot(Heap::kTheHoleValueRootIndex);
    455     __ CallRuntime(Runtime::kDebugOnFunctionCall);
    456     __ Pop(a1);
    457   }
    458   __ Branch(USE_DELAY_SLOT, &stepping_prepared);
    459   __ Ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
    460 
    461   __ bind(&prepare_step_in_suspended_generator);
    462   {
    463     FrameScope scope(masm, StackFrame::INTERNAL);
    464     __ Push(a1);
    465     __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    466     __ Pop(a1);
    467   }
    468   __ Branch(USE_DELAY_SLOT, &stepping_prepared);
    469   __ Ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
    470 
    471   __ bind(&stack_overflow);
    472   {
    473     FrameScope scope(masm, StackFrame::INTERNAL);
    474     __ CallRuntime(Runtime::kThrowStackOverflow);
    475     __ break_(0xCC);  // This should be unreachable.
    476   }
    477 }
    478 
    479 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
    480   FrameScope scope(masm, StackFrame::INTERNAL);
    481   __ Push(a1);
    482   __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
    483 }
    484 
    485 // Clobbers a2; preserves all other registers.
    486 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {
    487   // Check the stack for overflow. We are not trying to catch
    488   // interruptions (e.g. debug break and preemption) here, so the "real stack
    489   // limit" is checked.
    490   Label okay;
    491   __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
     492   // Make a2 the amount of space we have left. The stack might already have
     493   // overflowed here, which will cause a2 to become negative.
    494   __ dsubu(a2, sp, a2);
    495   // Check if the arguments will overflow the stack.
    496   __ dsll(a7, argc, kPointerSizeLog2);
    497   __ Branch(&okay, gt, a2, Operand(a7));  // Signed comparison.
    498 
    499   // Out of stack space.
    500   __ CallRuntime(Runtime::kThrowStackOverflow);
    501 
    502   __ bind(&okay);
    503 }
    504 
    505 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
    506                                              bool is_construct) {
    507   // ----------- S t a t e -------------
    508   //  -- a0: new.target
    509   //  -- a1: function
    510   //  -- a2: receiver_pointer
    511   //  -- a3: argc
    512   //  -- s0: argv
    513   // -----------------------------------
    514   ProfileEntryHookStub::MaybeCallEntryHook(masm);
    515 
    516   // Enter an internal frame.
    517   {
    518     FrameScope scope(masm, StackFrame::INTERNAL);
    519 
     520     // Set up the context (we need to use the caller context from the isolate).
    521     ExternalReference context_address = ExternalReference::Create(
    522         IsolateAddressId::kContextAddress, masm->isolate());
    523     __ li(cp, context_address);
    524     __ Ld(cp, MemOperand(cp));
    525 
    526     // Push the function and the receiver onto the stack.
    527     __ Push(a1, a2);
    528 
    529     // Check if we have enough stack space to push all arguments.
    530     // Clobbers a2.
    531     Generate_CheckStackOverflow(masm, a3);
    532 
    533     // Remember new.target.
    534     __ mov(a5, a0);
    535 
    536     // Copy arguments to the stack in a loop.
    537     // a3: argc
    538     // s0: argv, i.e. points to first arg
    539     Label loop, entry;
    540     __ Dlsa(a6, s0, a3, kPointerSizeLog2);
    541     __ b(&entry);
    542     __ nop();  // Branch delay slot nop.
    543     // a6 points past last arg.
    544     __ bind(&loop);
    545     __ Ld(a4, MemOperand(s0));  // Read next parameter.
    546     __ daddiu(s0, s0, kPointerSize);
    547     __ Ld(a4, MemOperand(a4));  // Dereference handle.
    548     __ push(a4);                // Push parameter.
    549     __ bind(&entry);
    550     __ Branch(&loop, ne, s0, Operand(a6));
    551 
     552     // Set up new.target and argc.
    553     __ mov(a0, a3);
    554     __ mov(a3, a5);
    555 
    556     // Initialize all JavaScript callee-saved registers, since they will be seen
    557     // by the garbage collector as part of handlers.
    558     __ LoadRoot(a4, Heap::kUndefinedValueRootIndex);
    559     __ mov(s1, a4);
    560     __ mov(s2, a4);
    561     __ mov(s3, a4);
    562     __ mov(s4, a4);
    563     __ mov(s5, a4);
    564     // s6 holds the root address. Do not clobber.
    565     // s7 is cp. Do not init.
    566 
    567     // Invoke the code.
    568     Handle<Code> builtin = is_construct
    569                                ? BUILTIN_CODE(masm->isolate(), Construct)
    570                                : masm->isolate()->builtins()->Call();
    571     __ Call(builtin, RelocInfo::CODE_TARGET);
    572 
    573     // Leave internal frame.
    574   }
    575   __ Jump(ra);
    576 }
    577 
    578 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
    579   Generate_JSEntryTrampolineHelper(masm, false);
    580 }
    581 
    582 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
    583   Generate_JSEntryTrampolineHelper(masm, true);
    584 }
    585 
    586 static void ReplaceClosureCodeWithOptimizedCode(
    587     MacroAssembler* masm, Register optimized_code, Register closure,
    588     Register scratch1, Register scratch2, Register scratch3) {
    589   // Store code entry in the closure.
    590   __ Sd(optimized_code, FieldMemOperand(closure, JSFunction::kCodeOffset));
    591   __ mov(scratch1, optimized_code);  // Write barrier clobbers scratch1 below.
    592   __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
    593                       kRAHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
    594                       OMIT_SMI_CHECK);
    595 }
    596 
    597 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
    598   Register args_count = scratch;
    599 
    600   // Get the arguments + receiver count.
    601   __ Ld(args_count,
    602         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
    603   __ Lw(t0, FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));
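           // |args_count| is expected to alias t0 (callers pass t0 as the scratch
           // register), so it now holds the parameter area size in bytes.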
    604 
    605   // Leave the frame (also dropping the register file).
    606   __ LeaveFrame(StackFrame::INTERPRETED);
    607 
    608   // Drop receiver + arguments.
    609   __ Daddu(sp, sp, args_count);
    610 }
    611 
    612 // Tail-call |function_id| if |smi_entry| == |marker|
    613 static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
    614                                           Register smi_entry,
    615                                           OptimizationMarker marker,
    616                                           Runtime::FunctionId function_id) {
    617   Label no_match;
    618   __ Branch(&no_match, ne, smi_entry, Operand(Smi::FromEnum(marker)));
    619   GenerateTailCallToReturnedCode(masm, function_id);
    620   __ bind(&no_match);
    621 }
    622 
    623 static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
    624                                            Register feedback_vector,
    625                                            Register scratch1, Register scratch2,
    626                                            Register scratch3) {
    627   // ----------- S t a t e -------------
    628   //  -- a0 : argument count (preserved for callee if needed, and caller)
    629   //  -- a3 : new target (preserved for callee if needed, and caller)
    630   //  -- a1 : target function (preserved for callee if needed, and caller)
    631   //  -- feedback vector (preserved for caller if needed)
    632   // -----------------------------------
    633   DCHECK(
    634       !AreAliased(feedback_vector, a0, a1, a3, scratch1, scratch2, scratch3));
    635 
    636   Label optimized_code_slot_is_weak_ref, fallthrough;
    637 
    638   Register closure = a1;
    639   Register optimized_code_entry = scratch1;
    640 
    641   __ Ld(optimized_code_entry,
    642         FieldMemOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
    643 
    644   // Check if the code entry is a Smi. If yes, we interpret it as an
     645   // optimization marker. Otherwise, interpret it as a weak reference to a code
    646   // object.
    647   __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
    648 
    649   {
    650     // Optimized code slot is a Smi optimization marker.
    651 
    652     // Fall through if no optimization trigger.
    653     __ Branch(&fallthrough, eq, optimized_code_entry,
    654               Operand(Smi::FromEnum(OptimizationMarker::kNone)));
    655 
    656     TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
    657                                   OptimizationMarker::kLogFirstExecution,
    658                                   Runtime::kFunctionFirstExecution);
    659     TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
    660                                   OptimizationMarker::kCompileOptimized,
    661                                   Runtime::kCompileOptimized_NotConcurrent);
    662     TailCallRuntimeIfMarkerEquals(
    663         masm, optimized_code_entry,
    664         OptimizationMarker::kCompileOptimizedConcurrent,
    665         Runtime::kCompileOptimized_Concurrent);
    666 
    667     {
    668       // Otherwise, the marker is InOptimizationQueue, so fall through hoping
    669       // that an interrupt will eventually update the slot with optimized code.
    670       if (FLAG_debug_code) {
    671         __ Assert(
    672             eq, AbortReason::kExpectedOptimizationSentinel,
    673             optimized_code_entry,
    674             Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
    675       }
    676       __ jmp(&fallthrough);
    677     }
    678   }
    679 
    680   {
    681     // Optimized code slot is a weak reference.
    682     __ bind(&optimized_code_slot_is_weak_ref);
    683 
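             // If the weak reference to the code object has been cleared, there is no
             // optimized code; fall through to the unoptimized path.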
    684     __ LoadWeakValue(optimized_code_entry, optimized_code_entry, &fallthrough);
    685 
    686     // Check if the optimized code is marked for deopt. If it is, call the
    687     // runtime to clear it.
    688     Label found_deoptimized_code;
    689     __ Ld(a5, FieldMemOperand(optimized_code_entry,
    690                               Code::kCodeDataContainerOffset));
    691     __ Lw(a5, FieldMemOperand(a5, CodeDataContainer::kKindSpecificFlagsOffset));
    692     __ And(a5, a5, Operand(1 << Code::kMarkedForDeoptimizationBit));
    693     __ Branch(&found_deoptimized_code, ne, a5, Operand(zero_reg));
    694 
    695     // Optimized code is good, get it into the closure and link the closure into
    696     // the optimized functions list, then tail call the optimized code.
    697     // The feedback vector is no longer used, so re-use it as a scratch
    698     // register.
    699     ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
    700                                         scratch2, scratch3, feedback_vector);
    701 
    702     static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
    703     __ Daddu(a2, optimized_code_entry,
    704              Operand(Code::kHeaderSize - kHeapObjectTag));
    705     __ Jump(a2);
    706 
     707     // Optimized code slot contains deoptimized code; evict it and re-enter the
     708     // closure's code.
    709     __ bind(&found_deoptimized_code);
    710     GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
    711   }
    712 
    713   // Fall-through if the optimized code cell is clear and there is no
    714   // optimization marker.
    715   __ bind(&fallthrough);
    716 }
    717 
    718 // Advance the current bytecode offset. This simulates what all bytecode
    719 // handlers do upon completion of the underlying operation. Will bail out to a
    720 // label if the bytecode (without prefix) is a return bytecode.
    721 static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
    722                                           Register bytecode_array,
    723                                           Register bytecode_offset,
    724                                           Register bytecode, Register scratch1,
    725                                           Register scratch2, Label* if_return) {
    726   Register bytecode_size_table = scratch1;
    727   DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
    728                      bytecode));
    729   __ li(bytecode_size_table, ExternalReference::bytecode_size_table_address());
    730 
    731   // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
    732   Label process_bytecode, extra_wide;
    733   STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
    734   STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
    735   STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
    736   STATIC_ASSERT(3 ==
    737                 static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
    738   __ Branch(&process_bytecode, hi, bytecode, Operand(3));
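           // For the prefix bytecodes 0..3 above, the low bit distinguishes the Wide
           // (even) from the ExtraWide (odd) variants.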
    739   __ And(scratch2, bytecode, Operand(1));
    740   __ Branch(&extra_wide, ne, scratch2, Operand(zero_reg));
    741 
    742   // Load the next bytecode and update table to the wide scaled table.
    743   __ Daddu(bytecode_offset, bytecode_offset, Operand(1));
    744   __ Daddu(scratch2, bytecode_array, bytecode_offset);
    745   __ Lbu(bytecode, MemOperand(scratch2));
    746   __ Daddu(bytecode_size_table, bytecode_size_table,
    747            Operand(kIntSize * interpreter::Bytecodes::kBytecodeCount));
    748   __ jmp(&process_bytecode);
    749 
    750   __ bind(&extra_wide);
    751   // Load the next bytecode and update table to the extra wide scaled table.
    752   __ Daddu(bytecode_offset, bytecode_offset, Operand(1));
    753   __ Daddu(scratch2, bytecode_array, bytecode_offset);
    754   __ Lbu(bytecode, MemOperand(scratch2));
    755   __ Daddu(bytecode_size_table, bytecode_size_table,
    756            Operand(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));
    757 
    758   __ bind(&process_bytecode);
    759 
    760 // Bailout to the return label if this is a return bytecode.
    761 #define JUMP_IF_EQUAL(NAME)          \
    762   __ Branch(if_return, eq, bytecode, \
    763             Operand(static_cast<int>(interpreter::Bytecode::k##NAME)));
    764   RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
    765 #undef JUMP_IF_EQUAL
    766 
    767   // Otherwise, load the size of the current bytecode and advance the offset.
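           // The size table holds int32 entries, hence the index scale of 2
           // (1 << 2 == kIntSize).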
    768   __ Dlsa(scratch2, bytecode_size_table, bytecode, 2);
    769   __ Lw(scratch2, MemOperand(scratch2));
    770   __ Daddu(bytecode_offset, bytecode_offset, scratch2);
    771 }
    772 
    773 // Generate code for entering a JS function with the interpreter.
    774 // On entry to the function the receiver and arguments have been pushed on the
    775 // stack left to right.  The actual argument count matches the formal parameter
    776 // count expected by the function.
    777 //
    778 // The live registers are:
    779 //   o a1: the JS function object being called.
    780 //   o a3: the incoming new target or generator object
    781 //   o cp: our context
    782 //   o fp: the caller's frame pointer
    783 //   o sp: stack pointer
    784 //   o ra: return address
    785 //
    786 // The function builds an interpreter frame.  See InterpreterFrameConstants in
    787 // frames.h for its layout.
    788 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
    789   ProfileEntryHookStub::MaybeCallEntryHook(masm);
    790 
    791   Register closure = a1;
    792   Register feedback_vector = a2;
    793 
    794   // Load the feedback vector from the closure.
    795   __ Ld(feedback_vector,
    796         FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
    797   __ Ld(feedback_vector, FieldMemOperand(feedback_vector, Cell::kValueOffset));
    798   // Read off the optimized code slot in the feedback vector, and if there
    799   // is optimized code or an optimization marker, call that instead.
    800   MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, a4, t3, a5);
    801 
    802   // Open a frame scope to indicate that there is a frame on the stack.  The
    803   // MANUAL indicates that the scope shouldn't actually generate code to set up
    804   // the frame (that is done below).
    805   FrameScope frame_scope(masm, StackFrame::MANUAL);
    806   __ PushStandardFrame(closure);
    807 
    808   // Get the bytecode array from the function object and load it into
    809   // kInterpreterBytecodeArrayRegister.
    810   __ Ld(a0, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
    811   __ Ld(kInterpreterBytecodeArrayRegister,
    812         FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));
    813   GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, a4);
    814 
    815   // Increment invocation count for the function.
    816   __ Lw(a4, FieldMemOperand(feedback_vector,
    817                             FeedbackVector::kInvocationCountOffset));
    818   __ Addu(a4, a4, Operand(1));
    819   __ Sw(a4, FieldMemOperand(feedback_vector,
    820                             FeedbackVector::kInvocationCountOffset));
    821 
    822   // Check function data field is actually a BytecodeArray object.
    823   if (FLAG_debug_code) {
    824     __ SmiTst(kInterpreterBytecodeArrayRegister, a4);
    825     __ Assert(ne,
    826               AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
    827               a4, Operand(zero_reg));
    828     __ GetObjectType(kInterpreterBytecodeArrayRegister, a4, a4);
    829     __ Assert(eq,
    830               AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
    831               a4, Operand(BYTECODE_ARRAY_TYPE));
    832   }
    833 
    834   // Reset code age.
    835   DCHECK_EQ(0, BytecodeArray::kNoAgeBytecodeAge);
    836   __ sb(zero_reg, FieldMemOperand(kInterpreterBytecodeArrayRegister,
    837                                   BytecodeArray::kBytecodeAgeOffset));
    838 
    839   // Load initial bytecode offset.
    840   __ li(kInterpreterBytecodeOffsetRegister,
    841         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
    842 
    843   // Push bytecode array and Smi tagged bytecode array offset.
    844   __ SmiTag(a4, kInterpreterBytecodeOffsetRegister);
    845   __ Push(kInterpreterBytecodeArrayRegister, a4);
    846 
    847   // Allocate the local and temporary register file on the stack.
    848   {
    849     // Load frame size (word) from the BytecodeArray object.
    850     __ Lw(a4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
    851                               BytecodeArray::kFrameSizeOffset));
    852 
    853     // Do a stack check to ensure we don't go over the limit.
    854     Label ok;
    855     __ Dsubu(a5, sp, Operand(a4));
    856     __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    857     __ Branch(&ok, hs, a5, Operand(a2));
    858     __ CallRuntime(Runtime::kThrowStackOverflow);
    859     __ bind(&ok);
    860 
    861     // If ok, push undefined as the initial value for all register file entries.
    862     Label loop_header;
    863     Label loop_check;
    864     __ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
    865     __ Branch(&loop_check);
    866     __ bind(&loop_header);
    867     // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    868     __ push(a5);
    869     // Continue loop if not done.
    870     __ bind(&loop_check);
    871     __ Dsubu(a4, a4, Operand(kPointerSize));
    872     __ Branch(&loop_header, ge, a4, Operand(zero_reg));
    873   }
    874 
    875   // If the bytecode array has a valid incoming new target or generator object
     876   // register, initialize it with the incoming value, which was passed in a3.
    877   Label no_incoming_new_target_or_generator_register;
    878   __ Lw(a5, FieldMemOperand(
    879                 kInterpreterBytecodeArrayRegister,
    880                 BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
    881   __ Branch(&no_incoming_new_target_or_generator_register, eq, a5,
    882             Operand(zero_reg));
    883   __ Dlsa(a5, fp, a5, kPointerSizeLog2);
    884   __ Sd(a3, MemOperand(a5));
    885   __ bind(&no_incoming_new_target_or_generator_register);
    886 
    887   // Load accumulator as undefined.
    888   __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
    889 
    890   // Load the dispatch table into a register and dispatch to the bytecode
    891   // handler at the current bytecode offset.
    892   Label do_dispatch;
    893   __ bind(&do_dispatch);
    894   __ li(kInterpreterDispatchTableRegister,
    895         ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
    896   __ Daddu(a0, kInterpreterBytecodeArrayRegister,
    897            kInterpreterBytecodeOffsetRegister);
    898   __ Lbu(a7, MemOperand(a0));
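           // a7 now holds the current bytecode; index the dispatch table with it to
           // fetch the handler's code entry and call it.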
    899   __ Dlsa(kScratchReg, kInterpreterDispatchTableRegister, a7, kPointerSizeLog2);
    900   __ Ld(kJavaScriptCallCodeStartRegister, MemOperand(kScratchReg));
    901   __ Call(kJavaScriptCallCodeStartRegister);
    902   masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
    903 
    904   // Any returns to the entry trampoline are either due to the return bytecode
    905   // or the interpreter tail calling a builtin and then a dispatch.
    906 
    907   // Get bytecode array and bytecode offset from the stack frame.
    908   __ Ld(kInterpreterBytecodeArrayRegister,
    909         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
    910   __ Ld(kInterpreterBytecodeOffsetRegister,
    911         MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
    912   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
    913 
    914   // Either return, or advance to the next bytecode and dispatch.
    915   Label do_return;
    916   __ Daddu(a1, kInterpreterBytecodeArrayRegister,
    917            kInterpreterBytecodeOffsetRegister);
    918   __ Lbu(a1, MemOperand(a1));
    919   AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
    920                                 kInterpreterBytecodeOffsetRegister, a1, a2, a3,
    921                                 &do_return);
    922   __ jmp(&do_dispatch);
    923 
    924   __ bind(&do_return);
    925   // The return value is in v0.
    926   LeaveInterpreterFrame(masm, t0);
    927   __ Jump(ra);
    928 }
    929 
    930 static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
    931                                         Register scratch1, Register scratch2,
    932                                         Label* stack_overflow) {
    933   // Check the stack for overflow. We are not trying to catch
    934   // interruptions (e.g. debug break and preemption) here, so the "real stack
    935   // limit" is checked.
    936   __ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex);
     937   // Make scratch1 the amount of space we have left. The stack might already
     938   // have overflowed here, which will cause scratch1 to become negative.
    939   __ dsubu(scratch1, sp, scratch1);
    940   // Check if the arguments will overflow the stack.
    941   __ dsll(scratch2, num_args, kPointerSizeLog2);
    942   // Signed comparison.
    943   __ Branch(stack_overflow, le, scratch1, Operand(scratch2));
    944 }
    945 
    946 static void Generate_InterpreterPushArgs(MacroAssembler* masm,
    947                                          Register num_args, Register index,
    948                                          Register scratch, Register scratch2) {
    949   // Find the address of the last argument.
    950   __ mov(scratch2, num_args);
    951   __ dsll(scratch2, scratch2, kPointerSizeLog2);
    952   __ Dsubu(scratch2, index, Operand(scratch2));
    953 
    954   // Push the arguments.
    955   Label loop_header, loop_check;
    956   __ Branch(&loop_check);
    957   __ bind(&loop_header);
    958   __ Ld(scratch, MemOperand(index));
    959   __ Daddu(index, index, Operand(-kPointerSize));
    960   __ push(scratch);
    961   __ bind(&loop_check);
    962   __ Branch(&loop_header, gt, index, Operand(scratch2));
    963 }
    964 
    965 // static
    966 void Builtins::Generate_InterpreterPushArgsThenCallImpl(
    967     MacroAssembler* masm, ConvertReceiverMode receiver_mode,
    968     InterpreterPushArgsMode mode) {
    969   DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
    970   // ----------- S t a t e -------------
    971   //  -- a0 : the number of arguments (not including the receiver)
    972   //  -- a2 : the address of the first argument to be pushed. Subsequent
    973   //          arguments should be consecutive above this, in the same order as
    974   //          they are to be pushed onto the stack.
    975   //  -- a1 : the target to call (can be any Object).
    976   // -----------------------------------
    977   Label stack_overflow;
    978 
    979   __ Daddu(a3, a0, Operand(1));  // Add one for receiver.
    980 
    981   // Push "undefined" as the receiver arg if we need to.
    982   if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    983     __ PushRoot(Heap::kUndefinedValueRootIndex);
    984     __ Dsubu(a3, a3, Operand(1));  // Subtract one for receiver.
    985   }
    986 
    987   Generate_StackOverflowCheck(masm, a3, a4, t0, &stack_overflow);
    988 
    989   // This function modifies a2, t0 and a4.
    990   Generate_InterpreterPushArgs(masm, a3, a2, a4, t0);
    991 
    992   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    993     __ Pop(a2);                   // Pass the spread in a register
    994     __ Dsubu(a0, a0, Operand(1));  // Subtract one for spread
    995   }
    996 
    997   // Call the target.
    998   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    999     __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
   1000             RelocInfo::CODE_TARGET);
   1001   } else {
   1002     __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
   1003             RelocInfo::CODE_TARGET);
   1004   }
   1005 
   1006   __ bind(&stack_overflow);
   1007   {
   1008     __ TailCallRuntime(Runtime::kThrowStackOverflow);
   1009     // Unreachable code.
   1010     __ break_(0xCC);
   1011   }
   1012 }
   1013 
   1014 // static
   1015 void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
   1016     MacroAssembler* masm, InterpreterPushArgsMode mode) {
   1017   // ----------- S t a t e -------------
   1018   // -- a0 : argument count (not including receiver)
   1019   // -- a3 : new target
   1020   // -- a1 : constructor to call
   1021   // -- a2 : allocation site feedback if available, undefined otherwise.
   1022   // -- a4 : address of the first argument
   1023   // -----------------------------------
   1024   Label stack_overflow;
   1025 
   1026   // Push a slot for the receiver.
   1027   __ push(zero_reg);
   1028 
   1029   Generate_StackOverflowCheck(masm, a0, a5, t0, &stack_overflow);
   1030 
   1031   // This function modifies t0, a4 and a5.
   1032   Generate_InterpreterPushArgs(masm, a0, a4, a5, t0);
   1033 
   1034   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
   1035     __ Pop(a2);                   // Pass the spread in a register
   1036     __ Dsubu(a0, a0, Operand(1));  // Subtract one for spread
   1037   } else {
   1038     __ AssertUndefinedOrAllocationSite(a2, t0);
   1039   }
   1040 
   1041   if (mode == InterpreterPushArgsMode::kArrayFunction) {
   1042     __ AssertFunction(a1);
   1043 
   1044     // Tail call to the function-specific construct stub (still in the caller
   1045     // context at this point).
   1046     __ Jump(BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl),
   1047             RelocInfo::CODE_TARGET);
   1048   } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
   1049     // Call the constructor with a0, a1, and a3 unmodified.
   1050     __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
   1051             RelocInfo::CODE_TARGET);
   1052   } else {
   1053     DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
   1054     // Call the constructor with a0, a1, and a3 unmodified.
   1055     __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
   1056   }
   1057 
   1058   __ bind(&stack_overflow);
   1059   {
   1060     __ TailCallRuntime(Runtime::kThrowStackOverflow);
   1061     // Unreachable code.
   1062     __ break_(0xCC);
   1063   }
   1064 }
   1065 
   1066 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
   1067   // Set the return address to the correct point in the interpreter entry
   1068   // trampoline.
   1069   Label builtin_trampoline, trampoline_loaded;
   1070   Smi* interpreter_entry_return_pc_offset(
   1071       masm->isolate()->heap()->interpreter_entry_return_pc_offset());
   1072   DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
   1073 
   1074   // If the SFI function_data is an InterpreterData, get the trampoline stored
   1075   // in it, otherwise get the trampoline from the builtins list.
   1076   __ Ld(t0, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
   1077   __ Ld(t0, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
   1078   __ Ld(t0, FieldMemOperand(t0, SharedFunctionInfo::kFunctionDataOffset));
   1079   __ GetObjectType(t0, kInterpreterDispatchTableRegister,
   1080                    kInterpreterDispatchTableRegister);
   1081   __ Branch(&builtin_trampoline, ne, kInterpreterDispatchTableRegister,
   1082             Operand(INTERPRETER_DATA_TYPE));
   1083 
   1084   __ Ld(t0, FieldMemOperand(t0, InterpreterData::kInterpreterTrampolineOffset));
   1085   __ Branch(&trampoline_loaded);
   1086 
   1087   __ bind(&builtin_trampoline);
   1088   __ li(t0, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));
   1089 
   1090   __ bind(&trampoline_loaded);
   1091   __ Daddu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() +
   1092                            Code::kHeaderSize - kHeapObjectTag));
   1093 
   1094   // Initialize the dispatch table register.
   1095   __ li(kInterpreterDispatchTableRegister,
   1096         ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
   1097 
   1098   // Get the bytecode array pointer from the frame.
   1099   __ Ld(kInterpreterBytecodeArrayRegister,
   1100         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
   1101 
   1102   if (FLAG_debug_code) {
   1103     // Check function data field is actually a BytecodeArray object.
   1104     __ SmiTst(kInterpreterBytecodeArrayRegister, kScratchReg);
   1105     __ Assert(ne,
   1106               AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
   1107               kScratchReg, Operand(zero_reg));
   1108     __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1);
   1109     __ Assert(eq,
   1110               AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
   1111               a1, Operand(BYTECODE_ARRAY_TYPE));
   1112   }
   1113 
   1114   // Get the target bytecode offset from the frame.
   1115   __ SmiUntag(kInterpreterBytecodeOffsetRegister,
   1116               MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
   1117 
   1118   // Dispatch to the target bytecode.
   1119   __ Daddu(a1, kInterpreterBytecodeArrayRegister,
   1120            kInterpreterBytecodeOffsetRegister);
   1121   __ Lbu(a7, MemOperand(a1));
   1122   __ Dlsa(a1, kInterpreterDispatchTableRegister, a7, kPointerSizeLog2);
   1123   __ Ld(kJavaScriptCallCodeStartRegister, MemOperand(a1));
   1124   __ Jump(kJavaScriptCallCodeStartRegister);
   1125 }
   1126 
   1127 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
   1128   // Advance the current bytecode offset stored within the given interpreter
   1129   // stack frame. This simulates what all bytecode handlers do upon completion
   1130   // of the underlying operation.
   1131   __ Ld(kInterpreterBytecodeArrayRegister,
   1132         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
   1133   __ Ld(kInterpreterBytecodeOffsetRegister,
   1134         MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
   1135   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
   1136 
   1137   // Load the current bytecode.
   1138   __ Daddu(a1, kInterpreterBytecodeArrayRegister,
   1139            kInterpreterBytecodeOffsetRegister);
   1140   __ Lbu(a1, MemOperand(a1));
   1141 
   1142   // Advance to the next bytecode.
   1143   Label if_return;
   1144   AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
   1145                                 kInterpreterBytecodeOffsetRegister, a1, a2, a3,
   1146                                 &if_return);
   1147 
   1148   // Convert new bytecode offset to a Smi and save in the stackframe.
   1149   __ SmiTag(a2, kInterpreterBytecodeOffsetRegister);
   1150   __ Sd(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
   1151 
   1152   Generate_InterpreterEnterBytecode(masm);
   1153 
   1154   // We should never take the if_return path.
   1155   __ bind(&if_return);
   1156   __ Abort(AbortReason::kInvalidBytecodeAdvance);
   1157 }
   1158 
   1159 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
   1160   Generate_InterpreterEnterBytecode(masm);
   1161 }
   1162 
   1163 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
   1164   // ----------- S t a t e -------------
   1165   //  -- a0 : argument count (preserved for callee)
   1166   //  -- a1 : new target (preserved for callee)
   1167   //  -- a3 : target function (preserved for callee)
   1168   // -----------------------------------
   1169   Label failed;
   1170   {
   1171     FrameScope scope(masm, StackFrame::INTERNAL);
   1172     // Push a copy of the target function and the new target.
   1173     // Push function as parameter to the runtime call.
   1174     __ Move(t2, a0);
   1175     __ SmiTag(a0);
   1176     __ Push(a0, a1, a3, a1);
   1177 
   1178     // Copy arguments from caller (stdlib, foreign, heap).
   1179     Label args_done;
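             // Depending on the actual argument count in t2 (0..3), push the arguments
             // that were provided and pad with undefined so that exactly three values
             // (stdlib, foreign, heap) end up on the stack.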
   1180     for (int j = 0; j < 4; ++j) {
   1181       Label over;
   1182       if (j < 3) {
   1183         __ Branch(&over, ne, t2, Operand(j));
   1184       }
   1185       for (int i = j - 1; i >= 0; --i) {
   1186         __ Ld(t2, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
   1187                                      i * kPointerSize));
   1188         __ push(t2);
   1189       }
   1190       for (int i = 0; i < 3 - j; ++i) {
   1191         __ PushRoot(Heap::kUndefinedValueRootIndex);
   1192       }
   1193       if (j < 3) {
   1194         __ jmp(&args_done);
   1195         __ bind(&over);
   1196       }
   1197     }
   1198     __ bind(&args_done);
   1199 
    1200     // Call the runtime; on success, unwind this frame and the parent frame.
   1201     __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
   1202     // A smi 0 is returned on failure, an object on success.
   1203     __ JumpIfSmi(v0, &failed);
   1204 
   1205     __ Drop(2);
   1206     __ pop(t2);
   1207     __ SmiUntag(t2);
   1208     scope.GenerateLeaveFrame();
   1209 
   1210     __ Daddu(t2, t2, Operand(1));
   1211     __ Dlsa(sp, sp, t2, kPointerSizeLog2);
   1212     __ Ret();
   1213 
   1214     __ bind(&failed);
   1215     // Restore target function and new target.
   1216     __ Pop(a0, a1, a3);
   1217     __ SmiUntag(a0);
   1218   }
   1219   // On failure, tail call back to regular JS by re-calling the function,
   1220   // which has been reset to the lazy compile builtin.
   1221   static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
   1222   __ Ld(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
   1223   __ Daddu(a2, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
   1224   __ Jump(a2);
   1225 }
   1226 
   1227 namespace {
   1228 void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
   1229                                       bool java_script_builtin,
   1230                                       bool with_result) {
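          // The deoptimizer leaves a builtin continuation frame on the stack:
          // the allocatable general registers to restore, the frame's fixed
          // slots (including the saved fp and ra) and the Code object of the
          // builtin to continue in. The WithResult variants additionally store
          // v0 into a slot further up the frame, replacing the hole the
          // deoptimizer put there.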
   1231   const RegisterConfiguration* config(RegisterConfiguration::Default());
   1232   int allocatable_register_count = config->num_allocatable_general_registers();
   1233   if (with_result) {
   1234     // Overwrite the hole inserted by the deoptimizer with the return value from
   1235     // the LAZY deopt point.
   1236     __ Sd(v0,
   1237           MemOperand(
   1238               sp, config->num_allocatable_general_registers() * kPointerSize +
   1239                       BuiltinContinuationFrameConstants::kFixedFrameSize));
   1240   }
   1241   for (int i = allocatable_register_count - 1; i >= 0; --i) {
   1242     int code = config->GetAllocatableGeneralCode(i);
   1243     __ Pop(Register::from_code(code));
   1244     if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
   1245       __ SmiUntag(Register::from_code(code));
   1246     }
   1247   }
   1248   __ Ld(fp, MemOperand(
   1249                 sp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
   1250   __ Pop(t0);
   1251   __ Daddu(sp, sp,
   1252            Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
   1253   __ Pop(ra);
   1254   __ Daddu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
   1255   __ Jump(t0);
   1256 }
   1257 }  // namespace
   1258 
   1259 void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
   1260   Generate_ContinueToBuiltinHelper(masm, false, false);
   1261 }
   1262 
   1263 void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
   1264     MacroAssembler* masm) {
   1265   Generate_ContinueToBuiltinHelper(masm, false, true);
   1266 }
   1267 
   1268 void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
   1269   Generate_ContinueToBuiltinHelper(masm, true, false);
   1270 }
   1271 
   1272 void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
   1273     MacroAssembler* masm) {
   1274   Generate_ContinueToBuiltinHelper(masm, true, true);
   1275 }
   1276 
   1277 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
   1278   {
   1279     FrameScope scope(masm, StackFrame::INTERNAL);
   1280     __ CallRuntime(Runtime::kNotifyDeoptimized);
   1281   }
   1282 
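          // The value for the interpreter accumulator was left on top of the
          // stack; reload it into v0 and drop that slot on return.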
   1283   DCHECK_EQ(kInterpreterAccumulatorRegister.code(), v0.code());
   1284   __ Ld(v0, MemOperand(sp, 0 * kPointerSize));
   1285   __ Ret(USE_DELAY_SLOT);
   1286   // Safe to fill the delay slot: Daddu will emit exactly one instruction.
   1287   __ Daddu(sp, sp, Operand(1 * kPointerSize));  // Remove the accumulator slot.
   1288 }
   1289 
   1290 static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
   1291                                               bool has_handler_frame) {
   1292   // Lookup the function in the JavaScript frame.
   1293   if (has_handler_frame) {
   1294     __ Ld(a0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   1295     __ Ld(a0, MemOperand(a0, JavaScriptFrameConstants::kFunctionOffset));
   1296   } else {
   1297     __ Ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   1298   }
   1299 
   1300   {
   1301     FrameScope scope(masm, StackFrame::INTERNAL);
   1302     // Pass function as argument.
   1303     __ push(a0);
   1304     __ CallRuntime(Runtime::kCompileForOnStackReplacement);
   1305   }
   1306 
   1307   // If the code object is null, just return to the caller.
   1308   __ Ret(eq, v0, Operand(Smi::kZero));
   1309 
   1310   // Drop any potential handler frame that may be sitting on top of the actual
   1311   // JavaScript frame. This is the case when OSR is triggered from bytecode.
   1312   if (has_handler_frame) {
   1313     __ LeaveFrame(StackFrame::STUB);
   1314   }
   1315 
   1316   // Load deoptimization data from the code object.
   1317   // <deopt_data> = <code>[#deoptimization_data_offset]
   1318   __ Ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
   1319 
   1320   // Load the OSR entrypoint offset from the deoptimization data.
   1321   // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
   1322   __ SmiUntag(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
   1323                                      DeoptimizationData::kOsrPcOffsetIndex) -
   1324                                      kHeapObjectTag));
   1325 
   1326   // Compute the target address = code_obj + header_size + osr_offset
   1327   // <entry_addr> = <code_obj> + #header_size + <osr_offset>
   1328   __ Daddu(v0, v0, a1);
   1329   __ daddiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);
   1330 
   1331   // And "return" to the OSR entry point of the function.
   1332   __ Ret();
   1333 }
   1334 
   1335 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   1336   Generate_OnStackReplacementHelper(masm, false);
   1337 }
   1338 
   1339 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
   1340   Generate_OnStackReplacementHelper(masm, true);
   1341 }
   1342 
   1343 // static
   1344 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   1345   // ----------- S t a t e -------------
   1346   //  -- a0    : argc
   1347   //  -- sp[0] : argArray
   1348   //  -- sp[4] : thisArg
   1349   //  -- sp[8] : receiver
   1350   // -----------------------------------
   1351 
   1352   Register argc = a0;
   1353   Register arg_array = a2;
   1354   Register receiver = a1;
   1355   Register this_arg = a5;
   1356   Register undefined_value = a3;
   1357   Register scratch = a4;
   1358 
   1359   __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
   1360 
   1361   // 1. Load receiver into a1, argArray into a2 (if present), remove all
   1362   // arguments from the stack (including the receiver), and push thisArg (if
   1363   // present) instead.
   1364   {
   1365     // Claim (2 - argc) dummy arguments from the stack, to put the stack in a
   1366     // consistent state for a simple pop operation.
   1367 
   1368     __ Dsubu(sp, sp, Operand(2 * kPointerSize));
   1369     __ Dlsa(sp, sp, argc, kPointerSizeLog2);
   1370     __ mov(scratch, argc);
   1371     __ Pop(this_arg, arg_array);                   // Overwrite argc
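            // Movz(rd, rs, rt) moves rs into rd only when rt is zero, so the
            // sequence below substitutes undefined for any argument that was
            // not passed: scratch is argc for the argc == 0 checks and
            // argc - 1 for the argc == 1 check.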
   1372     __ Movz(arg_array, undefined_value, scratch);  // if argc == 0
   1373     __ Movz(this_arg, undefined_value, scratch);   // if argc == 0
   1374     __ Dsubu(scratch, scratch, Operand(1));
   1375     __ Movz(arg_array, undefined_value, scratch);  // if argc == 1
   1376     __ Ld(receiver, MemOperand(sp));
   1377     __ Sd(this_arg, MemOperand(sp));
   1378   }
   1379 
   1380   // ----------- S t a t e -------------
   1381   //  -- a2    : argArray
   1382   //  -- a1    : receiver
   1383   //  -- a3    : undefined root value
   1384   //  -- sp[0] : thisArg
   1385   // -----------------------------------
   1386 
   1387   // 2. We don't need to check explicitly for callable receiver here,
   1388   // since that's the first thing the Call/CallWithArrayLike builtins
   1389   // will do.
   1390 
   1391   // 3. Tail call with no arguments if argArray is null or undefined.
   1392   Label no_arguments;
   1393   __ JumpIfRoot(arg_array, Heap::kNullValueRootIndex, &no_arguments);
   1394   __ Branch(&no_arguments, eq, arg_array, Operand(undefined_value));
   1395 
   1396   // 4a. Apply the receiver to the given argArray.
   1397   __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
   1398           RelocInfo::CODE_TARGET);
   1399 
   1400   // 4b. The argArray is either null or undefined, so we tail call without any
   1401   // arguments to the receiver.
   1402   __ bind(&no_arguments);
   1403   {
   1404     __ mov(a0, zero_reg);
   1405     DCHECK(receiver == a1);
   1406     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   1407   }
   1408 }
   1409 
   1410 // static
   1411 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
   1412   // 1. Make sure we have at least one argument.
   1413   // a0: actual number of arguments
   1414   {
   1415     Label done;
   1416     __ Branch(&done, ne, a0, Operand(zero_reg));
   1417     __ PushRoot(Heap::kUndefinedValueRootIndex);
   1418     __ Daddu(a0, a0, Operand(1));
   1419     __ bind(&done);
   1420   }
   1421 
   1422   // 2. Get the function to call (passed as receiver) from the stack.
   1423   // a0: actual number of arguments
   1424   __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
   1425   __ Ld(a1, MemOperand(kScratchReg));
   1426 
   1427   // 3. Shift arguments and return address one slot down on the stack
   1428   //    (overwriting the original receiver).  Adjust argument count to make
   1429   //    the original first argument the new receiver.
   1430   // a0: actual number of arguments
   1431   // a1: function
   1432   {
   1433     Label loop;
   1434     // Calculate the copy start address (destination). Copy end address is sp.
   1435     __ Dlsa(a2, sp, a0, kPointerSizeLog2);
   1436 
   1437     __ bind(&loop);
   1438     __ Ld(kScratchReg, MemOperand(a2, -kPointerSize));
   1439     __ Sd(kScratchReg, MemOperand(a2));
   1440     __ Dsubu(a2, a2, Operand(kPointerSize));
   1441     __ Branch(&loop, ne, a2, Operand(sp));
   1442     // Adjust the actual number of arguments and remove the top element
   1443     // (which is a copy of the last argument).
   1444     __ Dsubu(a0, a0, Operand(1));
   1445     __ Pop();
   1446   }
   1447 
   1448   // 4. Call the callable.
   1449   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   1450 }
   1451 
   1452 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
   1453   // ----------- S t a t e -------------
   1454   //  -- a0     : argc
   1455   //  -- sp[0]  : argumentsList  (if argc ==3)
   1456   //  -- sp[4]  : thisArgument   (if argc >=2)
   1457   //  -- sp[8]  : target         (if argc >=1)
   1458   //  -- sp[12] : receiver
   1459   // -----------------------------------
   1460 
   1461   Register argc = a0;
   1462   Register arguments_list = a2;
   1463   Register target = a1;
   1464   Register this_argument = a5;
   1465   Register undefined_value = a3;
   1466   Register scratch = a4;
   1467 
   1468   __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
   1469 
   1470   // 1. Load target into a1 (if present), argumentsList into a2 (if present),
   1471   // remove all arguments from the stack (including the receiver), and push
   1472   // thisArgument (if present) instead.
   1473   {
   1474     // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
   1475     // consistent state for a simple pop operation.
   1476 
   1477     __ Dsubu(sp, sp, Operand(3 * kPointerSize));
   1478     __ Dlsa(sp, sp, argc, kPointerSizeLog2);
   1479     __ mov(scratch, argc);
   1480     __ Pop(target, this_argument, arguments_list);
   1481     __ Movz(arguments_list, undefined_value, scratch);  // if argc == 0
   1482     __ Movz(this_argument, undefined_value, scratch);   // if argc == 0
   1483     __ Movz(target, undefined_value, scratch);          // if argc == 0
   1484     __ Dsubu(scratch, scratch, Operand(1));
   1485     __ Movz(arguments_list, undefined_value, scratch);  // if argc == 1
   1486     __ Movz(this_argument, undefined_value, scratch);   // if argc == 1
   1487     __ Dsubu(scratch, scratch, Operand(1));
   1488     __ Movz(arguments_list, undefined_value, scratch);  // if argc == 2
   1489 
   1490     __ Sd(this_argument, MemOperand(sp, 0));  // Overwrite receiver
   1491   }
   1492 
   1493   // ----------- S t a t e -------------
   1494   //  -- a2    : argumentsList
   1495   //  -- a1    : target
   1496   //  -- a3    : undefined root value
   1497   //  -- sp[0] : thisArgument
   1498   // -----------------------------------
   1499 
   1500   // 2. We don't need to check explicitly for callable target here,
   1501   // since that's the first thing the Call/CallWithArrayLike builtins
   1502   // will do.
   1503 
   1504   // 3. Apply the target to the given argumentsList.
   1505   __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
   1506           RelocInfo::CODE_TARGET);
   1507 }
   1508 
   1509 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
   1510   // ----------- S t a t e -------------
   1511   //  -- a0     : argc
   1512   //  -- sp[0]  : new.target (optional) (dummy value if argc <= 2)
   1513   //  -- sp[4]  : argumentsList         (dummy value if argc <= 1)
   1514   //  -- sp[8]  : target                (dummy value if argc == 0)
   1515   //  -- sp[12] : receiver
   1516   // -----------------------------------
   1517   Register argc = a0;
   1518   Register arguments_list = a2;
   1519   Register target = a1;
   1520   Register new_target = a3;
   1521   Register undefined_value = a4;
   1522   Register scratch = a5;
   1523 
   1524   __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
   1525 
   1526   // 1. Load target into a1 (if present), argumentsList into a2 (if present),
   1527   // new.target into a3 (if present, otherwise use target), remove all
   1528   // arguments from the stack (including the receiver), and push thisArgument
   1529   // (if present) instead.
   1530   {
   1531     // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
   1532     // consistent state for a simple pop operation.
   1533 
   1534     __ Dsubu(sp, sp, Operand(3 * kPointerSize));
   1535     __ Dlsa(sp, sp, argc, kPointerSizeLog2);
   1536     __ mov(scratch, argc);
   1537     __ Pop(target, arguments_list, new_target);
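            // Per Reflect.construct, new.target defaults to target when it is
            // not passed explicitly; the other missing operands default to
            // undefined.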
   1538     __ Movz(arguments_list, undefined_value, scratch);  // if argc == 0
   1539     __ Movz(new_target, undefined_value, scratch);      // if argc == 0
   1540     __ Movz(target, undefined_value, scratch);          // if argc == 0
   1541     __ Dsubu(scratch, scratch, Operand(1));
   1542     __ Movz(arguments_list, undefined_value, scratch);  // if argc == 1
   1543     __ Movz(new_target, target, scratch);               // if argc == 1
   1544     __ Dsubu(scratch, scratch, Operand(1));
   1545     __ Movz(new_target, target, scratch);  // if argc == 2
   1546 
   1547     __ Sd(undefined_value, MemOperand(sp, 0));  // Overwrite receiver
   1548   }
   1549 
   1550   // ----------- S t a t e -------------
   1551   //  -- a2    : argumentsList
   1552   //  -- a1    : target
   1553   //  -- a3    : new.target
   1554   //  -- sp[0] : receiver (undefined)
   1555   // -----------------------------------
   1556 
   1557   // 2. We don't need to check explicitly for constructor target here,
   1558   // since that's the first thing the Construct/ConstructWithArrayLike
   1559   // builtins will do.
   1560 
   1561   // 3. We don't need to check explicitly for constructor new.target here,
   1562   // since that's the second thing the Construct/ConstructWithArrayLike
   1563   // builtins will do.
   1564 
   1565   // 4. Construct the target with the given new.target and argumentsList.
   1566   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
   1567           RelocInfo::CODE_TARGET);
   1568 }
   1569 
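        // Builds an arguments adaptor frame: the Smi-tagged actual argument
        // count, the function, a frame-type marker, the caller's fp and ra,
        // and one Smi padding slot are saved, and fp is repointed at the new
        // frame.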
   1570 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
   1571   __ SmiTag(a0);
   1572   __ li(a4, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
   1573   __ MultiPush(a0.bit() | a1.bit() | a4.bit() | fp.bit() | ra.bit());
   1574   __ Push(Smi::kZero);  // Padding.
   1575   __ Daddu(fp, sp,
   1576            Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp));
   1577 }
   1578 
   1579 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
   1580   // ----------- S t a t e -------------
   1581   //  -- v0 : result being passed through
   1582   // -----------------------------------
   1583   // Get the number of arguments passed (as a Smi), tear down the frame and
   1584   // then drop the parameters from the stack.
   1585   __ Ld(a1, MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
   1586   __ mov(sp, fp);
   1587   __ MultiPop(fp.bit() | ra.bit());
   1588   __ SmiScale(a4, a1, kPointerSizeLog2);
   1589   __ Daddu(sp, sp, a4);
   1590   // Adjust for the receiver.
   1591   __ Daddu(sp, sp, Operand(kPointerSize));
   1592 }
   1593 
   1594 // static
   1595 void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
   1596                                                Handle<Code> code) {
   1597   // ----------- S t a t e -------------
   1598   //  -- a1 : target
   1599   //  -- a0 : number of parameters on the stack (not including the receiver)
   1600   //  -- a2 : arguments list (a FixedArray)
   1601   //  -- a4 : len (number of elements to push from args)
   1602   //  -- a3 : new.target (for [[Construct]])
   1603   // -----------------------------------
   1604   if (masm->emit_debug_code()) {
   1605     // Allow a2 to be a FixedArray, or a FixedDoubleArray if a4 == 0.
   1606     Label ok, fail;
   1607     __ AssertNotSmi(a2);
   1608     __ GetObjectType(a2, t8, t8);
   1609     __ Branch(&ok, eq, t8, Operand(FIXED_ARRAY_TYPE));
   1610     __ Branch(&fail, ne, t8, Operand(FIXED_DOUBLE_ARRAY_TYPE));
   1611     __ Branch(&ok, eq, a4, Operand(zero_reg));
   1612     // Fall through.
   1613     __ bind(&fail);
   1614     __ Abort(AbortReason::kOperandIsNotAFixedArray);
   1615 
   1616     __ bind(&ok);
   1617   }
   1618 
   1619   Register args = a2;
   1620   Register len = a4;
   1621 
   1622   // Check for stack overflow.
   1623   {
   1624     // Check the stack for overflow. We are not trying to catch interruptions
   1625     // (i.e. debug break and preemption) here, so check the "real stack limit".
   1626     Label done;
   1627     __ LoadRoot(a5, Heap::kRealStackLimitRootIndex);
   1628     // Make a5 the space we have left. The stack might already be overflowed
   1629     // here, which will cause a5 to become negative.
   1630     __ Dsubu(a5, sp, a5);
   1631     // Check if the arguments will overflow the stack.
   1632     __ dsll(kScratchReg, len, kPointerSizeLog2);
   1633     __ Branch(&done, gt, a5, Operand(kScratchReg));  // Signed comparison.
   1634     __ TailCallRuntime(Runtime::kThrowStackOverflow);
   1635     __ bind(&done);
   1636   }
   1637 
   1638   // Push arguments onto the stack (thisArgument is already on the stack).
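          // Elements that hold the_hole sentinel are replaced with undefined
          // as they are pushed.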
   1639   {
   1640     Label done, push, loop;
   1641     Register src = a6;
   1642     Register scratch = len;
   1643 
   1644     __ daddiu(src, args, FixedArray::kHeaderSize - kHeapObjectTag);
   1645     __ Branch(&done, eq, len, Operand(zero_reg), i::USE_DELAY_SLOT);
   1646     __ Daddu(a0, a0, len);  // The 'len' argument for Call() or Construct().
   1647     __ dsll(scratch, len, kPointerSizeLog2);
   1648     __ Dsubu(scratch, sp, Operand(scratch));
   1649     __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
   1650     __ bind(&loop);
   1651     __ Ld(a5, MemOperand(src));
   1652     __ Branch(&push, ne, a5, Operand(t1));
   1653     __ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
   1654     __ bind(&push);
   1655     __ daddiu(src, src, kPointerSize);
   1656     __ Push(a5);
   1657     __ Branch(&loop, ne, scratch, Operand(sp));
   1658     __ bind(&done);
   1659   }
   1660 
   1661   // Tail-call to the actual Call or Construct builtin.
   1662   __ Jump(code, RelocInfo::CODE_TARGET);
   1663 }
   1664 
   1665 // static
   1666 void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
   1667                                                       CallOrConstructMode mode,
   1668                                                       Handle<Code> code) {
   1669   // ----------- S t a t e -------------
   1670   //  -- a0 : the number of arguments (not including the receiver)
   1671   //  -- a3 : the new.target (for [[Construct]] calls)
   1672   //  -- a1 : the target to call (can be any Object)
   1673   //  -- a2 : start index (to support rest parameters)
   1674   // -----------------------------------
   1675 
   1676   // Check if new.target has a [[Construct]] internal method.
   1677   if (mode == CallOrConstructMode::kConstruct) {
   1678     Label new_target_constructor, new_target_not_constructor;
   1679     __ JumpIfSmi(a3, &new_target_not_constructor);
   1680     __ ld(t1, FieldMemOperand(a3, HeapObject::kMapOffset));
   1681     __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
   1682     __ And(t1, t1, Operand(Map::IsConstructorBit::kMask));
   1683     __ Branch(&new_target_constructor, ne, t1, Operand(zero_reg));
   1684     __ bind(&new_target_not_constructor);
   1685     {
   1686       FrameScope scope(masm, StackFrame::MANUAL);
   1687       __ EnterFrame(StackFrame::INTERNAL);
   1688       __ Push(a3);
   1689       __ CallRuntime(Runtime::kThrowNotConstructor);
   1690     }
   1691     __ bind(&new_target_constructor);
   1692   }
   1693 
   1694   // Check if we have an arguments adaptor frame below the function frame.
   1695   Label arguments_adaptor, arguments_done;
   1696   __ Ld(a6, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   1697   __ Ld(a7, MemOperand(a6, CommonFrameConstants::kContextOrFrameTypeOffset));
   1698   __ Branch(&arguments_adaptor, eq, a7,
   1699             Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
   1700   {
   1701     __ Ld(a7, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   1702     __ Ld(a7, FieldMemOperand(a7, JSFunction::kSharedFunctionInfoOffset));
   1703     __ Lhu(a7, FieldMemOperand(
   1704                    a7, SharedFunctionInfo::kFormalParameterCountOffset));
   1705     __ mov(a6, fp);
   1706   }
   1707   __ Branch(&arguments_done);
   1708   __ bind(&arguments_adaptor);
   1709   {
   1710     // Just get the length from the ArgumentsAdaptorFrame.
   1711     __ SmiUntag(a7,
   1712                 MemOperand(a6, ArgumentsAdaptorFrameConstants::kLengthOffset));
   1713   }
   1714   __ bind(&arguments_done);
   1715 
   1716   Label stack_done, stack_overflow;
   1717   __ Subu(a7, a7, a2);
   1718   __ Branch(&stack_done, le, a7, Operand(zero_reg));
   1719   {
   1720     // Check for stack overflow.
   1721     Generate_StackOverflowCheck(masm, a7, a4, a5, &stack_overflow);
   1722 
   1723     // Forward the arguments from the caller frame.
   1724     {
   1725       Label loop;
   1726       __ Daddu(a0, a0, a7);
   1727       __ bind(&loop);
   1728       {
   1729         __ Dlsa(kScratchReg, a6, a7, kPointerSizeLog2);
   1730         __ Ld(kScratchReg, MemOperand(kScratchReg, 1 * kPointerSize));
   1731         __ push(kScratchReg);
   1732         __ Subu(a7, a7, Operand(1));
   1733         __ Branch(&loop, ne, a7, Operand(zero_reg));
   1734       }
   1735     }
   1736   }
   1737   __ Branch(&stack_done);
   1738   __ bind(&stack_overflow);
   1739   __ TailCallRuntime(Runtime::kThrowStackOverflow);
   1740   __ bind(&stack_done);
   1741 
   1742   // Tail-call to the {code} handler.
   1743   __ Jump(code, RelocInfo::CODE_TARGET);
   1744 }
   1745 
   1746 // static
   1747 void Builtins::Generate_CallFunction(MacroAssembler* masm,
   1748                                      ConvertReceiverMode mode) {
   1749   // ----------- S t a t e -------------
   1750   //  -- a0 : the number of arguments (not including the receiver)
   1751   //  -- a1 : the function to call (checked to be a JSFunction)
   1752   // -----------------------------------
   1753   __ AssertFunction(a1);
   1754 
   1755   // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
   1756   // Check that function is not a "classConstructor".
   1757   Label class_constructor;
   1758   __ Ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
   1759   __ Lwu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFlagsOffset));
   1760   __ And(kScratchReg, a3,
   1761          Operand(SharedFunctionInfo::IsClassConstructorBit::kMask));
   1762   __ Branch(&class_constructor, ne, kScratchReg, Operand(zero_reg));
   1763 
   1764   // Enter the context of the function; ToObject has to run in the function
   1765   // context, and we also need to take the global proxy from the function
   1766   // context in case of conversion.
   1767   __ Ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
   1768   // We need to convert the receiver for non-native sloppy mode functions.
   1769   Label done_convert;
   1770   __ Lwu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFlagsOffset));
   1771   __ And(kScratchReg, a3,
   1772          Operand(SharedFunctionInfo::IsNativeBit::kMask |
   1773                  SharedFunctionInfo::IsStrictBit::kMask));
   1774   __ Branch(&done_convert, ne, kScratchReg, Operand(zero_reg));
   1775   {
   1776     // ----------- S t a t e -------------
   1777     //  -- a0 : the number of arguments (not including the receiver)
   1778     //  -- a1 : the function to call (checked to be a JSFunction)
   1779     //  -- a2 : the shared function info.
   1780     //  -- cp : the function context.
   1781     // -----------------------------------
   1782 
   1783     if (mode == ConvertReceiverMode::kNullOrUndefined) {
   1784       // Patch receiver to global proxy.
   1785       __ LoadGlobalProxy(a3);
   1786     } else {
   1787       Label convert_to_object, convert_receiver;
   1788       __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
   1789       __ Ld(a3, MemOperand(kScratchReg));
   1790       __ JumpIfSmi(a3, &convert_to_object);
   1791       STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
   1792       __ GetObjectType(a3, a4, a4);
   1793       __ Branch(&done_convert, hs, a4, Operand(FIRST_JS_RECEIVER_TYPE));
   1794       if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
   1795         Label convert_global_proxy;
   1796         __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex,
   1797                       &convert_global_proxy);
   1798         __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object);
   1799         __ bind(&convert_global_proxy);
   1800         {
   1801           // Patch receiver to global proxy.
   1802           __ LoadGlobalProxy(a3);
   1803         }
   1804         __ Branch(&convert_receiver);
   1805       }
   1806       __ bind(&convert_to_object);
   1807       {
   1808         // Convert receiver using ToObject.
   1809         // TODO(bmeurer): Inline the allocation here to avoid building the frame
   1810         // in the fast case? (fall back to AllocateInNewSpace?)
   1811         FrameScope scope(masm, StackFrame::INTERNAL);
   1812         __ SmiTag(a0);
   1813         __ Push(a0, a1);
   1814         __ mov(a0, a3);
   1815         __ Push(cp);
   1816         __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
   1817                 RelocInfo::CODE_TARGET);
   1818         __ Pop(cp);
   1819         __ mov(a3, v0);
   1820         __ Pop(a0, a1);
   1821         __ SmiUntag(a0);
   1822       }
   1823       __ Ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
   1824       __ bind(&convert_receiver);
   1825     }
   1826     __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
   1827     __ Sd(a3, MemOperand(kScratchReg));
   1828   }
   1829   __ bind(&done_convert);
   1830 
   1831   // ----------- S t a t e -------------
   1832   //  -- a0 : the number of arguments (not including the receiver)
   1833   //  -- a1 : the function to call (checked to be a JSFunction)
   1834   //  -- a2 : the shared function info.
   1835   //  -- cp : the function context.
   1836   // -----------------------------------
   1837 
   1838   __ Lhu(a2,
   1839          FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
   1840   ParameterCount actual(a0);
   1841   ParameterCount expected(a2);
   1842   __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION);
   1843 
   1844   // The function is a "classConstructor", need to raise an exception.
   1845   __ bind(&class_constructor);
   1846   {
   1847     FrameScope frame(masm, StackFrame::INTERNAL);
   1848     __ Push(a1);
   1849     __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
   1850   }
   1851 }
   1852 
   1853 // static
   1854 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
   1855   // ----------- S t a t e -------------
   1856   //  -- a0 : the number of arguments (not including the receiver)
   1857   //  -- a1 : the function to call (checked to be a JSBoundFunction)
   1858   // -----------------------------------
   1859   __ AssertBoundFunction(a1);
   1860 
   1861   // Patch the receiver to [[BoundThis]].
   1862   {
   1863     __ Ld(kScratchReg, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
   1864     __ Dlsa(a4, sp, a0, kPointerSizeLog2);
   1865     __ Sd(kScratchReg, MemOperand(a4));
   1866   }
   1867 
   1868   // Load [[BoundArguments]] into a2 and length of that into a4.
   1869   __ Ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
   1870   __ SmiUntag(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
   1871 
   1872   // ----------- S t a t e -------------
   1873   //  -- a0 : the number of arguments (not including the receiver)
   1874   //  -- a1 : the function to call (checked to be a JSBoundFunction)
   1875   //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
   1876   //  -- a4 : the number of [[BoundArguments]]
   1877   // -----------------------------------
   1878 
   1879   // Reserve stack space for the [[BoundArguments]].
   1880   {
   1881     Label done;
   1882     __ dsll(a5, a4, kPointerSizeLog2);
   1883     __ Dsubu(sp, sp, Operand(a5));
   1884     // Check the stack for overflow. We are not trying to catch interruptions
   1885     // (i.e. debug break and preemption) here, so check the "real stack limit".
   1886     __ LoadRoot(kScratchReg, Heap::kRealStackLimitRootIndex);
   1887     __ Branch(&done, gt, sp, Operand(kScratchReg));  // Signed comparison.
   1888     // Restore the stack pointer.
   1889     __ Daddu(sp, sp, Operand(a5));
   1890     {
   1891       FrameScope scope(masm, StackFrame::MANUAL);
   1892       __ EnterFrame(StackFrame::INTERNAL);
   1893       __ CallRuntime(Runtime::kThrowStackOverflow);
   1894     }
   1895     __ bind(&done);
   1896   }
   1897 
   1898   // Relocate arguments down the stack.
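          // sp was lowered by the number of [[BoundArguments]] above; shift
          // the existing arguments down into that space so that a gap opens
          // up between them and the receiver, which the [[BoundArguments]]
          // fill in the next block.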
   1899   {
   1900     Label loop, done_loop;
   1901     __ mov(a5, zero_reg);
   1902     __ bind(&loop);
   1903     __ Branch(&done_loop, gt, a5, Operand(a0));
   1904     __ Dlsa(a6, sp, a4, kPointerSizeLog2);
   1905     __ Ld(kScratchReg, MemOperand(a6));
   1906     __ Dlsa(a6, sp, a5, kPointerSizeLog2);
   1907     __ Sd(kScratchReg, MemOperand(a6));
   1908     __ Daddu(a4, a4, Operand(1));
   1909     __ Daddu(a5, a5, Operand(1));
   1910     __ Branch(&loop);
   1911     __ bind(&done_loop);
   1912   }
   1913 
   1914   // Copy [[BoundArguments]] to the stack (below the arguments).
   1915   {
   1916     Label loop, done_loop;
   1917     __ SmiUntag(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
   1918     __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   1919     __ bind(&loop);
   1920     __ Dsubu(a4, a4, Operand(1));
   1921     __ Branch(&done_loop, lt, a4, Operand(zero_reg));
   1922     __ Dlsa(a5, a2, a4, kPointerSizeLog2);
   1923     __ Ld(kScratchReg, MemOperand(a5));
   1924     __ Dlsa(a5, sp, a0, kPointerSizeLog2);
   1925     __ Sd(kScratchReg, MemOperand(a5));
   1926     __ Daddu(a0, a0, Operand(1));
   1927     __ Branch(&loop);
   1928     __ bind(&done_loop);
   1929   }
   1930 
   1931   // Call the [[BoundTargetFunction]] via the Call builtin.
   1932   __ Ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
   1933   __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
   1934           RelocInfo::CODE_TARGET);
   1935 }
   1936 
   1937 // static
   1938 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
   1939   // ----------- S t a t e -------------
   1940   //  -- a0 : the number of arguments (not including the receiver)
   1941   //  -- a1 : the target to call (can be any Object).
   1942   // -----------------------------------
   1943 
   1944   Label non_callable, non_function, non_smi;
   1945   __ JumpIfSmi(a1, &non_callable);
   1946   __ bind(&non_smi);
   1947   __ GetObjectType(a1, t1, t2);
   1948   __ Jump(masm->isolate()->builtins()->CallFunction(mode),
   1949           RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
   1950   __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
   1951           RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));
   1952 
   1953   // Check if target has a [[Call]] internal method.
   1954   __ Lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
   1955   __ And(t1, t1, Operand(Map::IsCallableBit::kMask));
   1956   __ Branch(&non_callable, eq, t1, Operand(zero_reg));
   1957 
   1958   __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE));
   1959   __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);
   1960 
   1961   // 2. Call to something else, which might have a [[Call]] internal method (if
   1962   // not we raise an exception).
   1963   __ bind(&non_function);
   1964   // Overwrite the original receiver with the (original) target.
   1965   __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
   1966   __ Sd(a1, MemOperand(kScratchReg));
   1967   // Let the "call_as_function_delegate" take care of the rest.
   1968   __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1);
   1969   __ Jump(masm->isolate()->builtins()->CallFunction(
   1970               ConvertReceiverMode::kNotNullOrUndefined),
   1971           RelocInfo::CODE_TARGET);
   1972 
   1973   // 3. Call to something that is not callable.
   1974   __ bind(&non_callable);
   1975   {
   1976     FrameScope scope(masm, StackFrame::INTERNAL);
   1977     __ Push(a1);
   1978     __ CallRuntime(Runtime::kThrowCalledNonCallable);
   1979   }
   1980 }
   1981 
   1982 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
   1983   // ----------- S t a t e -------------
   1984   //  -- a0 : the number of arguments (not including the receiver)
   1985   //  -- a1 : the constructor to call (checked to be a JSFunction)
   1986   //  -- a3 : the new target (checked to be a constructor)
   1987   // -----------------------------------
   1988   __ AssertConstructor(a1);
   1989   __ AssertFunction(a1);
   1990 
   1991   // The calling convention for function-specific ConstructStubs requires
   1992   // a2 to contain either an AllocationSite or undefined.
   1993   __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
   1994 
   1995   Label call_generic_stub;
   1996 
   1997   // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
   1998   __ Ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
   1999   __ lwu(a4, FieldMemOperand(a4, SharedFunctionInfo::kFlagsOffset));
   2000   __ And(a4, a4, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
   2001   __ Branch(&call_generic_stub, eq, a4, Operand(zero_reg));
   2002 
   2003   __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
   2004           RelocInfo::CODE_TARGET);
   2005 
   2006   __ bind(&call_generic_stub);
   2007   __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
   2008           RelocInfo::CODE_TARGET);
   2009 }
   2010 
   2011 // static
   2012 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
   2013   // ----------- S t a t e -------------
   2014   //  -- a0 : the number of arguments (not including the receiver)
   2015   //  -- a1 : the function to call (checked to be a JSBoundFunction)
   2016   //  -- a3 : the new target (checked to be a constructor)
   2017   // -----------------------------------
   2018   __ AssertConstructor(a1);
   2019   __ AssertBoundFunction(a1);
   2020 
   2021   // Load [[BoundArguments]] into a2 and length of that into a4.
   2022   __ Ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
   2023   __ SmiUntag(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
   2024 
   2025   // ----------- S t a t e -------------
   2026   //  -- a0 : the number of arguments (not including the receiver)
   2027   //  -- a1 : the function to call (checked to be a JSBoundFunction)
   2028   //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
   2029   //  -- a3 : the new target (checked to be a constructor)
   2030   //  -- a4 : the number of [[BoundArguments]]
   2031   // -----------------------------------
   2032 
   2033   // Reserve stack space for the [[BoundArguments]].
   2034   {
   2035     Label done;
   2036     __ dsll(a5, a4, kPointerSizeLog2);
   2037     __ Dsubu(sp, sp, Operand(a5));
   2038     // Check the stack for overflow. We are not trying to catch interruptions
   2039     // (i.e. debug break and preemption) here, so check the "real stack limit".
   2040     __ LoadRoot(kScratchReg, Heap::kRealStackLimitRootIndex);
   2041     __ Branch(&done, gt, sp, Operand(kScratchReg));  // Signed comparison.
   2042     // Restore the stack pointer.
   2043     __ Daddu(sp, sp, Operand(a5));
   2044     {
   2045       FrameScope scope(masm, StackFrame::MANUAL);
   2046       __ EnterFrame(StackFrame::INTERNAL);
   2047       __ CallRuntime(Runtime::kThrowStackOverflow);
   2048     }
   2049     __ bind(&done);
   2050   }
   2051 
   2052   // Relocate arguments down the stack.
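          // As in the Call case above: shift the arguments down by the number
          // of [[BoundArguments]] so that the bound arguments can be copied
          // into the gap between them and the receiver.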
   2053   {
   2054     Label loop, done_loop;
   2055     __ mov(a5, zero_reg);
   2056     __ bind(&loop);
   2057     __ Branch(&done_loop, ge, a5, Operand(a0));
   2058     __ Dlsa(a6, sp, a4, kPointerSizeLog2);
   2059     __ Ld(kScratchReg, MemOperand(a6));
   2060     __ Dlsa(a6, sp, a5, kPointerSizeLog2);
   2061     __ Sd(kScratchReg, MemOperand(a6));
   2062     __ Daddu(a4, a4, Operand(1));
   2063     __ Daddu(a5, a5, Operand(1));
   2064     __ Branch(&loop);
   2065     __ bind(&done_loop);
   2066   }
   2067 
   2068   // Copy [[BoundArguments]] to the stack (below the arguments).
   2069   {
   2070     Label loop, done_loop;
   2071     __ SmiUntag(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
   2072     __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   2073     __ bind(&loop);
   2074     __ Dsubu(a4, a4, Operand(1));
   2075     __ Branch(&done_loop, lt, a4, Operand(zero_reg));
   2076     __ Dlsa(a5, a2, a4, kPointerSizeLog2);
   2077     __ Ld(kScratchReg, MemOperand(a5));
   2078     __ Dlsa(a5, sp, a0, kPointerSizeLog2);
   2079     __ Sd(kScratchReg, MemOperand(a5));
   2080     __ Daddu(a0, a0, Operand(1));
   2081     __ Branch(&loop);
   2082     __ bind(&done_loop);
   2083   }
   2084 
   2085   // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
   2086   {
   2087     Label skip_load;
   2088     __ Branch(&skip_load, ne, a1, Operand(a3));
   2089     __ Ld(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
   2090     __ bind(&skip_load);
   2091   }
   2092 
   2093   // Construct the [[BoundTargetFunction]] via the Construct builtin.
   2094   __ Ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
   2095   __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
   2096 }
   2097 
   2098 // static
   2099 void Builtins::Generate_Construct(MacroAssembler* masm) {
   2100   // ----------- S t a t e -------------
   2101   //  -- a0 : the number of arguments (not including the receiver)
   2102   //  -- a1 : the constructor to call (can be any Object)
   2103   //  -- a3 : the new target (either the same as the constructor or
   2104   //          the JSFunction on which new was invoked initially)
   2105   // -----------------------------------
   2106 
   2107   // Check if target is a Smi.
   2108   Label non_constructor, non_proxy;
   2109   __ JumpIfSmi(a1, &non_constructor);
   2110 
   2111   // Check if target has a [[Construct]] internal method.
   2112   __ ld(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
   2113   __ Lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
   2114   __ And(t3, t3, Operand(Map::IsConstructorBit::kMask));
   2115   __ Branch(&non_constructor, eq, t3, Operand(zero_reg));
   2116 
   2117   // Dispatch based on instance type.
   2118   __ Lhu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset));
   2119   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
   2120           RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
   2121 
   2122   // Only dispatch to bound functions after checking whether they are
   2123   // constructors.
   2124   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
   2125           RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));
   2126 
   2127   // Only dispatch to proxies after checking whether they are constructors.
   2128   __ Branch(&non_proxy, ne, t2, Operand(JS_PROXY_TYPE));
   2129   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
   2130           RelocInfo::CODE_TARGET);
   2131 
   2132   // Called Construct on an exotic Object with a [[Construct]] internal method.
   2133   __ bind(&non_proxy);
   2134   {
   2135     // Overwrite the original receiver with the (original) target.
   2136     __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
   2137     __ Sd(a1, MemOperand(kScratchReg));
   2138     // Let the "call_as_constructor_delegate" take care of the rest.
   2139     __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1);
   2140     __ Jump(masm->isolate()->builtins()->CallFunction(),
   2141             RelocInfo::CODE_TARGET);
   2142   }
   2143 
   2144   // Called Construct on an Object that doesn't have a [[Construct]] internal
   2145   // method.
   2146   __ bind(&non_constructor);
   2147   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
   2148           RelocInfo::CODE_TARGET);
   2149 }
   2150 
   2151 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   2152   // State setup as expected by MacroAssembler::InvokePrologue.
   2153   // ----------- S t a t e -------------
   2154   //  -- a0: actual arguments count
   2155   //  -- a1: function (passed through to callee)
   2156   //  -- a2: expected arguments count
   2157   //  -- a3: new target (passed through to callee)
   2158   // -----------------------------------
   2159 
   2160   Label invoke, dont_adapt_arguments, stack_overflow;
   2161 
   2162   Label enough, too_few;
   2163   __ Branch(&dont_adapt_arguments, eq, a2,
   2164             Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
   2165   // We use Uless as the number of arguments should always be greater than 0.
   2166   __ Branch(&too_few, Uless, a0, Operand(a2));
   2167 
   2168   {  // Enough parameters: actual >= expected.
   2169     // a0: actual number of arguments as a smi
   2170     // a1: function
   2171     // a2: expected number of arguments
   2172     // a3: new target (passed through to callee)
   2173     __ bind(&enough);
   2174     EnterArgumentsAdaptorFrame(masm);
   2175     Generate_StackOverflowCheck(masm, a2, a5, kScratchReg, &stack_overflow);
   2176 
   2177     // Calculate copy start address into a0 and copy end address into a4.
   2178     __ SmiScale(a0, a0, kPointerSizeLog2);
   2179     __ Daddu(a0, fp, a0);
   2180     // Adjust for return address and receiver.
   2181     __ Daddu(a0, a0, Operand(2 * kPointerSize));
   2182     // Compute copy end address.
   2183     __ dsll(a4, a2, kPointerSizeLog2);
   2184     __ dsubu(a4, a0, a4);
   2185 
   2186     // Copy the arguments (including the receiver) to the new stack frame.
   2187     // a0: copy start address
   2188     // a1: function
   2189     // a2: expected number of arguments
   2190     // a3: new target (passed through to callee)
   2191     // a4: copy end address
   2192 
   2193     Label copy;
   2194     __ bind(&copy);
   2195     __ Ld(a5, MemOperand(a0));
   2196     __ push(a5);
   2197     __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a4));
   2198     __ daddiu(a0, a0, -kPointerSize);  // In delay slot.
   2199 
   2200     __ jmp(&invoke);
   2201   }
   2202 
   2203   {  // Too few parameters: Actual < expected.
   2204     __ bind(&too_few);
   2205     EnterArgumentsAdaptorFrame(masm);
   2206     Generate_StackOverflowCheck(masm, a2, a5, kScratchReg, &stack_overflow);
   2207 
   2208     // Calculate copy start address into a0 and copy end address into a7.
   2209     // a0: actual number of arguments as a smi
   2210     // a1: function
   2211     // a2: expected number of arguments
   2212     // a3: new target (passed through to callee)
   2213     __ SmiScale(a0, a0, kPointerSizeLog2);
   2214     __ Daddu(a0, fp, a0);
   2215     // Adjust for return address and receiver.
   2216     __ Daddu(a0, a0, Operand(2 * kPointerSize));
   2217     // Compute copy end address. Also adjust for return address.
   2218     __ Daddu(a7, fp, kPointerSize);
   2219 
   2220     // Copy the arguments (including the receiver) to the new stack frame.
   2221     // a0: copy start address
   2222     // a1: function
   2223     // a2: expected number of arguments
   2224     // a3: new target (passed through to callee)
   2225     // a7: copy end address
   2226     Label copy;
   2227     __ bind(&copy);
   2228     __ Ld(a4, MemOperand(a0));  // Adjusted above for return addr and receiver.
   2229     __ Dsubu(sp, sp, kPointerSize);
   2230     __ Dsubu(a0, a0, kPointerSize);
   2231     __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a7));
   2232     __ Sd(a4, MemOperand(sp));  // In the delay slot.
   2233 
   2234     // Fill the remaining expected arguments with undefined.
   2235     // a1: function
   2236     // a2: expected number of arguments
   2237     // a3: new target (passed through to callee)
   2238     __ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
   2239     __ dsll(a6, a2, kPointerSizeLog2);
   2240     __ Dsubu(a4, fp, Operand(a6));
   2241     // Adjust for frame.
   2242     __ Dsubu(a4, a4,
   2243              Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp +
   2244                      kPointerSize));
   2245 
   2246     Label fill;
   2247     __ bind(&fill);
   2248     __ Dsubu(sp, sp, kPointerSize);
   2249     __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a4));
   2250     __ Sd(a5, MemOperand(sp));
   2251   }
   2252 
   2253   // Call the entry point.
   2254   __ bind(&invoke);
   2255   __ mov(a0, a2);
   2256   // a0 : expected number of arguments
   2257   // a1 : function (passed through to callee)
   2258   // a3: new target (passed through to callee)
   2259   static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
   2260   __ Ld(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
   2261   __ Daddu(a2, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
   2262   __ Call(a2);
   2263 
   2264   // Store offset of return address for deoptimizer.
   2265   masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
   2266 
   2267   // Exit frame and return.
   2268   LeaveArgumentsAdaptorFrame(masm);
   2269   __ Ret();
   2270 
   2271   // -------------------------------------------
   2272   // Don't adapt arguments.
   2273   // -------------------------------------------
   2274   __ bind(&dont_adapt_arguments);
   2275   static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
   2276   __ Ld(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
   2277   __ Daddu(a2, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
   2278   __ Jump(a2);
   2279 
   2280   __ bind(&stack_overflow);
   2281   {
   2282     FrameScope frame(masm, StackFrame::MANUAL);
   2283     __ CallRuntime(Runtime::kThrowStackOverflow);
   2284     __ break_(0xCC);
   2285   }
   2286 }
   2287 
   2288 void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
   2289   // The function index was put in t0 by the jump table trampoline.
   2290   // Convert to Smi for the runtime call
   2291   __ SmiTag(t0);
   2292   {
   2293     HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
   2294     FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
   2295 
   2296     // Save all parameter registers (see wasm-linkage.cc). They might be
   2297     // overwritten in the runtime call below. We don't have any callee-saved
   2298     // registers in wasm, so no need to store anything else.
   2299     constexpr RegList gp_regs =
   2300         Register::ListOf<a0, a1, a2, a3, a4, a5, a6, a7>();
   2301     constexpr RegList fp_regs =
   2302         DoubleRegister::ListOf<f2, f4, f6, f8, f10, f12, f14>();
   2303     __ MultiPush(gp_regs);
   2304     __ MultiPushFPU(fp_regs);
   2305 
   2306     // Pass the instance and the function index as explicit arguments to the
   2307     // runtime function.
   2308     __ Push(kWasmInstanceRegister, t0);
   2309     // Load the correct CEntry builtin from the instance object.
   2310     __ Ld(a2, FieldMemOperand(kWasmInstanceRegister,
   2311                               WasmInstanceObject::kCEntryStubOffset));
   2312     // Initialize the JavaScript context with 0. CEntry will use it to
   2313     // set the current context on the isolate.
   2314     __ Move(kContextRegister, Smi::kZero);
   2315     __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, a2);
   2316 
   2317     // Restore registers.
   2318     __ MultiPopFPU(fp_regs);
   2319     __ MultiPop(gp_regs);
   2320   }
   2321   // Finally, jump to the entrypoint.
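          // v0 holds the entry address returned by the runtime call above.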
   2322   __ Jump(v0);
   2323 }
   2324 
   2325 void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
   2326                                SaveFPRegsMode save_doubles, ArgvMode argv_mode,
   2327                                bool builtin_exit_frame) {
   2328   // Called from JavaScript; parameters are on stack as if calling JS function
   2329   // a0: number of arguments including receiver
   2330   // a1: pointer to builtin function
   2331   // fp: frame pointer    (restored after C call)
   2332   // sp: stack pointer    (restored as callee's sp after C call)
   2333   // cp: current context  (C callee-saved)
   2334   //
   2335   // If argv_mode == kArgvInRegister:
   2336   // a2: pointer to the first argument
   2337 
   2338   ProfileEntryHookStub::MaybeCallEntryHook(masm);
   2339 
   2340   if (argv_mode == kArgvInRegister) {
   2341     // Move argv into the correct register.
   2342     __ mov(s1, a2);
   2343   } else {
   2344     // Compute the argv pointer in a callee-saved register.
   2345     __ Dlsa(s1, sp, a0, kPointerSizeLog2);
   2346     __ Dsubu(s1, s1, kPointerSize);
   2347   }
   2348 
   2349   // Enter the exit frame that transitions from JavaScript to C++.
   2350   FrameScope scope(masm, StackFrame::MANUAL);
   2351   __ EnterExitFrame(
   2352       save_doubles == kSaveFPRegs, 0,
   2353       builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
   2354 
   2355   // s0: number of arguments  including receiver (C callee-saved)
   2356   // s1: pointer to first argument (C callee-saved)
   2357   // s2: pointer to builtin function (C callee-saved)
   2358 
   2359   // Prepare arguments for C routine.
   2360   // a0 = argc
   2361   __ mov(s0, a0);
   2362   __ mov(s2, a1);
   2363 
   2364   // We are calling compiled C/C++ code. a0 and a1 hold our two arguments. We
   2365   // also need to reserve the 4 argument slots on the stack.
   2366 
   2367   __ AssertStackIsAligned();
   2368 
   2369   // a0 = argc, a1 = argv, a2 = isolate
   2370   __ li(a2, ExternalReference::isolate_address(masm->isolate()));
   2371   __ mov(a1, s1);
   2372 
   2373   // To let the GC traverse the return address of the exit frames, we need to
   2374   // know where the return address is. The CEntry is unmovable, so
   2375   // we can store the address on the stack to be able to find it again and
   2376   // we never have to restore it, because it will not change.
   2377   {
   2378     Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm);
   2379     int kNumInstructionsToJump = 4;
   2380     Label find_ra;
   2381     // Adjust the value in ra to point to the correct return location, 2nd
   2382     // instruction past the real call into C code (the jalr(t9)), and push it.
   2383     // This is the return address of the exit frame.
   2384     if (kArchVariant >= kMips64r6) {
   2385       __ addiupc(ra, kNumInstructionsToJump + 1);
   2386     } else {
   2387       // This no-op-and-link sequence saves PC + 8 in ra register on pre-r6 MIPS
   2388       __ nal();  // nal has branch delay slot.
   2389       __ Daddu(ra, ra, kNumInstructionsToJump * kInstrSize);
   2390     }
   2391     __ bind(&find_ra);
   2392 
   2393     // This spot was reserved in EnterExitFrame.
   2394     __ Sd(ra, MemOperand(sp));
   2395     // Stack space reservation moved to the branch delay slot below.
   2396     // Stack is still aligned.
   2397 
   2398     // Call the C routine.
   2399     __ mov(t9, s2);  // Function pointer to t9 to conform to ABI for PIC.
   2400     __ jalr(t9);
   2401     // Set up sp in the delay slot.
   2402     __ daddiu(sp, sp, -kCArgsSlotsSize);
   2403     // Make sure the stored 'ra' points to this position.
   2404     DCHECK_EQ(kNumInstructionsToJump,
   2405               masm->InstructionsGeneratedSince(&find_ra));
   2406   }
   2407 
   2408   // Result returned in v0 or v1:v0 - do not destroy these registers!
   2409 
   2410   // Check result for exception sentinel.
   2411   Label exception_returned;
   2412   __ LoadRoot(a4, Heap::kExceptionRootIndex);
   2413   __ Branch(&exception_returned, eq, a4, Operand(v0));
   2414 
   2415   // Check that there is no pending exception, otherwise we
   2416   // should have returned the exception sentinel.
   2417   if (FLAG_debug_code) {
   2418     Label okay;
   2419     ExternalReference pending_exception_address = ExternalReference::Create(
   2420         IsolateAddressId::kPendingExceptionAddress, masm->isolate());
   2421     __ li(a2, pending_exception_address);
   2422     __ Ld(a2, MemOperand(a2));
   2423     __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
   2424     // Cannot use Check here, as it attempts to generate a call into the runtime.
   2425     __ Branch(&okay, eq, a4, Operand(a2));
   2426     __ stop("Unexpected pending exception");
   2427     __ bind(&okay);
   2428   }
   2429 
   2430   // Exit C frame and return.
   2431   // v0:v1: result
   2432   // sp: stack pointer
   2433   // fp: frame pointer
   2434   Register argc = argv_mode == kArgvInRegister
   2435                       // We don't want to pop arguments so set argc to no_reg.
   2436                       ? no_reg
   2437                       // s0: still holds argc (callee-saved).
   2438                       : s0;
   2439   __ LeaveExitFrame(save_doubles == kSaveFPRegs, argc, EMIT_RETURN);
   2440 
   2441   // Handling of exception.
   2442   __ bind(&exception_returned);
   2443 
   2444   ExternalReference pending_handler_context_address = ExternalReference::Create(
   2445       IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
   2446   ExternalReference pending_handler_entrypoint_address =
   2447       ExternalReference::Create(
   2448           IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
   2449   ExternalReference pending_handler_fp_address = ExternalReference::Create(
   2450       IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
   2451   ExternalReference pending_handler_sp_address = ExternalReference::Create(
   2452       IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
   2453 
   2454   // Ask the runtime for help to determine the handler. This will set v0 to
   2455   // contain the current pending exception; do not clobber it.
   2456   ExternalReference find_handler =
   2457       ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
   2458   {
   2459     FrameScope scope(masm, StackFrame::MANUAL);
   2460     __ PrepareCallCFunction(3, 0, a0);
   2461     __ mov(a0, zero_reg);
   2462     __ mov(a1, zero_reg);
   2463     __ li(a2, ExternalReference::isolate_address(masm->isolate()));
   2464     __ CallCFunction(find_handler, 3);
   2465   }
   2466 
   2467   // Retrieve the handler context, SP and FP.
   2468   __ li(cp, pending_handler_context_address);
   2469   __ Ld(cp, MemOperand(cp));
   2470   __ li(sp, pending_handler_sp_address);
   2471   __ Ld(sp, MemOperand(sp));
   2472   __ li(fp, pending_handler_fp_address);
   2473   __ Ld(fp, MemOperand(fp));
   2474 
   2475   // If the handler is a JS frame, restore the context to the frame. Note that
   2476   // the context will be set to (cp == 0) for non-JS frames.
   2477   Label zero;
   2478   __ Branch(&zero, eq, cp, Operand(zero_reg));
   2479   __ Sd(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   2480   __ bind(&zero);
   2481 
   2482   // Reset the masking register. This is done independently of the underlying
   2483   // feature flag {FLAG_branch_load_poisoning} so that the snapshot works with
   2484   // both configurations. It is always safe to do this, because the underlying
   2485   // register is caller-saved and can be arbitrarily clobbered.
   2486   __ ResetSpeculationPoisonRegister();
   2487 
   2488   // Compute the handler entry address and jump to it.
   2489   __ li(t9, pending_handler_entrypoint_address);
   2490   __ Ld(t9, MemOperand(t9));
   2491   __ Jump(t9);
   2492 }
   2493 
   2494 void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
   2495   Label out_of_range, only_low, negate, done;
   2496   Register result_reg = t0;
   2497 
   2498   Register scratch = GetRegisterThatIsNotOneOf(result_reg);
   2499   Register scratch2 = GetRegisterThatIsNotOneOf(result_reg, scratch);
   2500   Register scratch3 = GetRegisterThatIsNotOneOf(result_reg, scratch, scratch2);
   2501   DoubleRegister double_scratch = kScratchDoubleReg;
   2502 
   2503   // Account for saved regs.
   2504   const int kArgumentOffset = 4 * kPointerSize;
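          // result_reg, scratch, scratch2 and scratch3 are pushed below, so the
          // double argument ends up kArgumentOffset bytes above sp.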
   2505 
   2506   __ Push(result_reg);
   2507   __ Push(scratch, scratch2, scratch3);
   2508 
   2509   // Load double input.
   2510   __ Ldc1(double_scratch, MemOperand(sp, kArgumentOffset));
   2511 
   2512   // Clear cumulative exception flags and save the FCSR.
   2513   __ cfc1(scratch2, FCSR);
   2514   __ ctc1(zero_reg, FCSR);
   2515 
   2516   // Try a conversion to a signed integer.
   2517   __ Trunc_w_d(double_scratch, double_scratch);
   2518   // Move the converted value into the result register.
   2519   __ mfc1(scratch3, double_scratch);
   2520 
   2521   // Retrieve and restore the FCSR.
   2522   __ cfc1(scratch, FCSR);
   2523   __ ctc1(scratch2, FCSR);
   2524 
   2525   // Check for overflow and NaNs.
   2526   __ And(
   2527       scratch, scratch,
   2528       kFCSROverflowFlagMask | kFCSRUnderflowFlagMask | kFCSRInvalidOpFlagMask);
   2529   // If there were no exceptions, set result_reg and we are done.
   2530   Label error;
   2531   __ Branch(&error, ne, scratch, Operand(zero_reg));
   2532   __ Move(result_reg, scratch3);
   2533   __ Branch(&done);
   2534   __ bind(&error);
   2535 
   2536   // Load the double value and perform a manual truncation.
   2537   Register input_high = scratch2;
   2538   Register input_low = scratch3;
   2539 
   2540   __ Lw(input_low, MemOperand(sp, kArgumentOffset + Register::kMantissaOffset));
   2541   __ Lw(input_high,
   2542         MemOperand(sp, kArgumentOffset + Register::kExponentOffset));
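          // kMantissaOffset and kExponentOffset select the low and high 32-bit
          // words of the double and depend on the target endianness.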
   2543 
   2544   Label normal_exponent, restore_sign;
   2545   // Extract the biased exponent in result.
   2546   __ Ext(result_reg, input_high, HeapNumber::kExponentShift,
   2547          HeapNumber::kExponentBits);
   2548 
   2549   // Check for Infinity and NaNs, which should return 0.
   2550   __ Subu(scratch, result_reg, HeapNumber::kExponentMask);
   2551   __ Movz(result_reg, zero_reg, scratch);
   2552   __ Branch(&done, eq, scratch, Operand(zero_reg));
   2553 
   2554   // Express the exponent as a delta from (number of mantissa bits + 31).
   2555   __ Subu(result_reg, result_reg,
   2556           Operand(HeapNumber::kExponentBias + HeapNumber::kMantissaBits + 31));
   2557 
   2558   // If the delta is strictly positive, all bits would be shifted away,
   2559   // which means that we can return 0.
   2560   __ Branch(&normal_exponent, le, result_reg, Operand(zero_reg));
   2561   __ mov(result_reg, zero_reg);
   2562   __ Branch(&done);
   2563 
   2564   __ bind(&normal_exponent);
   2565   const int kShiftBase = HeapNumber::kNonMantissaBitsInTopWord - 1;
   2566   // Calculate shift.
   2567   __ Addu(scratch, result_reg, Operand(kShiftBase + HeapNumber::kMantissaBits));
   2568 
   2569   // Save the sign.
   2570   Register sign = result_reg;
   2571   result_reg = no_reg;
   2572   __ And(sign, input_high, Operand(HeapNumber::kSignMask));
   2573 
   2574   // On ARM, shifts > 31 bits are valid and will result in zero. On MIPS we need
   2575   // to check for this specific case.
   2576   Label high_shift_needed, high_shift_done;
   2577   __ Branch(&high_shift_needed, lt, scratch, Operand(32));
   2578   __ mov(input_high, zero_reg);
   2579   __ Branch(&high_shift_done);
   2580   __ bind(&high_shift_needed);
   2581 
   2582   // Set the implicit 1 before the mantissa part in input_high.
   2583   __ Or(input_high, input_high,
   2584         Operand(1 << HeapNumber::kMantissaBitsInTopWord));
   2585   // Shift the mantissa bits to the correct position.
   2586   // We don't need to clear non-mantissa bits as they will be shifted away.
   2587   // If they weren't, it would mean that the answer is in the 32-bit range.
   2588   __ sllv(input_high, input_high, scratch);
   2589 
   2590   __ bind(&high_shift_done);
   2591 
   2592   // Replace the shifted bits with bits from the lower mantissa word.
   2593   Label pos_shift, shift_done;
   2594   __ li(kScratchReg, 32);
   2595   __ subu(scratch, kScratchReg, scratch);
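          // scratch now holds 32 - shift. If it is negative, the bits from the
          // low word must be shifted left rather than right to line up with
          // input_high.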
   2596   __ Branch(&pos_shift, ge, scratch, Operand(zero_reg));
   2597 
   2598   // Negate scratch.
   2599   __ Subu(scratch, zero_reg, scratch);
   2600   __ sllv(input_low, input_low, scratch);
   2601   __ Branch(&shift_done);
   2602 
   2603   __ bind(&pos_shift);
   2604   __ srlv(input_low, input_low, scratch);
   2605 
   2606   __ bind(&shift_done);
   2607   __ Or(input_high, input_high, Operand(input_low));
   2608   // Restore sign if necessary.
   2609   __ mov(scratch, sign);
   2610   result_reg = sign;
   2611   sign = no_reg;
   2612   __ Subu(result_reg, zero_reg, input_high);
   2613   __ Movz(result_reg, input_high, scratch);
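          // Movz selects the positive value when the sign bit (now in scratch)
          // is clear; otherwise the negated value computed above is kept.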
   2614 
   2615   __ bind(&done);
   2616 
   2617   __ Sd(result_reg, MemOperand(sp, kArgumentOffset));
   2618   __ Pop(scratch, scratch2, scratch3);
   2619   __ Pop(result_reg);
   2620   __ Ret();
   2621 }
   2622 
   2623 void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
   2624   const Register exponent = a2;
   2625   const DoubleRegister double_base = f2;
   2626   const DoubleRegister double_exponent = f4;
   2627   const DoubleRegister double_result = f0;
   2628   const DoubleRegister double_scratch = f6;
   2629   const FPURegister single_scratch = f8;
   2630   const Register scratch = t1;
   2631   const Register scratch2 = a7;
   2632 
   2633   Label call_runtime, done, int_exponent;
   2634 
   2635   Label int_exponent_convert;
   2636   // Detect integer exponents stored as doubles.
   2637   __ EmitFPUTruncate(kRoundToMinusInf, scratch, double_exponent, kScratchReg,
   2638                      double_scratch, scratch2, kCheckForInexactConversion);
   2639   // scratch2 == 0 means there was no conversion error.
   2640   __ Branch(&int_exponent_convert, eq, scratch2, Operand(zero_reg));
   2641 
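          // The exponent is not an integral value: fall through and compute the
          // result by calling out to C code (power_double_double_function).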
   2642   __ push(ra);
   2643   {
   2644     AllowExternalCallThatCantCauseGC scope(masm);
   2645     __ PrepareCallCFunction(0, 2, scratch2);
   2646     __ MovToFloatParameters(double_base, double_exponent);
   2647     __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
   2648   }
   2649   __ pop(ra);
   2650   __ MovFromFloatResult(double_result);
   2651   __ jmp(&done);
   2652 
   2653   __ bind(&int_exponent_convert);
   2654 
   2655   // Calculate power with integer exponent.
   2656   __ bind(&int_exponent);
   2657 
   2658   // Get two copies of the exponent in the registers scratch and exponent.
   2659   // The exponent has previously been stored into scratch as an untagged integer.
   2660   __ mov(exponent, scratch);
   2661 
   2662   __ mov_d(double_scratch, double_base);  // Back up base.
   2663   __ Move(double_result, 1.0);
   2664 
   2665   // Get absolute value of exponent.
   2666   Label positive_exponent, bail_out;
   2667   __ Branch(&positive_exponent, ge, scratch, Operand(zero_reg));
   2668   __ Dsubu(scratch, zero_reg, scratch);
   2669   // Check whether Dsubu overflowed and produced a negative result
   2670   // (this happens only when the input is MIN_INT).
   2671   __ Branch(&bail_out, gt, zero_reg, Operand(scratch));
   2672   __ bind(&positive_exponent);
   2673   __ Assert(ge, AbortReason::kUnexpectedNegativeValue, scratch,
   2674             Operand(zero_reg));
   2675 
   2676   Label while_true, no_carry, loop_end;
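          // Compute double_result = double_base^|exponent| by binary
          // exponentiation (square-and-multiply): double_scratch holds the
          // running square; multiply it into the result whenever the low bit
          // of the exponent is set.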
   2677   __ bind(&while_true);
   2678 
   2679   __ And(scratch2, scratch, 1);
   2680 
   2681   __ Branch(&no_carry, eq, scratch2, Operand(zero_reg));
   2682   __ mul_d(double_result, double_result, double_scratch);
   2683   __ bind(&no_carry);
   2684 
   2685   __ dsra(scratch, scratch, 1);
   2686 
   2687   __ Branch(&loop_end, eq, scratch, Operand(zero_reg));
   2688   __ mul_d(double_scratch, double_scratch, double_scratch);
   2689 
   2690   __ Branch(&while_true);
   2691 
   2692   __ bind(&loop_end);
   2693 
   2694   __ Branch(&done, ge, exponent, Operand(zero_reg));
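          // The original exponent was negative: invert the result
          // (x^-n == 1 / x^n).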
   2695   __ Move(double_scratch, 1.0);
   2696   __ div_d(double_result, double_scratch, double_result);
   2697   // Test whether result is zero.  Bail out to check for subnormal result.
   2698   // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
   2699   __ CompareF64(EQ, double_result, kDoubleRegZero);
   2700   __ BranchFalseShortF(&done);
   2701 
   2702   // double_exponent may not contain the exponent value if the input was a
   2703   // Smi. Set it to the exponent value before bailing out.
   2704   __ bind(&bail_out);
   2705   __ mtc1(exponent, single_scratch);
   2706   __ cvt_d_w(double_exponent, single_scratch);
   2707 
   2708   // Returning or bailing out.
   2709   __ push(ra);
   2710   {
   2711     AllowExternalCallThatCantCauseGC scope(masm);
   2712     __ PrepareCallCFunction(0, 2, scratch);
   2713     __ MovToFloatParameters(double_base, double_exponent);
   2714     __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
   2715   }
   2716   __ pop(ra);
   2717   __ MovFromFloatResult(double_result);
   2718 
   2719   __ bind(&done);
   2720   __ Ret();
   2721 }
   2722 
   2723 namespace {
   2724 
   2725 void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
   2726                                           ElementsKind kind) {
   2727   __ Jump(CodeFactory::InternalArrayNoArgumentConstructor(masm->isolate(), kind)
   2728               .code(),
   2729           RelocInfo::CODE_TARGET, lo, a0, Operand(1));
   2730 
   2731   __ Jump(BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor),
   2732           RelocInfo::CODE_TARGET, hi, a0, Operand(1));
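          // a0 == 0 and a0 > 1 were dispatched above; only the single-argument
          // case (a0 == 1) falls through to here.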
   2733 
   2734   if (IsFastPackedElementsKind(kind)) {
   2735     // We might need to create a holey array;
   2736     // look at the first argument.
   2737     __ Ld(kScratchReg, MemOperand(sp, 0));
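            // A non-zero length argument means the new array will contain
            // holes, so dispatch to the holey variant of the constructor.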
   2738 
   2739     __ Jump(CodeFactory::InternalArraySingleArgumentConstructor(
   2740                 masm->isolate(), GetHoleyElementsKind(kind))
   2741                 .code(),
   2742             RelocInfo::CODE_TARGET, ne, kScratchReg, Operand(zero_reg));
   2743   }
   2744 
   2745   __ Jump(
   2746       CodeFactory::InternalArraySingleArgumentConstructor(masm->isolate(), kind)
   2747           .code(),
   2748       RelocInfo::CODE_TARGET);
   2749 }
   2750 
   2751 }  // namespace
   2752 
   2753 void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
   2754   // ----------- S t a t e -------------
   2755   //  -- a0 : argc
   2756   //  -- a1 : constructor
   2757   //  -- sp[0] : return address
   2758   //  -- sp[4] : last argument
   2759   // -----------------------------------
   2760 
   2761   if (FLAG_debug_code) {
   2762     // The array construct code is only set for the global and natives
   2763     // builtin Array functions which always have maps.
   2764 
   2765     // Initial map for the builtin Array function should be a map.
   2766     __ Ld(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
   2767     // The following test catches both a nullptr and a Smi.
   2768     __ SmiTst(a3, kScratchReg);
   2769     __ Assert(ne, AbortReason::kUnexpectedInitialMapForArrayFunction,
   2770               kScratchReg, Operand(zero_reg));
   2771     __ GetObjectType(a3, a3, a4);
   2772     __ Assert(eq, AbortReason::kUnexpectedInitialMapForArrayFunction, a4,
   2773               Operand(MAP_TYPE));
   2774   }
   2775 
   2776   // Figure out the right elements kind.
   2777   __ Ld(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
   2778 
   2779   // Load the map's "bit field 2" into a3. We only need the first byte,
   2780   // but the following bit field extraction takes care of that anyway.
   2781   __ Lbu(a3, FieldMemOperand(a3, Map::kBitField2Offset));
   2782   // Retrieve elements_kind from bit field 2.
   2783   __ DecodeField<Map::ElementsKindBits>(a3);
   2784 
   2785   if (FLAG_debug_code) {
   2786     Label done;
   2787     __ Branch(&done, eq, a3, Operand(PACKED_ELEMENTS));
   2788     __ Assert(
   2789         eq,
   2790         AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray,
   2791         a3, Operand(HOLEY_ELEMENTS));
   2792     __ bind(&done);
   2793   }
   2794 
   2795   Label fast_elements_case;
   2796   __ Branch(&fast_elements_case, eq, a3, Operand(PACKED_ELEMENTS));
   2797   GenerateInternalArrayConstructorCase(masm, HOLEY_ELEMENTS);
   2798 
   2799   __ bind(&fast_elements_case);
   2800   GenerateInternalArrayConstructorCase(masm, PACKED_ELEMENTS);
   2801 }
   2802 
   2803 #undef __
   2804 
   2805 }  // namespace internal
   2806 }  // namespace v8
   2807 
   2808 #endif  // V8_TARGET_ARCH_MIPS64
   2809