// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"


namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : target
  //  -- a3                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(a1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // Insert extra arguments.
  int num_extra_args = 0;
  switch (extra_args) {
    case BuiltinExtraArguments::kTarget:
      __ Push(a1);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kNewTarget:
      __ Push(a3);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kTargetAndNewTarget:
      __ Push(a1, a3);
      num_extra_args += 2;
      break;
    case BuiltinExtraArguments::kNone:
      break;
  }

  // JumpToExternalReference expects a0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Addu(a0, a0, num_extra_args + 1);
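  // For example, for kTargetAndNewTarget with two JS arguments this yields
  // a0 = 2 (args) + 2 (extra) + 1 (receiver) = 5.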

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray function should be a map.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // The initial map for the builtin Array function should be a map.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ mov(a3, a1);
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(a0, a0, Operand(1));
    __ sll(a0, a0, kPointerSizeLog2);
    __ Addu(sp, a0, sp);
    __ lw(a0, MemOperand(sp));
    __ Drop(2);
  }
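  // Net effect of the block above: with argc >= 1, sp is advanced by
  // 4 * (argc - 1) bytes to point at the first argument, which is loaded
  // into a0, and Drop(2) then pops that argument and the receiver. The Subu
  // in the delay slot also executes when the branch is taken, but a0 is not
  // used on the no_arguments path.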

  // 2a. Convert first argument to number.
  ToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ Move(v0, Smi::FromInt(0));
  __ DropAndRet(1);
}


// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- a3                     : new target
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // 2. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(a0, a0, Operand(1));
    __ sll(a0, a0, kPointerSizeLog2);
    __ Addu(sp, a0, sp);
    __ lw(a0, MemOperand(sp));
    __ Drop(2);
    __ jmp(&done);
    __ bind(&no_arguments);
    __ Move(a0, Smi::FromInt(0));
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure a0 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(a0, &done_convert);
    __ GetObjectType(a0, a2, a2);
    __ Branch(&done_convert, eq, a2, Operand(HEAP_NUMBER_TYPE));
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(a1, a3);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Move(a0, v0);
      __ Pop(a1, a3);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ Branch(&new_object, ne, a1, Operand(a3));

  // 5. Allocate a JSValue wrapper for the number.
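  // AllocateJSValue writes the result to v0, deriving the initial map from
  // the constructor in a1 and wrapping the value in a0; a2 and t0 are
  // scratch, and allocation failure branches to the new_object runtime path.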
  __ AllocateJSValue(v0, a1, a0, a2, t0, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create the new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a0, a1, a3);  // first argument, constructor, new target
    __ CallRuntime(Runtime::kNewObject);
    __ Pop(a0);
  }
  __ Ret(USE_DELAY_SLOT);
  __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset));  // In delay slot
}


// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(a0, a0, Operand(1));
    __ sll(a0, a0, kPointerSizeLog2);
    __ Addu(sp, a0, sp);
    __ lw(a0, MemOperand(sp));
    __ Drop(2);
  }

  // 2a. At least one argument; return a0 if it's a string, otherwise
  // dispatch to the appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(a0, &to_string);
    __ GetObjectType(a0, a1, a1);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ Subu(a1, a1, Operand(FIRST_NONSTRING_TYPE));
    __ Branch(&symbol_descriptive_string, eq, a1, Operand(zero_reg));
    __ Branch(&to_string, gt, a1, Operand(zero_reg));
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a0);
  }
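  // The subtraction above exploits the instance-type ordering: all string
  // types are below FIRST_NONSTRING_TYPE, so a negative result means a0 is
  // already a string (returned as-is in the delay slot), zero means it is a
  // symbol, and a positive result dispatches to the generic conversion.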

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(v0, Heap::kempty_stringRootIndex);
    __ DropAndRet(1);
  }

  // 3a. Convert a0 to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in a0 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Push(a0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}


// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- a3                     : new target
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // 2. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(a0, a0, Operand(1));
    __ sll(a0, a0, kPointerSizeLog2);
    __ Addu(sp, a0, sp);
    __ lw(a0, MemOperand(sp));
    __ Drop(2);
    __ jmp(&done);
    __ bind(&no_arguments);
    __ LoadRoot(a0, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure a0 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(a0, &convert);
    __ GetObjectType(a0, a2, a2);
    __ And(t0, a2, Operand(kIsNotStringMask));
    __ Branch(&done_convert, eq, t0, Operand(zero_reg));
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(a1, a3);
      __ CallStub(&stub);
      __ Move(a0, v0);
      __ Pop(a1, a3);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ Branch(&new_object, ne, a1, Operand(a3));

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(v0, a1, a0, a2, t0, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create the new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a0, a1, a3);  // first argument, constructor, new target
    __ CallRuntime(Runtime::kNewObject);
    __ Pop(a0);
  }
  __ Ret(USE_DELAY_SLOT);
  __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset));  // In delay slot
}


static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- a1 : target function (preserved for callee)
  //  -- a3 : new target (preserved for callee)
  // -----------------------------------

  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the target function and the new target.
  // Push function as parameter to the runtime call.
  __ Push(a1, a3, a1);

  __ CallRuntime(function_id, 1);
  // Restore target function and new target.
  __ Pop(a1, a3);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
  __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}
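// In both tail calls above, the entry point is computed as the tagged Code
// object address plus Code::kHeaderSize - kHeapObjectTag, i.e. untagging the
// pointer and skipping the Code header in a single Addu.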


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // the stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(t0));

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a2     : allocation site or undefined
  //  -- a3     : new target
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(a2, t0);
    __ SmiTag(a0);
    __ Push(a2, a0);

    if (create_implicit_receiver) {
      // Try to allocate the object without transitioning into C code. If any of
      // the preconditions is not met, the code bails out to the runtime call.
      Label rt_call, allocated;
      if (FLAG_inline_new) {
        // Verify that the new target is a JSFunction.
        __ GetObjectType(a3, t1, t0);
        __ Branch(&rt_call, ne, t0, Operand(JS_FUNCTION_TYPE));

        // Load the initial map and verify that it is in fact a map.
        // a3: new target
        __ lw(a2,
              FieldMemOperand(a3, JSFunction::kPrototypeOrInitialMapOffset));
        __ JumpIfSmi(a2, &rt_call);
        __ GetObjectType(a2, t5, t4);
        __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE));

        // Fall back to the runtime if the expected base constructor and the
        // map's actual constructor differ.
        __ lw(t1, FieldMemOperand(a2, Map::kConstructorOrBackPointerOffset));
        __ Branch(&rt_call, ne, a1, Operand(t1));

        // Check that the constructor is not constructing a JSFunction (see
        // comments in Runtime_NewObject in runtime.cc), in which case the
        // initial map's instance type would be JS_FUNCTION_TYPE.
        // a1: constructor function
        // a2: initial map
        __ lbu(t5, FieldMemOperand(a2, Map::kInstanceTypeOffset));
        __ Branch(&rt_call, eq, t5, Operand(JS_FUNCTION_TYPE));

        // Now allocate the JSObject on the heap.
        // a1: constructor function
        // a2: initial map
        // a3: new target
        __ lbu(t3, FieldMemOperand(a2, Map::kInstanceSizeOffset));

        __ Allocate(t3, t4, t3, t6, &rt_call, SIZE_IN_WORDS);

        // Allocated the JSObject, now initialize the fields. Map is set to
        // initial map and properties and elements are set to empty fixed array.
        // a1: constructor function
        // a2: initial map
        // a3: new target
        // t4: JSObject (not HeapObject tagged - the actual address).
        // t3: start of next object
        __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
        __ mov(t5, t4);
        STATIC_ASSERT(0 * kPointerSize == JSObject::kMapOffset);
        __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
        STATIC_ASSERT(1 * kPointerSize == JSObject::kPropertiesOffset);
        __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
        STATIC_ASSERT(2 * kPointerSize == JSObject::kElementsOffset);
        __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
        STATIC_ASSERT(3 * kPointerSize == JSObject::kHeaderSize);
        __ Addu(t5, t5, Operand(3 * kPointerSize));

        // Add the object tag to make the JSObject real, so that we can continue
        // and jump into the continuation code at any time from now on.
        __ Addu(t4, t4, Operand(kHeapObjectTag));

        // Fill all the in-object properties with appropriate filler.
        // t4: JSObject (tagged)
        // t5: First in-object property of JSObject (not tagged)
        __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);

        if (!is_api_function) {
          Label no_inobject_slack_tracking;

          MemOperand bit_field3 = FieldMemOperand(a2, Map::kBitField3Offset);
          // Check if slack tracking is enabled.
          __ lw(t0, bit_field3);
          __ DecodeField<Map::ConstructionCounter>(t2, t0);
          // t2: slack tracking counter
          __ Branch(&no_inobject_slack_tracking, lt, t2,
                    Operand(Map::kSlackTrackingCounterEnd));
          // Decrease generous allocation count.
          __ Subu(t0, t0, Operand(1 << Map::ConstructionCounter::kShift));
          __ sw(t0, bit_field3);

          // Allocate object with a slack.
          __ lbu(a0, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
          __ sll(a0, a0, kPointerSizeLog2);
          __ subu(a0, t3, a0);
          // a0: offset of first field after pre-allocated fields
          if (FLAG_debug_code) {
            __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields, t5,
                      Operand(a0));
          }
          __ InitializeFieldsWithFiller(t5, a0, t7);

          // To allow truncation, fill the remaining fields with the
          // one-pointer filler map.
          __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);
          __ InitializeFieldsWithFiller(t5, t3, t7);

          // t2: slack tracking counter value before decreasing.
          __ Branch(&allocated, ne, t2, Operand(Map::kSlackTrackingCounterEnd));

          // Push the constructor, new_target and the object to the stack,
          // and then the initial map as an argument to the runtime call.
          __ Push(a1, a3, t4, a2);
          __ CallRuntime(Runtime::kFinalizeInstanceSize);
          __ Pop(a1, a3, t4);

          // Continue with JSObject being successfully allocated.
          // a1: constructor function
          // a3: new target
          // t4: JSObject
          __ jmp(&allocated);

          __ bind(&no_inobject_slack_tracking);
        }

        __ InitializeFieldsWithFiller(t5, t3, t7);

        // Continue with JSObject being successfully allocated.
        // a1: constructor function
        // a3: new target
        // t4: JSObject
        __ jmp(&allocated);
      }

      // Allocate the new receiver object using the runtime call.
      // a1: constructor function
      // a3: new target
      __ bind(&rt_call);

      // Push the constructor and new_target twice, second pair as arguments
      // to the runtime call.
      __ Push(a1, a3, a1, a3);  // constructor function, new target
      __ CallRuntime(Runtime::kNewObject);
      __ mov(t4, v0);
      __ Pop(a1, a3);

      // Receiver for constructor call allocated.
      // a1: constructor function
      // a3: new target
      // t4: JSObject
      __ bind(&allocated);

      // Retrieve smi-tagged arguments count from the stack.
      __ lw(a0, MemOperand(sp));
    }

    __ SmiUntag(a0);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(t4, t4);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: new target
    // t4: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
    __ SmiTag(t4, a0);
    __ jmp(&entry);
    __ bind(&loop);
    __ sll(t0, t4, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(t0, a2, Operand(t0));
    __ lw(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
    __ Addu(t4, t4, Operand(-2));
    __ Branch(&loop, greater_equal, t4, Operand(zero_reg));
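    // The loop above copies the arguments from the caller's frame: t4 counts
    // down as a smi (subtracting 2 subtracts Smi::FromInt(1)), and the shift
    // by kPointerSizeLog2 - kSmiTagSize turns the smi index directly into a
    // byte offset from a2.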

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    // a3: new target
    if (is_api_function) {
      __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, a3, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(v0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ GetObjectType(v0, a1, a3);
      __ Branch(&exit, greater_equal, a3, Operand(FIRST_JS_RECEIVER_TYPE));

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ lw(v0, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ lw(a1, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ lw(a1, MemOperand(sp));
    }

    // Leave construct frame.
  }

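  // a1 now holds the smi-tagged argument count: shifting it left by
  // kPointerSizeLog2 - 1 turns the smi into a byte count for the arguments,
  // and the extra kPointerSize drops the receiver.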
  __ sll(t0, a1, kPointerSizeLog2 - 1);
  __ Addu(sp, sp, t0);
  __ Addu(sp, sp, kPointerSize);
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  }
  __ Ret();
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, true);
}


void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}


void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(a1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}


enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Clobbers a2 and t3; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
  // Make a2 the space we have left. The stack might already be overflowed
  // here which will cause a2 to become negative.
  __ Subu(a2, sp, a2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ sll(t3, argc, kPointerSizeLog2);
  }
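  // In the smi-tagged case the shift is reduced by kSmiTagSize, because the
  // smi representation already carries a factor of two.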
  // Signed comparison.
  __ Branch(&okay, gt, a2, Operand(t3));

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: new.target
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the JS frame.
  __ mov(cp, zero_reg);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ li(cp, Operand(context_address));
    __ lw(cp, MemOperand(cp));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Check if we have enough stack space to push all arguments.
    // Clobbers a2 and t3.
    Generate_CheckStackOverflow(masm, a3, kArgcIsUntaggedInt);

    // Remember new.target.
    __ mov(t1, a0);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ sll(t0, a3, kPointerSizeLog2);
    __ addu(t2, s0, t0);
    __ b(&entry);
    __ nop();   // Branch delay slot nop.
    // t2 points past last arg.
    __ bind(&loop);
    __ lw(t0, MemOperand(s0));  // Read next parameter.
    __ addiu(s0, s0, kPointerSize);
    __ lw(t0, MemOperand(t0));  // Dereference handle.
    __ push(t0);  // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(t2));
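    // Each argv slot holds a handle, hence the double load in the loop:
    // first the handle location, then the object it points to.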

    // Setup new.target and argc.
    __ mov(a0, a3);
    __ mov(a3, t1);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ mov(s1, t0);
    __ mov(s2, t0);
    __ mov(s3, t0);
    __ mov(s4, t0);
    __ mov(s5, t0);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }

  __ Jump(ra);
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o a1: the JS function object being called.
//   o a3: the new target
//   o cp: our context
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
// TODO(rmcilroy): We will need to include the current bytecode pointer in the
// frame.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);

  __ Push(ra, fp, cp, a1);
  __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
  __ Push(a3);

  // Push zero for bytecode array offset.
  __ Push(zero_reg);
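  // The fixed part of the frame now holds, from high to low address: ra,
  // caller fp, context, closure (a1), new target (a3), and the zero bytecode
  // array offset just pushed.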

  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ lw(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));

  if (FLAG_debug_code) {
    // Check that the function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister, t0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0,
              Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, t0, t0);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ lw(t0, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ Subu(t1, sp, Operand(t0));
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    __ Branch(&ok, hs, t1, Operand(a2));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
    __ Branch(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(t1);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ Subu(t0, t0, Operand(kPointerSize));
    __ Branch(&loop_header, ge, t0, Operand(zero_reg));
  }

  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Support profiler (specifically profiling_counter).
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Allow simulator stop operations if FLAG_stop_at is set.
  //  - Code aging of the BytecodeArray object.

  // Perform stack guard check.
  {
    Label ok;
    __ LoadRoot(at, Heap::kStackLimitRootIndex);
    __ Branch(&ok, hs, sp, Operand(at));
    __ push(kInterpreterBytecodeArrayRegister);
    __ CallRuntime(Runtime::kStackGuard);
    __ pop(kInterpreterBytecodeArrayRegister);
    __ bind(&ok);
  }

  // Load bytecode offset and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ Addu(kInterpreterRegisterFileRegister, fp,
          Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ li(kInterpreterBytecodeOffsetRegister,
        Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ LoadRoot(kInterpreterDispatchTableRegister,
              Heap::kInterpreterTableRootIndex);
  __ Addu(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));

  // Dispatch to the first bytecode handler for the function.
  __ Addu(a0, kInterpreterBytecodeArrayRegister,
          kInterpreterBytecodeOffsetRegister);
  __ lbu(a0, MemOperand(a0));
  __ sll(at, a0, kPointerSizeLog2);
  __ Addu(at, kInterpreterDispatchTableRegister, at);
  __ lw(at, MemOperand(at));
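  // at now holds the Code object for the first bytecode's handler, indexed
  // into the dispatch table by the opcode byte loaded above.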
  // TODO(rmcilroy): Make the dispatch table point to code entries to avoid
  // untagging and header removal.
  __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Call(at);
}


void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // The return value is in accumulator, which is already in v0.

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments and return.
  __ lw(at, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                            BytecodeArray::kParameterSizeOffset));
  __ Addu(sp, sp, at);
  __ Jump(ra);
}


// static
void Builtins::Generate_InterpreterPushArgsAndCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------

  // Find the address of the last argument.
  __ Addu(a3, a0, Operand(1));  // Add one for receiver.
  __ sll(a3, a3, kPointerSizeLog2);
  __ Subu(a3, a2, Operand(a3));
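  // a3 = a2 - (a0 + 1) * kPointerSize, i.e. the address just below the last
  // argument; the loop below pushes a0 + 1 values (arguments plus receiver),
  // walking a2 down until it reaches a3.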

  // Push the arguments.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ lw(t0, MemOperand(a2));
  __ Addu(a2, a2, Operand(-kPointerSize));
  __ push(t0);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, a2, Operand(a3));

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}


// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- a0 : argument count (not including receiver)
  // -- a3 : new target
  // -- a1 : constructor to call
  // -- a2 : address of the first argument
  // -----------------------------------

  // Find the address of the last argument.
  __ sll(t0, a0, kPointerSizeLog2);
  __ Subu(t0, a2, Operand(t0));

  // Push a slot for the receiver.
  __ push(zero_reg);

  // Push the arguments.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ lw(t1, MemOperand(a2));
  __ Addu(a2, a2, Operand(-kPointerSize));
  __ push(t1);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, a2, Operand(t0));

  // Call the constructor with a0, a1, and a3 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}


static void Generate_InterpreterNotifyDeoptimizedHelper(
    MacroAssembler* masm, Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(kInterpreterAccumulatorRegister);  // Save accumulator register.

    // Pass the deoptimization type to the runtime system.
    __ li(a1, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(a1);
    __ CallRuntime(Runtime::kNotifyDeoptimized);

    __ pop(kInterpreterAccumulatorRegister);  // Restore accumulator register.
    // Tear down internal frame.
  }

  // Drop state (we don't use this for interpreter deopts).
  __ Drop(1);

  // Initialize register file register and dispatch table register.
  __ Addu(kInterpreterRegisterFileRegister, fp,
          Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ LoadRoot(kInterpreterDispatchTableRegister,
              Heap::kInterpreterTableRootIndex);
  __ Addu(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));

  // Get the context from the frame.
  // TODO(rmcilroy): Update interpreter frame to expect current context at the
  // context slot instead of the function context.
  __ lw(kContextRegister,
        MemOperand(kInterpreterRegisterFileRegister,
                   InterpreterFrameConstants::kContextFromRegisterPointer));

  // Get the bytecode array pointer from the frame.
  __ lw(a1,
        MemOperand(kInterpreterRegisterFileRegister,
                   InterpreterFrameConstants::kFunctionFromRegisterPointer));
  __ lw(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(a1, SharedFunctionInfo::kFunctionDataOffset));

  if (FLAG_debug_code) {
    // Check that the function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister, at);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at,
              Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Get the target bytecode offset from the frame.
  __ lw(kInterpreterBytecodeOffsetRegister,
        MemOperand(
            kInterpreterRegisterFileRegister,
            InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ Addu(a1, kInterpreterBytecodeArrayRegister,
          kInterpreterBytecodeOffsetRegister);
  __ lbu(a1, MemOperand(a1));
  __ sll(a1, a1, kPointerSizeLog2);
  __ Addu(a1, kInterpreterDispatchTableRegister, a1);
  __ lw(a1, MemOperand(a1));
  __ Addu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(a1);
}


void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileOptimized_NotConcurrent);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileOptimized_Concurrent);
  GenerateTailCallToReturnedCode(masm);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection, which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  //   a3 - new target
  RegList saved_regs =
      (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
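  // Note: ra and fp are saved because the C call below may clobber them; sp
  // itself is never in the list, so the '& ~sp.bit()' appears to be a
  // defensive no-op here.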
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(saved_regs);
  __ Jump(a0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the
  // fact that make_code_young doesn't do any garbage collection, which allows
  // us to save/restore the registers without worrying about which of them
  // contain pointers.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  //   a3 - new target
  RegList saved_regs =
      (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(saved_regs);

  // Perform prologue operations usually performed by the young code stub.
  __ Push(ra, fp, cp, a1);
  __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength));
  __ Jump(a0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across the notification; this is important for
    // compiled stubs that tail call the runtime on deopts, passing their
    // parameters in registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ Addu(sp, sp, Operand(kPointerSize));  // Ignore state
  __ Jump(ra);  // Jump to miss handler
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(a0);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it -> t2.
  __ lw(t2, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(t2);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ Branch(&with_tos_register,
            ne, t2, Operand(FullCodeGenerator::NO_REGISTERS));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill the delay slot; Addu will emit one instruction.
  __ Addu(sp, sp, Operand(1 * kPointerSize));  // Remove state.

  __ bind(&with_tos_register);
  __ lw(v0, MemOperand(sp, 1 * kPointerSize));
  __ Branch(&unknown_state, ne, t2, Operand(FullCodeGenerator::TOS_REG));

  __ Ret(USE_DELAY_SLOT);
  // Safe to fill the delay slot; Addu will emit one instruction.
  __ Addu(sp, sp, Operand(2 * kPointerSize));  // Remove state and TOS value.
   1287 
   1288   __ bind(&unknown_state);
   1289   __ stop("no cases left");
   1290 }
   1291 
   1292 
   1293 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
   1294   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
   1295 }
   1296 
   1297 
   1298 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
   1299   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
   1300 }
   1301 
   1302 
   1303 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
   1304   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
   1305 }
   1306 
   1307 
// Clobbers {t2, t3, t4, t5}.
static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Label* receiver_check_failed) {
  Register signature = t2;
  Register map = t3;
  Register constructor = t4;
  Register scratch = t5;

  // If there is no signature, return the holder.
  __ lw(signature, FieldMemOperand(function_template_info,
                                   FunctionTemplateInfo::kSignatureOffset));
  Label receiver_check_passed;
  __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
                &receiver_check_passed);

  // Walk the prototype chain.
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, scratch, scratch);
  Label next_prototype;
  __ Branch(&next_prototype, ne, scratch, Operand(JS_FUNCTION_TYPE));
  Register type = constructor;
  __ lw(type,
        FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ lw(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ Branch(&receiver_check_passed, eq, signature, Operand(type),
            USE_DELAY_SLOT);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ GetObjectType(type, scratch, scratch);
  __ Branch(&next_prototype, ne, scratch, Operand(FUNCTION_TEMPLATE_INFO_TYPE));

  // Otherwise load the parent function template and iterate.
  __ lw(type,
        FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ Branch(&function_template_loop);

  // Load the next prototype and iterate.
  __ bind(&next_prototype);
  __ lw(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  // End if the prototype is null or not hidden.
  __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, receiver_check_failed);
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lw(scratch, FieldMemOperand(map, Map::kBitField3Offset));
  __ DecodeField<Map::IsHiddenPrototype>(scratch);
  __ Branch(receiver_check_failed, eq, scratch, Operand(zero_reg));

  __ Branch(&prototype_loop_start);

  __ bind(&receiver_check_passed);
}

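// Sketch of CompatibleReceiverCheck in C-like pseudocode (illustrative only;
// field names shortened, no real API implied):
//   for (map = receiver->map; ; map = receiver->map) {
//     type = map->constructor->shared_info->function_data;
//     while (type is FunctionTemplateInfo) {
//       if (type == signature) return;            // check passed
//       type = type->parent_template;
//     }
//     receiver = map->prototype;
//     if (receiver == null) goto failed;
//     if (!receiver->map->is_hidden_prototype) goto failed;
//   }
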
void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : callee
  //  -- ra                 : return address
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ lw(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(t1, FieldMemOperand(t1, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ sll(at, a0, kPointerSizeLog2);
  __ Addu(t8, sp, at);
  __ lw(t0, MemOperand(t8));
  CompatibleReceiverCheck(masm, t0, t1, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ lw(t2, FieldMemOperand(t1, FunctionTemplateInfo::kCallCodeOffset));
  __ lw(t2, FieldMemOperand(t2, CallHandlerInfo::kFastHandlerOffset));
  __ Addu(t2, t2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(t2);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  // Drop the arguments (including the receiver).
  __ Addu(t8, t8, Operand(kPointerSize));
  __ addu(sp, t8, zero_reg);
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}

void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Look up the function in the JavaScript frame.
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(a0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  __ Ret(eq, v0, Operand(Smi::FromInt(0)));

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
  __ SmiUntag(a1);

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ addu(v0, v0, a1);
  __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}
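
// In effect the computation above produces
//   entry = code_obj + Code::kHeaderSize - kHeapObjectTag + osr_offset,
// i.e. the untagged start of the instruction stream plus the OSR offset;
// loading it into ra lets the final Ret "return" into the optimized code.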


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as an indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(at, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(at));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}


// static
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- sp[0] : receiver
  // -----------------------------------

  // 1. Pop receiver into a0 and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ Pop(a0);
    __ JumpIfSmi(a0, &receiver_not_date);
    __ GetObjectType(a0, t0, t0);
    __ Branch(&receiver_not_date, ne, t0, Operand(JS_DATE_TYPE));
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
  if (field_index == JSDate::kDateValue) {
    __ Ret(USE_DELAY_SLOT);
    __ lw(v0, FieldMemOperand(a0, JSDate::kValueOffset));  // In delay slot.
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      Label stamp_mismatch;
      __ li(a1, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
      __ lw(a1, MemOperand(a1));
      __ lw(t0, FieldMemOperand(a0, JSDate::kCacheStampOffset));
      __ Branch(&stamp_mismatch, ne, t0, Operand(a1));
      __ Ret(USE_DELAY_SLOT);
      __ lw(v0, FieldMemOperand(
                    a0, JSDate::kValueOffset +
                            field_index * kPointerSize));  // In delay slot.
      __ bind(&stamp_mismatch);
    }
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2, t0);
    __ li(a1, Operand(Smi::FromInt(field_index)));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  __ TailCallRuntime(Runtime::kThrowNotDateError);
}

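// The fast paths above mirror, roughly (illustrative pseudocode only):
//   if (field_index == kDateValue) return date->value;
//   if (field_index < kFirstUncachedField &&
//       date->cache_stamp == isolate_date_cache_stamp)
//     return *(date + kValueOffset + field_index * kPointerSize);  // cached
//   return get_date_field_function(date, field_index);             // C fallback
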

// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0    : argc
  //  -- sp[0] : argArray
  //  -- sp[4] : thisArg
  //  -- sp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into a1, argArray into a0 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label no_arg;
    Register scratch = t0;
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ mov(a3, a2);
    __ sll(scratch, a0, kPointerSizeLog2);
    __ Addu(a0, sp, Operand(scratch));
    __ lw(a1, MemOperand(a0));  // receiver
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a2, MemOperand(a0));  // thisArg
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a3, MemOperand(a0));  // argArray
    __ bind(&no_arg);
    __ Addu(sp, sp, Operand(scratch));
    __ sw(a2, MemOperand(sp));
    __ mov(a0, a3);
  }

  // ----------- S t a t e -------------
  //  -- a0    : argArray
  //  -- a1    : receiver
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(a1, &receiver_not_callable);
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ And(t0, t0, Operand(1 << Map::kIsCallable));
  __ Branch(&receiver_not_callable, eq, t0, Operand(zero_reg));

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(a0, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(a0, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ mov(a0, zero_reg);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ sw(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}

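// The scan above implements, in effect (illustrative; slot indices are in
// words from sp):
//   receiver = sp[argc];
//   thisArg  = argc >= 1 ? sp[argc - 1] : undefined;
//   argArray = argc >= 2 ? sp[argc - 2] : undefined;
// after which all arguments are dropped and thisArg becomes the only slot.
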

// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  {
    Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ Addu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack.
  // a0: actual number of arguments
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(at, sp, at);
  __ lw(a1, MemOperand(at));

  // 3. Shift arguments one slot toward the receiver slot (overwriting the
  //    original receiver).  Adjust argument count to make the original first
  //    argument the new receiver.
  // a0: actual number of arguments
  // a1: function
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);

    __ bind(&loop);
    __ lw(at, MemOperand(a2, -kPointerSize));
    __ sw(at, MemOperand(a2));
    __ Subu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Subu(a0, a0, Operand(1));
    __ Pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}

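// The shift loop above is, in effect (word-sized slots, illustrative only):
//   for (p = sp + argc; p != sp; p--) p[0] = p[-1];
// so the first argument overwrites the receiver slot; the duplicated bottom
// slot is then popped and a0 decremented to match.
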

void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[4]  : thisArgument
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into a1 (if present), argumentsList into a0 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label no_arg;
    Register scratch = t0;
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ mov(a2, a1);
    __ mov(a3, a1);
    __ sll(scratch, a0, kPointerSizeLog2);
    __ mov(a0, scratch);
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(zero_reg));
    __ Addu(a0, sp, Operand(a0));
    __ lw(a1, MemOperand(a0));  // target
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a2, MemOperand(a0));  // thisArgument
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a3, MemOperand(a0));  // argumentsList
    __ bind(&no_arg);
    __ Addu(sp, sp, Operand(scratch));
    __ sw(a2, MemOperand(sp));
    __ mov(a0, a3);
  }

  // ----------- S t a t e -------------
  //  -- a0    : argumentsList
  //  -- a1    : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(a1, &target_not_callable);
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ And(t0, t0, Operand(1 << Map::kIsCallable));
  __ Branch(&target_not_callable, eq, t0, Operand(zero_reg));

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ sw(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}

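// As with Function.prototype.apply above, the scan implements, in effect:
//   target        = argc >= 1 ? sp[argc - 1] : undefined;
//   thisArgument  = argc >= 2 ? sp[argc - 2] : undefined;
//   argumentsList = argc >= 3 ? sp[argc - 3] : undefined;
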

void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[4]  : argumentsList
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into a1 (if present), argumentsList into a0 (if present),
  // new.target into a3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push undefined as
  // the receiver instead.
  {
    Label no_arg;
    Register scratch = t0;
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ mov(a2, a1);
    __ sll(scratch, a0, kPointerSizeLog2);
    __ Addu(a0, sp, Operand(scratch));
    __ sw(a2, MemOperand(a0));  // receiver
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a1, MemOperand(a0));  // target
    __ mov(a3, a1);             // new.target defaults to target
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a2, MemOperand(a0));  // argumentsList
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a3, MemOperand(a0));  // new.target
    __ bind(&no_arg);
    __ Addu(sp, sp, Operand(scratch));
    __ mov(a0, a2);
  }

  // ----------- S t a t e -------------
  //  -- a0    : argumentsList
  //  -- a3    : new.target
  //  -- a1    : target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(a1, &target_not_constructor);
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ And(t0, t0, Operand(1 << Map::kIsConstructor));
  __ Branch(&target_not_constructor, eq, t0, Operand(zero_reg));

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(a3, &new_target_not_constructor);
  __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ And(t0, t0, Operand(1 << Map::kIsConstructor));
  __ Branch(&new_target_not_constructor, eq, t0, Operand(zero_reg));

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ sw(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ sw(a3, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
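
// In effect (illustrative):
//   target        = argc >= 1 ? sp[argc - 1] : undefined;
//   argumentsList = argc >= 2 ? sp[argc - 2] : undefined;
//   new.target    = argc >= 3 ? sp[argc - 3] : target;
// with undefined stored into the receiver slot before tail-calling Apply.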


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- a0 : actual number of arguments
  //  -- a1 : function (passed through to callee)
  //  -- a2 : expected number of arguments
  //  -- a3 : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(t1, Heap::kRealStackLimitRootIndex);
  // Make t1 the space we have left. The stack might already be overflowed
  // here which will cause t1 to become negative.
  __ subu(t1, sp, t1);
  // Check if the arguments will overflow the stack.
  __ sll(at, a2, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(stack_overflow, le, t1, Operand(at));
}
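
// The check above fires when, in effect,
//   sp - real_stack_limit <= expected_argc * kPointerSize
// (signed, so a stack pointer already below the limit also trips it).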


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ sll(a0, a0, kSmiTagSize);
  __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
  __ Addu(fp, sp,
      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                             kPointerSize)));
  __ mov(sp, fp);
  __ MultiPop(fp.bit() | ra.bit());
  __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(sp, sp, t0);
  // Adjust for the receiver.
  __ Addu(sp, sp, Operand(kPointerSize));
}
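
// Adaptor frame as built above (illustrative; assumes MultiPush stores the
// highest-numbered register at the highest address):
//   fp + 4  : saved ra
//   fp + 0  : caller's fp
//   fp - 4  : ARGUMENTS_ADAPTOR frame marker (smi)
//   fp - 8  : function
//   fp - 12 : actual argument count (smi), read back by
//             LeaveArgumentsAdaptorFrame.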


// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0    : argumentsList
  //  -- a1    : target
  //  -- a3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(a0, &create_runtime);

    // Load the map of argumentsList into a2.
    __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));

    // Load native context into t0.
    __ lw(t0, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ lw(at, ContextMemOperand(t0, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ Branch(&create_arguments, eq, a2, Operand(at));
    __ lw(at, ContextMemOperand(t0, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ Branch(&create_arguments, eq, a2, Operand(at));

    // Check if argumentsList is a fast JSArray.
    __ lw(v0, FieldMemOperand(a2, HeapObject::kMapOffset));
    __ lbu(v0, FieldMemOperand(v0, Map::kInstanceTypeOffset));
    __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE));

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(a1, a3, a0);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ mov(a0, v0);
      __ Pop(a1, a3);
      __ lw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
      __ SmiUntag(a2);
    }
    __ Branch(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ lw(a2,
          FieldMemOperand(a0, JSObject::kHeaderSize +
                                  Heap::kArgumentsLengthIndex * kPointerSize));
    __ lw(t0, FieldMemOperand(a0, JSObject::kElementsOffset));
    __ lw(at, FieldMemOperand(t0, FixedArray::kLengthOffset));
    __ Branch(&create_runtime, ne, a2, Operand(at));
    __ SmiUntag(a2);
    __ mov(a0, t0);
    __ Branch(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ lw(a2, FieldMemOperand(a2, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(a2);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    __ Branch(&create_runtime, hi, a2, Operand(FAST_ELEMENTS));
    __ Branch(&create_runtime, eq, a2, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ lw(a2, FieldMemOperand(a0, JSArray::kLengthOffset));
    __ lw(a0, FieldMemOperand(a0, JSArray::kElementsOffset));
    __ SmiUntag(a2);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(t0, Heap::kRealStackLimitRootIndex);
    // Make t0 the space we have left. The stack might already be overflowed
    // here which will cause t0 to become negative.
    __ Subu(t0, sp, t0);
    // Check if the arguments will overflow the stack.
    __ sll(at, a2, kPointerSizeLog2);
    __ Branch(&done, gt, t0, Operand(at));  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- a1    : target
  //  -- a0    : args (a FixedArray built from argumentsList)
  //  -- a2    : len (number of elements to push from args)
  //  -- a3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    __ mov(t0, zero_reg);
    Label done, loop;
    __ bind(&loop);
    __ Branch(&done, eq, t0, Operand(a2));
    __ sll(at, t0, kPointerSizeLog2);
    __ Addu(at, a0, at);
    __ lw(at, FieldMemOperand(at, FixedArray::kHeaderSize));
    __ Push(at);
    __ Addu(t0, t0, Operand(1));
    __ Branch(&loop);
    __ bind(&done);
    __ Move(a0, t0);
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    Label construct;
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(&construct, ne, a3, Operand(at));
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
    __ bind(&construct);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
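
// The push loop above copies len elements out of the FixedArray, in effect:
//   for (i = 0; i < len; i++) Push(args->get(i));
// leaving a0 == len for the Call/Construct dispatch that follows.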


// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(a1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFunctionKindByteOffset));
  __ And(at, a3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
  __ Branch(&class_constructor, ne, at, Operand(zero_reg));

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset));
  __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                         (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ Branch(&done_convert, ne, at, Operand(zero_reg));
  {
    // ----------- S t a t e -------------
    //  -- a0 : the number of arguments (not including the receiver)
    //  -- a1 : the function to call (checked to be a JSFunction)
    //  -- a2 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(a3);
    } else {
      Label convert_to_object, convert_receiver;
      __ sll(at, a0, kPointerSizeLog2);
      __ addu(at, sp, at);
      __ lw(a3, MemOperand(at));
      __ JumpIfSmi(a3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ GetObjectType(a3, t0, t0);
      __ Branch(&done_convert, hs, t0, Operand(FIRST_JS_RECEIVER_TYPE));
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(a3);
        }
        __ Branch(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ sll(a0, a0, kSmiTagSize);  // Smi tagged.
        __ Push(a0, a1);
        __ mov(a0, a3);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ mov(a3, v0);
        __ Pop(a0, a1);
        __ sra(a0, a0, kSmiTagSize);  // Un-tag.
      }
      __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(at, sp, at);
    __ sw(a3, MemOperand(at));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  //  -- a2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  __ lw(a2,
        FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
  __ sra(a2, a2, kSmiTagSize);  // Un-tag.
  ParameterCount actual(a0);
  ParameterCount expected(a2);
  __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", so we need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
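
// Receiver conversion above, summarized (per ES6 9.2.1 [[Call]]):
//   native or strict function      -> receiver left untouched;
//   mode == kNullOrUndefined       -> receiver := global proxy;
//   receiver already a JSReceiver  -> kept as-is;
//   receiver null or undefined     -> receiver := global proxy;
//   any other primitive            -> receiver := ToObject(receiver).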


// static
void Builtins::Generate_CallBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  // Patch the receiver to [[BoundThis]].
  {
    __ lw(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
    __ sll(t0, a0, kPointerSizeLog2);
    __ addu(t0, t0, sp);
    __ sw(at, MemOperand(t0));
  }

  // Load [[BoundArguments]] into a2 and length of that into t0.
  __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
  __ SmiUntag(t0);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- t0 : the number of [[BoundArguments]]
  // -----------------------------------

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ sll(t1, t0, kPointerSizeLog2);
    __ Subu(sp, sp, Operand(t1));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
    __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
    // Restore the stack pointer.
    __ Addu(sp, sp, Operand(t1));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(t1, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, gt, t1, Operand(a0));
    __ sll(t2, t0, kPointerSizeLog2);
    __ addu(t2, t2, sp);
    __ lw(at, MemOperand(t2));
    __ sll(t2, t1, kPointerSizeLog2);
    __ addu(t2, t2, sp);
    __ sw(at, MemOperand(t2));
    __ Addu(t0, t0, Operand(1));
    __ Addu(t1, t1, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ SmiUntag(t0);
    __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Subu(t0, t0, Operand(1));
    __ Branch(&done_loop, lt, t0, Operand(zero_reg));
    __ sll(t1, t0, kPointerSizeLog2);
    __ addu(t1, t1, a2);
    __ lw(at, MemOperand(t1));
    __ sll(t1, a0, kPointerSizeLog2);
    __ addu(t1, t1, sp);
    __ sw(at, MemOperand(t1));
    __ Addu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                      masm->isolate())));
  __ lw(at, MemOperand(at));
  __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}

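// Net effect of the relocate/copy loops above, with m bound arguments
// (illustrative; slot indices in words from sp):
//   before: sp[0 .. a0-1] = args, sp[a0] = receiver (= [[BoundThis]])
//   after:  sp[0 .. a0-1] = args, sp[a0 .. a0+m-1] = bound args
//           (bound[0] next to the receiver), sp[a0+m] = receiver,
//           and a0 has been bumped to a0 + m.
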

// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(a1, &non_callable);
  __ bind(&non_smi);
  __ GetObjectType(a1, t1, t2);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE));

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(a1);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ Addu(a0, a0, 2);
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not, we raise an exception).
  __ bind(&non_function);
  // Check if target has a [[Call]] internal method.
  __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t1, t1, Operand(1 << Map::kIsCallable));
  __ Branch(&non_callable, eq, t1, Operand(zero_reg));
  // Overwrite the original receiver with the (original) target.
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(at, sp, at);
  __ sw(a1, MemOperand(at));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
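
// Dispatch above, summarized:
//   JSFunction       -> CallFunction builtin
//   JSBoundFunction  -> CallBoundFunction builtin
//   JSProxy          -> Runtime::kJSProxyCall
//   other callable   -> CALL_AS_FUNCTION_DELEGATE via CallFunction
//   not callable     -> Runtime::kThrowCalledNonCallable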


// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(a1);

  // The calling convention for function-specific ConstructStubs requires
  // a2 to contain either an AllocationSite or undefined.
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset));
  __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  // Load [[BoundArguments]] into a2 and length of that into t0.
  __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
  __ SmiUntag(t0);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- a3 : the new target (checked to be a constructor)
  //  -- t0 : the number of [[BoundArguments]]
  // -----------------------------------

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ sll(t1, t0, kPointerSizeLog2);
    __ Subu(sp, sp, Operand(t1));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
    __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
    // Restore the stack pointer.
    __ Addu(sp, sp, Operand(t1));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(t1, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, ge, t1, Operand(a0));
    __ sll(t2, t0, kPointerSizeLog2);
    __ addu(t2, t2, sp);
    __ lw(at, MemOperand(t2));
    __ sll(t2, t1, kPointerSizeLog2);
    __ addu(t2, t2, sp);
    __ sw(at, MemOperand(t2));
    __ Addu(t0, t0, Operand(1));
    __ Addu(t1, t1, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ SmiUntag(t0);
    __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Subu(t0, t0, Operand(1));
    __ Branch(&done_loop, lt, t0, Operand(zero_reg));
    __ sll(t1, t0, kPointerSizeLog2);
    __ addu(t1, t1, a2);
    __ lw(at, MemOperand(t1));
    __ sll(t1, a0, kPointerSizeLog2);
    __ addu(t1, t1, sp);
    __ sw(at, MemOperand(t1));
    __ Addu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label skip_load;
    __ Branch(&skip_load, ne, a1, Operand(a3));
    __ lw(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&skip_load);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ lw(at, MemOperand(at));
  __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSProxy)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(a1, a3);
  // Include the pushed new_target, constructor and the receiver.
  __ Addu(a0, a0, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}


// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (can be any Object)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(a1, &non_constructor);

  // Dispatch based on instance type.
  __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset));
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));

  // Check if target has a [[Construct]] internal method.
  __ lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t3, t3, Operand(1 << Map::kIsConstructor));
  __ Branch(&non_constructor, eq, t3, Operand(zero_reg));

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Only dispatch to proxies after checking whether they are constructors.
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq, t2, Operand(JS_PROXY_TYPE));

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(at, sp, at);
    __ sw(a1, MemOperand(at));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
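
// Construct dispatch above, summarized:
//   JSFunction                -> ConstructFunction builtin
//   no [[Construct]] method   -> ConstructedNonConstructable
//   JSBoundFunction           -> ConstructBoundFunction builtin
//   JSProxy                   -> ConstructProxy builtin
//   other constructor         -> CALL_AS_CONSTRUCTOR_DELEGATE via CallFunction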


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  //  -- a3: new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ Branch(&dont_adapt_arguments, eq,
      a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use Uless, as the number of arguments should always be greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into t1.
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ sll(t1, a2, kPointerSizeLog2);
    __ subu(t1, a0, t1);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // t1: copy end address

    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));
    __ push(t0);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t1));
    __ addiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);

    // If the function is strong, we need to throw an error.
    Label no_strong_error;
    __ lw(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lw(t2, FieldMemOperand(t1, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, t2, Operand(1 << (SharedFunctionInfo::kStrongModeFunction +
                                 kSmiTagSize)));
    __ Branch(&no_strong_error, eq, t3, Operand(zero_reg));

    // What we really care about is the required number of arguments.
    __ lw(t2, FieldMemOperand(t1, SharedFunctionInfo::kLengthOffset));
    __ SmiUntag(t2);
    __ Branch(&no_strong_error, ge, a0, Operand(t2));

    {
      FrameScope frame(masm, StackFrame::MANUAL);
      EnterArgumentsAdaptorFrame(masm);
      __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments);
    }

    __ bind(&no_strong_error);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into t3.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Addu(t3, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // t3: copy end address
    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Subu(sp, sp, kPointerSize);
    __ Subu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
    __ sw(t0, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ sll(t2, a2, kPointerSizeLog2);
    __ Subu(t1, fp, Operand(t2));
    // Adjust for frame.
    __ Subu(t1, t1, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ Subu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(t1));
    __ sw(t0, MemOperand(sp));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mov(a0, a2);
  // a0 : expected number of arguments
  // a1 : function (passed through to callee)
  // a3 : new target (passed through to callee)
  __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Call(t0);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Jump(t0);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ break_(0xCC);
  }
}
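
// Behavior of the adaptor above, summarized:
//   actual >= expected: exactly `expected` arguments (plus the receiver) are
//                       copied into the new frame; surplus actuals remain in
//                       the caller's frame and are discarded with it on exit.
//   actual <  expected: all actual arguments are copied, then the remaining
//                       slots are filled with undefined up to `expected`.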


#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS