// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_ARM64

#include "src/codegen.h"
#include "src/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen.h"
#include "src/runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)
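// The `__` prefix expands via ACCESS_MASM to a method call on `masm`; it
// keeps the long generated-code sequences below readable.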


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.
  __ Ldr(result, GlobalObjectMemOperand());
  __ Ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ Ldr(result,
         MemOperand(result,
                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.
  __ Ldr(result, GlobalObjectMemOperand());
  __ Ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ Ldr(result, ContextMemOperand(result,
                                   Context::INTERNAL_ARRAY_FUNCTION_INDEX));
}


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- x0                 : number of arguments excluding receiver
  //  -- x1                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument (argc == x0)
  //  -- sp[8 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ Push(x1);
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects x0 to contain the number of arguments
  // including the receiver and the extra arguments.
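  // For example, a NEEDS_CALLED_FUNCTION builtin entered with two explicit
  // arguments has x0 == 2 here and leaves with x0 == 4: two arguments, the
  // receiver and the pushed function.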
  __ Add(x0, x0, num_extra_args + 1);
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_InternalArrayCode");
  Label generic_array_code;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray function should be a map.
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_ArrayCode");
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // The initial map for the builtin Array function should be a map.
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_StringConstructCode");
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, x10, x11);

  Register argc = x0;
  Register function = x1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, x10);
    __ Cmp(function, x10);
    __ Assert(eq, kUnexpectedStringFunction);
  }

  // Load the first argument into x0 and drop the rest.
  Label no_arguments;
  __ Cbz(argc, &no_arguments);
  // First arg = sp[(argc - 1) * 8].
  __ Sub(argc, argc, 1);
  __ Claim(argc, kXRegSize);
  // jssp now points to args[0]; the post-indexed load below reads args[0]
  // and then drops both args[0] and the receiver.
  Register arg = argc;
  __ Ldr(arg, MemOperand(jssp, 2 * kPointerSize, PostIndex));
  argc = NoReg;

  Register argument = x2;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(arg,        // Input.
                             argument,   // Result.
                             x10,        // Scratch.
                             x11,        // Scratch.
                             x12,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, x10, x11);
  __ Bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- x2     : argument converted to string
  //  -- x1     : constructor function
  //  -- lr     : return address
  // -----------------------------------

  Label gc_required;
  Register new_obj = x0;
  __ Allocate(JSValue::kSize, new_obj, x10, x11, &gc_required, TAG_OBJECT);

  // Initialize the String object.
  Register map = x3;
  __ LoadGlobalFunctionInitialMap(function, map, x10);
  if (FLAG_debug_code) {
    __ Ldrb(x4, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ Cmp(x4, JSValue::kSize >> kPointerSizeLog2);
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
    __ Ldrb(x4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ Cmp(x4, 0);
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ Str(map, FieldMemOperand(new_obj, HeapObject::kMapOffset));

  Register empty = x3;
  __ LoadRoot(empty, Heap::kEmptyFixedArrayRootIndex);
  __ Str(empty, FieldMemOperand(new_obj, JSObject::kPropertiesOffset));
  __ Str(empty, FieldMemOperand(new_obj, JSObject::kElementsOffset));

  __ Str(argument, FieldMemOperand(new_obj, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == (4 * kPointerSize));
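  // The map, properties, elements and value stores above cover all four
  // pointer-sized slots asserted here, so no filler stores are needed.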

  __ Ret();

  // The argument was not found in the number-to-string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ Bind(&not_cached);
  __ JumpIfSmi(arg, &convert_argument);

  // Is it a String?
  __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ Ldrb(x11, FieldMemOperand(x10, Map::kInstanceTypeOffset));
  __ Tbnz(x11, MaskToBit(kIsNotStringMask), &convert_argument);
  __ Mov(argument, arg);
  __ IncrementCounter(counters->string_ctor_string_value(), 1, x10, x11);
  __ B(&argument_is_string);

  // Invoke the conversion builtin and put the result into x2.
  __ Bind(&convert_argument);
  __ Push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, x10, x11);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(arg);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ Pop(function);
  __ Mov(argument, x0);
  __ B(&argument_is_string);

  // Load the empty string into x2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ Bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ B(&argument_is_string);

  // At this point the argument is already a string. Call runtime to create a
  // string wrapper.
  __ Bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, x10, x11);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}


static void CallRuntimePassFunction(MacroAssembler* masm,
                                    Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  //   - Push a copy of the function onto the stack.
  //   - Push another copy as a parameter to the runtime call.
  __ Push(x1, x1);

  __ CallRuntime(function_id, 1);

  //   - Restore the function to x1.
  __ Pop(x1);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset));
  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
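  // A Code object is a tagged heap pointer; the raw entry point is the object
  // address plus the header size, minus the heap-object tag.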
  __ Br(x2);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
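  // On entry, x0 holds the Code object returned by the preceding runtime
  // call.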
  __ Add(x0, x0, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x0);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However, not
  // checking may delay installing ready functions, and always checking would
  // be quite expensive. A good compromise is to first check against the stack
  // limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
  __ B(hs, &ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ Bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- x1     : constructor function
  //  -- x2     : allocation site or undefined
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");
  // We should never create mementos for API functions.
  DCHECK(!is_api_function || !create_memento);

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the three incoming parameters on the stack.
    if (create_memento) {
      __ AssertUndefinedOrAllocationSite(x2, x10);
      __ Push(x2);
    }

    Register argc = x0;
    Register constructor = x1;
    // x1: constructor function
    __ SmiTag(argc);
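    // On arm64, a smi keeps its 32-bit payload in the upper word of the
    // register (kSmiShift == 32), so SmiTag is a single logical shift left.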
    __ Push(argc, constructor);
    // sp[0]: constructor function
    // sp[1]: number of arguments (smi-tagged)

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ Mov(x2, Operand(debug_step_in_fp));
      __ Ldr(x2, MemOperand(x2));
      __ Cbnz(x2, &rt_call);
      // Load the initial map and verify that it is in fact a map.
      Register init_map = x2;
      __ Ldr(init_map,
             FieldMemOperand(constructor,
                             JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(init_map, &rt_call);
      __ JumpIfNotObjectType(init_map, x10, x11, MAP_TYPE, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc), in which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      __ CompareInstanceType(init_map, x10, JS_FUNCTION_TYPE);
      __ B(eq, &rt_call);

      Register construction_count = x14;
      if (!is_api_function) {
        Label allocate;
        MemOperand bit_field3 =
            FieldMemOperand(init_map, Map::kBitField3Offset);
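        // In-object slack tracking: maps of freshly created functions carry a
        // construction countdown in bit_field3. Each construction decrements
        // it; when it reaches kFinishSlackTracking, the runtime call below
        // shrinks the instance size to what is actually used.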
        // Check if slack tracking is enabled.
        __ Ldr(x4, bit_field3);
        __ DecodeField<Map::ConstructionCount>(construction_count, x4);
        __ Cmp(construction_count, Operand(JSFunction::kNoSlackTracking));
        __ B(eq, &allocate);
        // Decrease generous allocation count.
        __ Subs(x4, x4, Operand(1 << Map::ConstructionCount::kShift));
        __ Str(x4, bit_field3);
        __ Cmp(construction_count, Operand(JSFunction::kFinishSlackTracking));
        __ B(ne, &allocate);

        // Push the constructor and map to the stack, and the constructor again
        // as argument to the runtime call.
        __ Push(constructor, init_map, constructor);
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
        __ Pop(init_map, constructor);
        __ Mov(construction_count, Operand(JSFunction::kNoSlackTracking));
        __ Bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      Register obj_size = x3;
      Register new_obj = x4;
      __ Ldrb(obj_size, FieldMemOperand(init_map, Map::kInstanceSizeOffset));
      if (create_memento) {
        __ Add(x7, obj_size,
               Operand(AllocationMemento::kSize / kPointerSize));
        __ Allocate(x7, new_obj, x10, x11, &rt_call, SIZE_IN_WORDS);
      } else {
        __ Allocate(obj_size, new_obj, x10, x11, &rt_call, SIZE_IN_WORDS);
      }

      // The JSObject has been allocated; now initialize the fields. The map
      // is set to the initial map, and properties and elements are set to the
      // empty fixed array.
      // NB. the object pointer is not tagged, so MemOperand is used.
      Register empty = x5;
      __ LoadRoot(empty, Heap::kEmptyFixedArrayRootIndex);
      __ Str(init_map, MemOperand(new_obj, JSObject::kMapOffset));
      STATIC_ASSERT(JSObject::kElementsOffset ==
          (JSObject::kPropertiesOffset + kPointerSize));
      __ Stp(empty, empty, MemOperand(new_obj, JSObject::kPropertiesOffset));
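      // The store-pair relies on the STATIC_ASSERT above: the properties and
      // elements fields are adjacent, so a single Stp initializes both.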

      Register first_prop = x5;
      __ Add(first_prop, new_obj, JSObject::kHeaderSize);

      // Fill all of the in-object properties with the appropriate filler.
      Register filler = x7;
      __ LoadRoot(filler, Heap::kUndefinedValueRootIndex);

      // Obtain number of pre-allocated property fields and in-object
      // properties.
      Register prealloc_fields = x10;
      Register inobject_props = x11;
      Register inst_sizes = x11;
      __ Ldr(inst_sizes, FieldMemOperand(init_map, Map::kInstanceSizesOffset));
      __ Ubfx(prealloc_fields, inst_sizes,
              Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
              kBitsPerByte);
      __ Ubfx(inobject_props, inst_sizes,
              Map::kInObjectPropertiesByte * kBitsPerByte, kBitsPerByte);

      // Calculate number of property fields in the object.
      Register prop_fields = x6;
      __ Sub(prop_fields, obj_size, JSObject::kHeaderSize / kPointerSize);

      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ Cmp(construction_count, Operand(JSFunction::kNoSlackTracking));
        __ B(eq, &no_inobject_slack_tracking);
        construction_count = NoReg;

        // Fill the pre-allocated fields with undef.
        __ FillFields(first_prop, prealloc_fields, filler);

        // Update first_prop to point to the first field after the
        // pre-allocated fields.
        __ Add(first_prop, first_prop,
               Operand(prealloc_fields, LSL, kPointerSizeLog2));

        if (FLAG_debug_code) {
          Register obj_end = x14;
          __ Add(obj_end, new_obj, Operand(obj_size, LSL, kPointerSizeLog2));
          __ Cmp(first_prop, obj_end);
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
        }

        // Fill the remaining fields with the one-pointer filler map.
        __ LoadRoot(filler, Heap::kOnePointerFillerMapRootIndex);
        __ Sub(prop_fields, prop_fields, prealloc_fields);

        __ Bind(&no_inobject_slack_tracking);
      }
      if (create_memento) {
        // Fill the remaining property fields with undef.
        __ FillFields(first_prop, prop_fields, filler);
        __ Add(first_prop, new_obj, Operand(obj_size, LSL, kPointerSizeLog2));
        __ LoadRoot(x14, Heap::kAllocationMementoMapRootIndex);
        DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
        __ Str(x14, MemOperand(first_prop, kPointerSize, PostIndex));
        // Load the AllocationSite.
        __ Peek(x14, 2 * kXRegSize);
        DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
        __ Str(x14, MemOperand(first_prop, kPointerSize, PostIndex));
        first_prop = NoReg;
      } else {
        // Fill all of the property fields with undef.
        __ FillFields(first_prop, prop_fields, filler);
        first_prop = NoReg;
        prop_fields = NoReg;
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ Add(new_obj, new_obj, kHeapObjectTag);

      // Check if a non-empty properties array is needed. Continue with
      // allocated object if not, or fall through to runtime call if it is.
      Register element_count = x3;
      __ Ldrb(element_count,
              FieldMemOperand(init_map, Map::kUnusedPropertyFieldsOffset));
      // The instance-sizes field contains both pre-allocated property fields
      // and in-object properties.
      __ Add(element_count, element_count, prealloc_fields);
      __ Subs(element_count, element_count, inobject_props);

      // Done if no extra properties are to be allocated.
      __ B(eq, &allocated);
      __ Assert(pl, kPropertyAllocationCountFailed);

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      Register new_array = x5;
      Register array_size = x6;
      __ Add(array_size, element_count, FixedArray::kHeaderSize / kPointerSize);
      __ Allocate(array_size, new_array, x11, x12, &undo_allocation,
                  static_cast<AllocationFlags>(RESULT_CONTAINS_TOP |
                                               SIZE_IN_WORDS));

      Register array_map = x10;
      __ LoadRoot(array_map, Heap::kFixedArrayMapRootIndex);
      __ Str(array_map, MemOperand(new_array, FixedArray::kMapOffset));
      __ SmiTag(x0, element_count);
      __ Str(x0, MemOperand(new_array, FixedArray::kLengthOffset));

      // Initialize the fields to undefined.
      Register elements = x10;
      __ Add(elements, new_array, FixedArray::kHeaderSize);
      __ FillFields(elements, element_count, filler);

      // Store the initialized FixedArray into the properties field of the
      // JSObject.
      __ Add(new_array, new_array, kHeapObjectTag);
      __ Str(new_array, FieldMemOperand(new_obj, JSObject::kPropertiesOffset));

      // Continue with JSObject being successfully allocated.
      __ B(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated object's unused properties.
      __ Bind(&undo_allocation);
      __ UndoAllocationInNewSpace(new_obj, x14);
    }

    // Allocate the new receiver object using the runtime call.
    __ Bind(&rt_call);
    Label count_incremented;
    if (create_memento) {
      // Get the cell or allocation site.
      __ Peek(x4, 2 * kXRegSize);
      __ Push(x4);
      __ Push(constructor);  // Argument for Runtime_NewObject.
      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
      __ Mov(x4, x0);
      // If we ended up using the runtime and we want a memento, then the
      // runtime call made it for us, so we must not increment the create
      // count again.
      __ jmp(&count_incremented);
    } else {
      __ Push(constructor);  // Argument for Runtime_NewObject.
      __ CallRuntime(Runtime::kNewObject, 1);
      __ Mov(x4, x0);
    }

    // Receiver for constructor call allocated.
    // x4: JSObject
    __ Bind(&allocated);

    if (create_memento) {
      __ Peek(x10, 2 * kXRegSize);
      __ JumpIfRoot(x10, Heap::kUndefinedValueRootIndex, &count_incremented);
      // x10 is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ Ldr(x5, FieldMemOperand(x10,
                                 AllocationSite::kPretenureCreateCountOffset));
      __ Add(x5, x5, Operand(Smi::FromInt(1)));
      __ Str(x5, FieldMemOperand(x10,
                                 AllocationSite::kPretenureCreateCountOffset));
      __ Bind(&count_incremented);
    }

    __ Push(x4, x4);

    // Reload the number of arguments from the stack.
    // Set it up in x0 for the function call below.
    // jssp[0]: receiver
    // jssp[1]: receiver
    // jssp[2]: constructor function
    // jssp[3]: number of arguments (smi-tagged)
    __ Peek(constructor, 2 * kXRegSize);  // Load constructor.
    __ Peek(argc, 3 * kXRegSize);  // Load number of arguments.
    __ SmiUntag(argc);

    // Set up pointer to last argument.
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset);

    // Copy arguments and receiver to the expression stack.
    // Copy 2 values every loop to use ldp/stp.
    // x0: number of arguments
    // x1: constructor function
    // x2: address of last argument (caller sp)
    // jssp[0]: receiver
    // jssp[1]: receiver
    // jssp[2]: constructor function
    // jssp[3]: number of arguments (smi-tagged)
    // Compute the start address of the copy in x3.
    __ Add(x3, x2, Operand(argc, LSL, kPointerSizeLog2));
    Label loop, entry, done_copying_arguments;
    __ B(&entry);
    __ Bind(&loop);
    __ Ldp(x10, x11, MemOperand(x3, -2 * kPointerSize, PreIndex));
    __ Push(x11, x10);
    __ Bind(&entry);
    __ Cmp(x3, x2);
    __ B(gt, &loop);
    // Because we copied values two at a time we may have copied one extra
    // value. Drop it if that is the case.
    __ B(eq, &done_copying_arguments);
    __ Drop(1);
    __ Bind(&done_copying_arguments);

    // Call the function.
    // x0: number of arguments
    // x1: constructor function
    if (is_api_function) {
      __ Ldr(cp, FieldMemOperand(constructor, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(argc);
      __ InvokeFunction(constructor, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore the context from the frame.
    // x0: result
    // jssp[0]: receiver
    // jssp[1]: constructor function
    // jssp[2]: number of arguments (smi-tagged)
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // x0: result
    // jssp[0]: receiver (newly allocated object)
    // jssp[1]: constructor function
    // jssp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(x0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ JumpIfObjectType(x0, x1, x3, FIRST_SPEC_OBJECT_TYPE, &exit, ge);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ Bind(&use_receiver);
    __ Peek(x0, 0);

    // Remove the receiver from the stack, remove caller arguments, and
    // return.
    __ Bind(&exit);
    // x0: result
    // jssp[0]: receiver (newly allocated object)
    // jssp[1]: constructor function
    // jssp[2]: number of arguments (smi-tagged)
    __ Peek(x1, 2 * kXRegSize);

    // Leave construct frame.
  }

  __ DropBySMI(x1);
  __ Drop(1);
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, x1, x2);
  __ Ret();
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


// Input:
//   x0: code entry.
//   x1: function.
//   x2: receiver.
//   x3: argc.
//   x4: argv.
// Output:
//   x0: result.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody().
  Register function = x1;
  Register receiver = x2;
  Register argc = x3;
  Register argv = x4;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ Mov(cp, 0);

  {
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(function, receiver);

    // Copy arguments to the stack in a loop, in reverse order.
    // x3: argc.
    // x4: argv.
    Label loop, entry;
    // Compute the copy end address.
    __ Add(x10, argv, Operand(argc, LSL, kPointerSizeLog2));

    __ B(&entry);
    __ Bind(&loop);
    __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex));
    __ Ldr(x12, MemOperand(x11));  // Dereference the handle.
    __ Push(x12);  // Push the argument.
    __ Bind(&entry);
    __ Cmp(x10, argv);
    __ B(ne, &loop);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    // The original values have been saved in JSEntryStub::GenerateBody().
    __ LoadRoot(x19, Heap::kUndefinedValueRootIndex);
    __ Mov(x20, x19);
    __ Mov(x21, x19);
    __ Mov(x22, x19);
    __ Mov(x23, x19);
    __ Mov(x24, x19);
    __ Mov(x25, x19);
    // Don't initialize the reserved registers.
    // x26 : root register (root).
    // x27 : context pointer (cp).
    // x28 : JS stack pointer (jssp).
    // x29 : frame pointer (fp).

    __ Mov(x0, argc);
    if (is_construct) {
      // No type feedback cell is available.
      __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);

      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(x0);
      __ InvokeFunction(function, actual, CALL_FUNCTION, NullCallWrapper());
    }
    // Exit the JS internal frame and remove the parameters (except function),
    // and return.
  }

  // Result is in x0. Return.
  __ Ret();
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  Register function = x1;

  // Preserve the function. At the same time, push arguments for
  // kCompileOptimized.
  __ LoadObject(x10, masm->isolate()->factory()->ToBoolean(concurrent));
  __ Push(function, function, x10);

  __ CallRuntime(Runtime::kCompileOptimized, 2);

  // Restore the function.
  __ Pop(function);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code fast, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
    __ Pop(lr, fp, x1, x0);
  }

  // The calling function has been made young again, so return to execute the
  // real frame set-up code.
  __ Br(x0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(
            masm->isolate()), 2);
    __ Pop(lr, fp, x1, x0);

    // Perform prologue operations usually performed by the young code stub.
    __ EmitFrameSetupForCodeAgePatching(masm);
  }

  // Jump to point after the code-age stub.
  __ Add(x0, x0, kNoCodeAgeSequenceLength);
  __ Br(x0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across the notification; this is important for
    // compiled stubs that tail call the runtime on deopts, passing their
    // parameters in registers.
    // TODO(jbramley): Is it correct (and appropriate) to use safepoint
    // registers here? According to the comment above, we should only need to
    // preserve the registers with parameters.
    __ PushXRegList(kSafepointSavedRegisters);
    // Notify the runtime system of the stub failure.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ PopXRegList(kSafepointSavedRegisters);
  }

  // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate).
  __ Drop(1);

  // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads this
  // into lr before it jumps here.
  __ Br(lr);
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ Mov(x0, Smi::FromInt(static_cast<int>(type)));
    __ Push(x0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it.
  Register state = x6;
  __ Peek(state, 0);
  __ SmiUntag(state);

  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ CompareAndBranch(
      state, FullCodeGenerator::NO_REGISTERS, ne, &with_tos_register);
  __ Drop(1);  // Remove state.
  __ Ret();

  __ Bind(&with_tos_register);
  // Reload TOS register.
  __ Peek(x0, kPointerSize);
  __ CompareAndBranch(state, FullCodeGenerator::TOS_REG, ne, &unknown_state);
  __ Drop(2);  // Remove state and TOS.
  __ Ret();

  __ Bind(&unknown_state);
  __ Abort(kInvalidFullCodegenState);
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(x0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CompareAndBranch(x0, Smi::FromInt(0), ne, &skip);
  __ Ret();

  __ Bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ Ldr(x1, MemOperand(x0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ Ldrsw(w1, UntagSmiFieldMemOperand(x1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex)));
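  // UntagSmiFieldMemOperand addresses the upper half of the smi field, where
  // the 32-bit payload lives, so the Ldrsw reads the value already untagged.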

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ Add(x0, x0, x1);
  __ Add(lr, x0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as an indicator that recompilation might be
  // required.
  Label ok;
  __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
  __ B(hs, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ Bind(&ok);
  __ Ret();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  enum {
    call_type_JS_func = 0,
    call_type_func_proxy = 1,
    call_type_non_func = 2
  };
  Register argc = x0;
  Register function = x1;
  Register call_type = x4;
  Register scratch1 = x10;
  Register scratch2 = x11;
  Register receiver_type = x13;

  ASM_LOCATION("Builtins::Generate_FunctionCall");
  // 1. Make sure we have at least one argument.
  { Label done;
    __ Cbnz(argc, &done);
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    __ Push(scratch1);
    __ Mov(argc, 1);
    __ Bind(&done);
  }
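  // For example, a bare call f() reaches this point with argc == 0; the
  // pushed undefined ends up as the receiver once step 4 shifts the
  // arguments down.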

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  Label slow, non_function;
  __ Peek(function, Operand(argc, LSL, kXRegSizeLog2));
  __ JumpIfSmi(function, &non_function);
  __ JumpIfNotObjectType(function, scratch1, receiver_type,
                         JS_FUNCTION_TYPE, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  __ Mov(call_type, static_cast<int>(call_type_JS_func));
  { Label convert_to_object, use_global_proxy, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    // Also do not transform the receiver for native functions (the compiler
    // hints are loaded into scratch2 below).
    __ Ldr(scratch1,
           FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(scratch2.W(),
           FieldMemOperand(scratch1, SharedFunctionInfo::kCompilerHintsOffset));
    __ TestAndBranchIfAnySet(
        scratch2.W(),
        (1 << SharedFunctionInfo::kStrictModeFunction) |
        (1 << SharedFunctionInfo::kNative),
        &shift_arguments);

    // Compute the receiver in sloppy mode.
    Register receiver = x2;
    __ Sub(scratch1, argc, 1);
    __ Peek(receiver, Operand(scratch1, LSL, kXRegSizeLog2));
    __ JumpIfSmi(receiver, &convert_to_object);

    __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex,
                  &use_global_proxy);
    __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &use_global_proxy);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ JumpIfObjectType(receiver, scratch1, scratch2,
                        FIRST_SPEC_OBJECT_TYPE, &shift_arguments, ge);

    __ Bind(&convert_to_object);

    {
      // Enter an internal frame in order to preserve argument count.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(argc);

      __ Push(argc, receiver);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ Mov(receiver, x0);

      __ Pop(argc);
      __ SmiUntag(argc);

      // Exit the internal frame.
    }

    // Restore the function and flag in the registers.
    __ Peek(function, Operand(argc, LSL, kXRegSizeLog2));
    __ Mov(call_type, static_cast<int>(call_type_JS_func));
    __ B(&patch_receiver);

    __ Bind(&use_global_proxy);
    __ Ldr(receiver, GlobalObjectMemOperand());
    __ Ldr(receiver,
           FieldMemOperand(receiver, GlobalObject::kGlobalProxyOffset));

    __ Bind(&patch_receiver);
    __ Sub(scratch1, argc, 1);
    __ Poke(receiver, Operand(scratch1, LSL, kXRegSizeLog2));

    __ B(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ Bind(&slow);
  __ Mov(call_type, static_cast<int>(call_type_func_proxy));
  __ Cmp(receiver_type, JS_FUNCTION_PROXY_TYPE);
  __ B(eq, &shift_arguments);
  __ Bind(&non_function);
  __ Mov(call_type, static_cast<int>(call_type_non_func));

  // 3c. Patch the first argument when calling a non-function.  The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // call type (0: JS function, 1: function proxy, 2: non-function)
  __ Sub(scratch1, argc, 1);
  __ Poke(function, Operand(scratch1, LSL, kXRegSizeLog2));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  // call type (0: JS function, 1: function proxy, 2: non-function)
  __ Bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is jssp.
    __ Add(scratch2, jssp, Operand(argc, LSL, kPointerSizeLog2));
    __ Sub(scratch1, scratch2, kPointerSize);

    __ Bind(&loop);
    __ Ldr(x12, MemOperand(scratch1, -kPointerSize, PostIndex));
    __ Str(x12, MemOperand(scratch2, -kPointerSize, PostIndex));
    __ Cmp(scratch1, jssp);
    __ B(ge, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Sub(argc, argc, 1);
    __ Drop(1);
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // call type (0: JS function, 1: function proxy, 2: non-function)
  { Label js_function, non_proxy;
    __ Cbz(call_type, &js_function);
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ Mov(x2, 0);
    __ Cmp(call_type, static_cast<int>(call_type_func_proxy));
    __ B(ne, &non_proxy);

    __ Push(function);  // Re-add proxy object as additional argument.
    __ Add(argc, argc, 1);
    __ GetBuiltinFunction(function, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ Bind(&non_proxy);
    __ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ Bind(&js_function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register x3 without checking arguments.
  __ Ldr(x3, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
  __ Ldrsw(x2,
           FieldMemOperand(x3,
             SharedFunctionInfo::kFormalParameterCountOffset));
  Label dont_adapt_args;
  __ Cmp(x2, argc);  // Check formal and actual parameter counts.
  __ B(eq, &dont_adapt_args);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
  __ Bind(&dont_adapt_args);

  __ Ldr(x3, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
  ParameterCount expected(0);
  __ InvokeCode(x3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_FunctionApply");
  const int kIndexOffset    =
      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
  const int kLimitOffset    =
      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
  const int kArgsOffset     =  2 * kPointerSize;
  const int kReceiverOffset =  3 * kPointerSize;
  const int kFunctionOffset =  4 * kPointerSize;

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);

    Register args = x12;
    Register receiver = x14;
    Register function = x15;

    // Get the length of the arguments via a builtin call.
    __ Ldr(function, MemOperand(fp, kFunctionOffset));
    __ Ldr(args, MemOperand(fp, kArgsOffset));
    __ Push(function, args);
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
    Register argc = x0;

    // Check the stack for overflow.
    // We are not trying to catch interruptions (e.g. debug break and
    // preemption) here, so the "real stack limit" is checked.
    Label enough_stack_space;
    __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
    __ Ldr(function, MemOperand(fp, kFunctionOffset));
    // Make x10 the space we have left. The stack might already be overflowed
    // here which will cause x10 to become negative.
    // TODO(jbramley): Check that the stack usage here is safe.
    __ Sub(x10, jssp, x10);
    // Check if the arguments will overflow the stack.
    __ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2));
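    // argc is still a smi here; Operand::UntagSmiAndScale folds the untag and
    // the scale to a byte count into a single compare operand.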
    __ B(gt, &enough_stack_space);
    // There is not enough stack space, so use a builtin to throw an
    // appropriate error.
    __ Push(function, argc);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    // We should never return from the STACK_OVERFLOW builtin.
    if (__ emit_debug_code()) {
      __ Unreachable();
    }

    __ Bind(&enough_stack_space);
    // Push current limit and index.
    __ Mov(x1, 0);  // Initial index.
    __ Push(argc, x1);

    Label push_receiver;
    __ Ldr(receiver, MemOperand(fp, kReceiverOffset));

    // Check that the function is a JS function. Otherwise it must be a
    // proxy; in that case it will be invoked later.
    __ JumpIfNotObjectType(function, x10, x11, JS_FUNCTION_TYPE,
                           &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset));
    // Load the shared function info.
    __ Ldr(x2, FieldMemOperand(function,
                               JSFunction::kSharedFunctionInfoOffset));

    // Compute and push the receiver.
    // Do not transform the receiver for strict mode functions.
    Label convert_receiver_to_object, use_global_proxy;
    __ Ldr(w10, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset));
    __ Tbnz(x10, SharedFunctionInfo::kStrictModeFunction, &push_receiver);
    // Do not transform the receiver for native functions.
    __ Tbnz(x10, SharedFunctionInfo::kNative, &push_receiver);

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(receiver, &convert_receiver_to_object);
    __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &use_global_proxy);
    __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex,
                  &use_global_proxy);

    // Check if the receiver is already a JavaScript object.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ JumpIfObjectType(receiver, x10, x11, FIRST_SPEC_OBJECT_TYPE,
                        &push_receiver, ge);

    // Call a builtin to convert the receiver to a regular object.
    __ Bind(&convert_receiver_to_object);
    __ Push(receiver);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ Mov(receiver, x0);
    __ B(&push_receiver);

    __ Bind(&use_global_proxy);
    __ Ldr(x10, GlobalObjectMemOperand());
    __ Ldr(receiver, FieldMemOperand(x10, GlobalObject::kGlobalProxyOffset));

    // Push the receiver.
    __ Bind(&push_receiver);
    __ Push(receiver);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    Register current = x0;
    __ Ldr(current, MemOperand(fp, kIndexOffset));
    __ B(&entry);

    __ Bind(&loop);
    // Load the current argument from the arguments array and push it.
    // TODO(all): Couldn't we optimize this for JS arrays?

    __ Ldr(x1, MemOperand(fp, kArgsOffset));
    __ Push(x1, current);

    // Call the runtime to access the property in the arguments array.
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ Push(x0);

    // Advance the copy index stored in the frame.
    __ Ldr(current, MemOperand(fp, kIndexOffset));
    __ Add(current, current, Smi::FromInt(1));
    __ Str(current, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ Bind(&entry);
    __ Ldr(x1, MemOperand(fp, kLimitOffset));
    __ Cmp(current, x1);
    __ B(ne, &loop);
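
    // In effect, the loop above performs (a JS-flavoured sketch):
    //   while (index != limit) {
    //     push(%GetProperty(args, index));
    //     index += 1;  // smi arithmetic
    //   }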

    // At the end of the loop, the number of arguments is stored in
    // 'current', represented as a smi.

    function = x1;  // From now on we want the function to be kept in x1.
    __ Ldr(function, MemOperand(fp, kFunctionOffset));

    // Call the function.
    Label call_proxy;
    ParameterCount actual(current);
    __ SmiUntag(current);
    __ JumpIfNotObjectType(function, x10, x11, JS_FUNCTION_TYPE, &call_proxy);
    __ InvokeFunction(function, actual, CALL_FUNCTION, NullCallWrapper());
    frame_scope.GenerateLeaveFrame();
    __ Drop(3);
    __ Ret();

    // Call the function proxy.
    __ Bind(&call_proxy);
    // x0 : argc
    // x1 : function
    __ Push(function);  // Add function proxy as last argument.
    __ Add(x0, x0, 1);
    __ Mov(x2, 0);
    __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
  }
  __ Drop(3);
  __ Ret();
}


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- x0 : actual number of arguments
  //  -- x1 : function (passed through to callee)
  //  -- x2 : expected number of arguments
  // -----------------------------------
  // Check the stack for overflow.
  // We are not trying to catch interruptions (e.g. debug break and
  // preemption) here, so the "real stack limit" is checked.
  __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
  // Make x10 the space we have left. The stack might already have
  // overflowed here, in which case x10 will be negative.
  __ Sub(x10, jssp, x10);
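  // Note: unlike the smi-based check in Generate_FunctionApply above, x2
  // holds an untagged count here, so a plain shift by kPointerSizeLog2
  // gives the byte size of the expected arguments.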
  // Check if the arguments will overflow the stack.
  __ Cmp(x10, Operand(x2, LSL, kPointerSizeLog2));
  __ B(le, stack_overflow);
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(x10, x0);
  __ Mov(x11, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ Push(lr, fp);
  __ Push(x11, x1, x10);
  __ Add(fp, jssp,
         StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize);
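
  // A sketch of the frame this leaves behind (highest address first; fp
  // now points at the saved fp slot):
  //   fp[kPointerSize]      : saved lr
  //   fp[0]                 : saved fp
  //   fp[-kPointerSize]     : ARGUMENTS_ADAPTOR frame-type marker (smi)
  //   fp[-2 * kPointerSize] : function (x1)
  //   fp[-3 * kPointerSize] : actual argument count (smi)   <- jssp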
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then drop the parameters and the receiver.
  __ Ldr(x10, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                               kPointerSize)));
  __ Mov(jssp, fp);
  __ Pop(fp, lr);
  __ DropBySMI(x10, kXRegSize);
  __ Drop(1);
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline");
  // ----------- S t a t e -------------
  //  -- x0 : actual number of arguments
  //  -- x1 : function (passed through to callee)
  //  -- x2 : expected number of arguments
  // -----------------------------------

  Label stack_overflow;
  ArgumentAdaptorStackCheck(masm, &stack_overflow);

  Register argc_actual = x0;  // Excluding the receiver.
  Register argc_expected = x2;  // Excluding the receiver.
  Register function = x1;
  Register code_entry = x3;

  Label invoke, dont_adapt_arguments;

  Label too_few;
  __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
  __ Cmp(argc_actual, argc_expected);
  __ B(lt, &too_few);
  __ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
  __ B(eq, &dont_adapt_arguments);
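
  // A sketch of the dispatch above:
  //   actual <  expected                       -> too_few
  //   expected == kDontAdaptArgumentsSentinel  -> dont_adapt_arguments
  //   otherwise (actual >= expected)           -> fall through ("enough")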

  {  // Enough parameters: actual >= expected
    EnterArgumentsAdaptorFrame(masm);

    Register copy_start = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    __ Lsl(argc_expected, argc_expected, kPointerSizeLog2);

    // Adjust for fp, lr, and the receiver.
    __ Add(copy_start, fp, 3 * kPointerSize);
    __ Add(copy_start, copy_start, Operand(argc_actual, LSL, kPointerSizeLog2));
    __ Sub(copy_end, copy_start, argc_expected);
    __ Sub(copy_end, copy_end, kPointerSize);
    __ Mov(copy_to, jssp);
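
    // At this point (a sketch; argc_expected now holds a byte count):
    //   copy_start = fp + 3 * kPointerSize + actual_bytes
    //              = one slot past the receiver in the caller's frame
    //   copy_end   = copy_start - (expected_bytes + kPointerSize)
    // so the loop below copies the receiver and the first 'expected'
    // arguments, two slots at a time, into the new frame.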

    // Claim space for the arguments, the receiver, and one extra slot.
    // The extra slot ensures we do not write under jssp. It will be popped
    // later.
    __ Add(scratch1, argc_expected, 2 * kPointerSize);
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_start, - 2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
    __ Cmp(copy_start, copy_end);
    __ B(hi, &copy_2_by_2);

    // Correct the space allocated for the extra slot.
    __ Drop(1);

    __ B(&invoke);
  }

  {  // Too few parameters: actual < expected
    __ Bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    Register copy_from = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    __ Lsl(argc_expected, argc_expected, kPointerSizeLog2);
    __ Lsl(argc_actual, argc_actual, kPointerSizeLog2);

    // Adjust for fp, lr, and the receiver.
    __ Add(copy_from, fp, 3 * kPointerSize);
    __ Add(copy_from, copy_from, argc_actual);
    __ Mov(copy_to, jssp);
    __ Sub(copy_end, copy_to, 1 * kPointerSize);   // Adjust for the receiver.
    __ Sub(copy_end, copy_end, argc_actual);
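
    // A sketch of the pointer arithmetic (both counts are byte sizes here):
    //   copy_from = fp + 3 * kPointerSize + actual_bytes
    //             = one slot past the receiver in the caller's frame
    //   copy_end  = jssp - (actual_bytes + kPointerSize)
    // so the loop below copies the receiver and all 'actual' arguments;
    // the fill loop that follows tops up the rest with undefined.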

    // Claim space for the arguments, the receiver, and one extra slot.
    // The extra slot ensures we do not write under jssp. It will be popped
    // later.
    __ Add(scratch1, argc_expected, 2 * kPointerSize);
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_from, - 2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &copy_2_by_2);

    __ Mov(copy_to, copy_end);

    // Fill the remaining expected arguments with undefined.
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    __ Add(copy_end, jssp, kPointerSize);

    Label fill;
    __ Bind(&fill);
    __ Stp(scratch1, scratch1,
           MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &fill);

    // Correct the space allocated for the extra slot.
    __ Drop(1);
  }

  // Arguments have been adapted. Now call the entry point.
  __ Bind(&invoke);
  __ Call(code_entry);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // Call the entry point without adapting the arguments.
  __ Bind(&dont_adapt_arguments);
  __ Jump(code_entry);

  __ Bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ Unreachable();
  }
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM64