// File: v8/src/arm/builtins-arm.cc (ARM builtins code generators).
      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Redistribution and use in source and binary forms, with or without
      3 // modification, are permitted provided that the following conditions are
      4 // met:
      5 //
      6 //     * Redistributions of source code must retain the above copyright
      7 //       notice, this list of conditions and the following disclaimer.
      8 //     * Redistributions in binary form must reproduce the above
      9 //       copyright notice, this list of conditions and the following
     10 //       disclaimer in the documentation and/or other materials provided
     11 //       with the distribution.
     12 //     * Neither the name of Google Inc. nor the names of its
     13 //       contributors may be used to endorse or promote products derived
     14 //       from this software without specific prior written permission.
     15 //
     16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
     17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
     18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
     19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
     20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
     21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
     22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
     23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
     24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
     26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 
     28 #include "v8.h"
     29 
     30 #if V8_TARGET_ARCH_ARM
     31 
     32 #include "codegen.h"
     33 #include "debug.h"
     34 #include "deoptimizer.h"
     35 #include "full-codegen.h"
     36 #include "runtime.h"
     37 
     38 namespace v8 {
     39 namespace internal {
     40 
     41 
     42 #define __ ACCESS_MASM(masm)
     43 
     44 
     45 void Builtins::Generate_Adaptor(MacroAssembler* masm,
     46                                 CFunctionId id,
     47                                 BuiltinExtraArguments extra_args) {
     48   // ----------- S t a t e -------------
     49   //  -- r0                 : number of arguments excluding receiver
     50   //  -- r1                 : called function (only guaranteed when
     51   //                          extra_args requires it)
     52   //  -- cp                 : context
     53   //  -- sp[0]              : last argument
     54   //  -- ...
     55   //  -- sp[4 * (argc - 1)] : first argument (argc == r0)
     56   //  -- sp[4 * argc]       : receiver
     57   // -----------------------------------
     58 
     59   // Insert extra arguments.
     60   int num_extra_args = 0;
     61   if (extra_args == NEEDS_CALLED_FUNCTION) {
     62     num_extra_args = 1;
     63     __ push(r1);
     64   } else {
     65     ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
     66   }
     67 
     68   // JumpToExternalReference expects r0 to contain the number of arguments
     69   // including the receiver and the extra arguments.
     70   __ add(r0, r0, Operand(num_extra_args + 1));
     71   __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
     72 }
     73 
     74 
     75 // Load the built-in InternalArray function from the current context.
     76 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
     77                                               Register result) {
     78   // Load the native context.
     79 
     80   __ ldr(result,
     81          MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
     82   __ ldr(result,
     83          FieldMemOperand(result, GlobalObject::kNativeContextOffset));
     84   // Load the InternalArray function from the native context.
     85   __ ldr(result,
     86          MemOperand(result,
     87                     Context::SlotOffset(
     88                         Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
     89 }
     90 
     91 
     92 // Load the built-in Array function from the current context.
     93 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
     94   // Load the native context.
     95 
     96   __ ldr(result,
     97          MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
     98   __ ldr(result,
     99          FieldMemOperand(result, GlobalObject::kNativeContextOffset));
    100   // Load the Array function from the native context.
    101   __ ldr(result,
    102          MemOperand(result,
    103                     Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
    104 }
    105 
    106 
    107 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
    108   // ----------- S t a t e -------------
    109   //  -- r0     : number of arguments
    110   //  -- lr     : return address
    111   //  -- sp[...]: constructor arguments
    112   // -----------------------------------
    113   Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
    114 
    115   // Get the InternalArray function.
    116   GenerateLoadInternalArrayFunction(masm, r1);
    117 
    118   if (FLAG_debug_code) {
    119     // Initial map for the builtin InternalArray functions should be maps.
    120     __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    121     __ SmiTst(r2);
    122     __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    123     __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    124     __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
    125   }
    126 
    127   // Run the native code for the InternalArray function called as a normal
    128   // function.
    129   // tail call a stub
    130   InternalArrayConstructorStub stub(masm->isolate());
    131   __ TailCallStub(&stub);
    132 }
    133 
    134 
    135 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
    136   // ----------- S t a t e -------------
    137   //  -- r0     : number of arguments
    138   //  -- lr     : return address
    139   //  -- sp[...]: constructor arguments
    140   // -----------------------------------
    141   Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
    142 
    143   // Get the Array function.
    144   GenerateLoadArrayFunction(masm, r1);
    145 
    146   if (FLAG_debug_code) {
    147     // Initial map for the builtin Array functions should be maps.
    148     __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    149     __ SmiTst(r2);
    150     __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    151     __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    152     __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
    153   }
    154 
    155   // Run the native code for the Array function called as a normal function.
    156   // tail call a stub
    157   Handle<Object> undefined_sentinel(
    158       masm->isolate()->heap()->undefined_value(),
    159       masm->isolate());
    160   __ mov(r2, Operand(undefined_sentinel));
    161   ArrayConstructorStub stub(masm->isolate());
    162   __ TailCallStub(&stub);
    163 }
    164 
    165 
// Builtin entry for the String constructor: converts the first argument to
// a string (via the number-string cache or the TO_STRING builtin) and wraps
// it in a freshly allocated JSValue, falling back to the runtime when
// allocation fails.
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3);

  Register function = r1;
  if (FLAG_debug_code) {
    // In debug builds, check that r1 really holds the String function.
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2);
    __ cmp(function, Operand(r2));
    __ Assert(eq, kUnexpectedStringFunction);
  }

  // Load the first arguments in r0 and get rid of the rest.
  Label no_arguments;
  __ cmp(r0, Operand::Zero());
  __ b(eq, &no_arguments);
  // First args = sp[(argc - 1) * 4].
  __ sub(r0, r0, Operand(1));
  __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
  // sp now point to args[0], drop args[0] + receiver.
  __ Drop(2);

  Register argument = r2;
  Label not_cached, argument_is_string;
  // Fast path: a number argument that is already in the number-string cache.
  __ LookupNumberStringCache(r0,        // Input.
                             argument,  // Result.
                             r3,        // Scratch.
                             r4,        // Scratch.
                             r5,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, r3, r4);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- r2     : argument converted to string
  //  -- r1     : constructor function
  //  -- lr     : return address
  // -----------------------------------

  // Allocate the JSValue wrapper in new space.
  Label gc_required;
  __ Allocate(JSValue::kSize,
              r0,  // Result.
              r3,  // Scratch.
              r4,  // Scratch.
              &gc_required,
              TAG_OBJECT);

  // Initialising the String Object.
  Register map = r3;
  __ LoadGlobalFunctionInitialMap(function, map, r4);
  if (FLAG_debug_code) {
    // The String wrapper map must describe exactly JSValue::kSize bytes and
    // have no unused in-object properties.
    __ ldrb(r4, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ cmp(r4, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
    __ ldrb(r4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ cmp(r4, Operand::Zero());
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ str(map, FieldMemOperand(r0, HeapObject::kMapOffset));

  // Properties and elements are both the empty fixed array.
  __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
  __ str(r3, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));

  // Store the wrapped string value.
  __ str(argument, FieldMemOperand(r0, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);

  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(r0, &convert_argument);

  // Is it a String?
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ tst(r3, Operand(kIsNotStringMask));
  __ b(ne, &convert_argument);
  __ mov(argument, r0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  __ b(&argument_is_string);

  // Invoke the conversion builtin and put the result into r2.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(r0);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mov(argument, r0);
  __ b(&argument_is_string);

  // Load the empty string into r2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ b(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}
    290 
    291 
// Calls |function_id| in the runtime with the JSFunction in r1 as its only
// argument, preserving both r1 and the call kind register r5 across the
// call.
static void CallRuntimePassFunction(MacroAssembler* masm,
                                    Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ push(r1);
  // Push call kind information and function as parameter to the runtime call.
  __ Push(r5, r1);

  __ CallRuntime(function_id, 1);
  // Restore call kind information.
  __ pop(r5);
  // Restore the function (pushed first above).
  __ pop(r1);
}
    306 
    307 
// Tail-calls the code attached to the SharedFunctionInfo of the JSFunction
// in r1.  Clobbers r2.
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
  // Skip the Code object header (and untag the heap pointer) to reach the
  // first instruction.
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}
    314 
    315 
    316 void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
    317   // Checking whether the queued function is ready for install is optional,
    318   // since we come across interrupts and stack checks elsewhere.  However,
    319   // not checking may delay installing ready functions, and always checking
    320   // would be quite expensive.  A good compromise is to first check against
    321   // stack limit as a cue for an interrupt signal.
    322   Label ok;
    323   __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    324   __ cmp(sp, Operand(ip));
    325   __ b(hs, &ok);
    326 
    327   CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
    328   // Tail call to returned code.
    329   __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
    330   __ Jump(r0);
    331 
    332   __ bind(&ok);
    333   GenerateTailCallToSharedCode(masm);
    334 }
    335 
    336 
// Requests concurrent recompilation of the function in r1, then continues
// execution in the function's shared (unoptimized) code.
void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
  GenerateTailCallToSharedCode(masm);
}
    341 
    342 
// Shared implementation of the JS construct stubs: allocates the receiver
// (inline when possible, otherwise via Runtime::kNewObject), copies the
// arguments, invokes the constructor, and applies the ECMA-262 13.2.2-7
// rule for choosing between the returned value and the allocated receiver.
// |is_api_function| selects the API construct path; |count_constructions|
// enables the slack-tracking countdown (never combined with the former).
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the two incoming parameters on the stack.
    __ SmiTag(r0);
    __ push(r0);  // Smi-tagged arguments count.
    __ push(r1);  // Constructor function.

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
      // Take the slow path whenever the debugger is stepping into calls, so
      // the debugger sees the runtime allocation.
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ mov(r2, Operand(debug_step_in_fp));
      __ ldr(r2, MemOperand(r2));
      __ tst(r2, r2);
      __ b(ne, &rt_call);
#endif

      // Load the initial map and verify that it is in fact a map.
      // r1: constructor function
      __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(r2, &rt_call);
      __ CompareObjectType(r2, r3, r4, MAP_TYPE);
      __ b(ne, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // r1: constructor function
      // r2: initial map
      __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
      __ b(eq, &rt_call);

      if (count_constructions) {
        Label allocate;
        // Decrease generous allocation count.
        __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
        MemOperand constructor_count =
            FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset);
        __ ldrb(r4, constructor_count);
        __ sub(r4, r4, Operand(1), SetCC);
        __ strb(r4, constructor_count);
        // Count not yet at zero: keep allocating inline.
        __ b(ne, &allocate);

        __ push(r1);

        __ Push(r2, r1);  // r1 = constructor
        // The call will replace the stub, so the countdown is only done once.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ pop(r2);
        __ pop(r1);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // r1: constructor function
      // r2: initial map
      __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
      __ Allocate(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // r1: constructor function
      // r2: initial map
      // r3: object size
      // r4: JSObject (not tagged)
      __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
      __ mov(r5, r4);
      ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
      ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
      __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
      ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
      __ str(r6, MemOperand(r5, kPointerSize, PostIndex));

      // Fill all the in-object properties with the appropriate filler.
      // r1: constructor function
      // r2: initial map
      // r3: object size (in words)
      // r4: JSObject (not tagged)
      // r5: First in-object property of JSObject (not tagged)
      ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
      __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
      if (count_constructions) {
        // While slack tracking is active, only the pre-allocated fields are
        // set to undefined; the rest get one-pointer fillers so the object
        // can later be truncated.
        __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
        __ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
                kBitsPerByte);
        __ add(r0, r5, Operand(r0, LSL, kPointerSizeLog2));
        // r0: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ add(ip, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
          __ cmp(r0, ip);
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
        }
        __ InitializeFieldsWithFiller(r5, r0, r6);
        // To allow for truncation.
        __ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex);
      }
      __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
      __ InitializeFieldsWithFiller(r5, r0, r6);

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ add(r4, r4, Operand(kHeapObjectTag));

      // Check if a non-empty properties array is needed. Continue with
      // allocated object if not fall through to runtime call if it is.
      // r1: constructor function
      // r4: JSObject
      // r5: start of next object (not tagged)
      __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
      // The field instance sizes contains both pre-allocated property fields
      // and in-object properties.
      __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
      __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
              kBitsPerByte);
      __ add(r3, r3, Operand(r6));
      __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * kBitsPerByte,
              kBitsPerByte);
      // r3 = unused + pre-allocated - in-object = extra properties needed.
      __ sub(r3, r3, Operand(r6), SetCC);

      // Done if no extra properties are to be allocated.
      __ b(eq, &allocated);
      __ Assert(pl, kPropertyAllocationCountFailed);

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // r1: constructor
      // r3: number of elements in properties array
      // r4: JSObject
      // r5: start of next object
      __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
      __ Allocate(
          r0,
          r5,
          r6,
          r2,
          &undo_allocation,
          static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

      // Initialize the FixedArray.
      // r1: constructor
      // r3: number of elements in properties array
      // r4: JSObject
      // r5: FixedArray (not tagged)
      __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
      __ mov(r2, r5);
      ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
      __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
      ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
      __ SmiTag(r0, r3);
      __ str(r0, MemOperand(r2, kPointerSize, PostIndex));

      // Initialize the fields to undefined.
      // r1: constructor function
      // r2: First element of FixedArray (not tagged)
      // r3: number of elements in properties array
      // r4: JSObject
      // r5: FixedArray (not tagged)
      __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
      ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
      { Label loop, entry;
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
        __ b(&entry);
        __ bind(&loop);
        __ str(r0, MemOperand(r2, kPointerSize, PostIndex));
        __ bind(&entry);
        __ cmp(r2, r6);
        __ b(lt, &loop);
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject
      // r1: constructor function
      // r4: JSObject
      // r5: FixedArray (not tagged)
      __ add(r5, r5, Operand(kHeapObjectTag));  // Add the heap tag.
      __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));

      // Continue with JSObject being successfully allocated
      // r1: constructor function
      // r4: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated objects unused properties.
      // r4: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(r4, r5);
    }

    // Allocate the new receiver object using the runtime call.
    // r1: constructor function
    __ bind(&rt_call);
    __ push(r1);  // argument for Runtime_NewObject
    __ CallRuntime(Runtime::kNewObject, 1);
    __ mov(r4, r0);

    // Receiver for constructor call allocated.
    // r4: JSObject
    __ bind(&allocated);
    // Push the receiver twice: once as the call receiver, once as a spare
    // copy that survives the constructor call (see &use_receiver below).
    __ push(r4);
    __ push(r4);

    // Reload the number of arguments and the constructor from the stack.
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
    __ ldr(r3, MemOperand(sp, 3 * kPointerSize));

    // Set up pointer to last argument.
    __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Set up number of arguments for function call below
    __ SmiUntag(r0, r3);

    // Copy arguments and receiver to the expression stack.
    // r0: number of arguments
    // r1: constructor function
    // r2: address of last argument (caller sp)
    // r3: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    Label loop, entry;
    __ b(&entry);
    __ bind(&loop);
    // r3 is a smi (value * 2), hence the shift by kPointerSizeLog2 - 1.
    __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
    __ push(ip);
    __ bind(&entry);
    __ sub(r3, r3, Operand(2), SetCC);  // Decrement the smi count by one.
    __ b(ge, &loop);

    // Call the function.
    // r0: number of arguments
    // r1: constructor function
    if (is_api_function) {
      __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      ParameterCount expected(0);
      __ InvokeCode(code, expected, expected,
                    RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD);
    } else {
      ParameterCount actual(r0);
      __ InvokeFunction(r1, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function && !count_constructions) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r0: result
    // sp[0]: receiver
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // r0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(r0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ CompareObjectType(r0, r1, r3, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ ldr(r0, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // r0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ ldr(r1, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  // Drop the caller arguments (r1 holds the smi-tagged count, hence the
  // shift by kPointerSizeLog2 - 1) plus the receiver, then return.
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
  __ Jump(lr);
}
    671 
    672 
// Construct stub variant that counts constructions for instance-size slack
// tracking (count_constructions == true).
void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}
    676 
    677 
// Generic construct stub: no API call path and no construction counting.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}
    681 
    682 
// Construct stub for API functions (invokes HandleApiCallConstruct).
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}
    686 
    687 
// Common body of the JS entry trampolines (called from Generate_JS_Entry):
// sets up an internal frame, pushes the function, receiver, and the argv
// contents, clears the JS callee-saved registers, and then invokes the
// function either as a constructor (CallConstructStub) or as a plain call
// (InvokeFunction), depending on |is_construct|.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  // r5-r6, r8 (if not FLAG_enable_ool_constant_pool) and cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ mov(cp, Operand::Zero());

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ push(r1);
    __ push(r2);

    // Copy arguments to the stack in a loop.
    // r1: function
    // r3: argc
    // r4: argv, i.e. points to first arg
    Label loop, entry;
    __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
    // r2 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
    __ ldr(r0, MemOperand(r0));  // dereference handle
    __ push(r0);  // push parameter
    __ bind(&entry);
    __ cmp(r4, r2);
    __ b(ne, &loop);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mov(r5, Operand(r4));
    __ mov(r6, Operand(r4));
    if (!FLAG_enable_ool_constant_pool) {
      __ mov(r8, Operand(r4));
    }
    if (kR9Available == 1) {
      __ mov(r9, Operand(r4));
    }

    // Invoke the code and pass argc as r0.
    __ mov(r0, Operand(r3));
    if (is_construct) {
      // No type feedback cell is available
      Handle<Object> undefined_sentinel(
          masm->isolate()->heap()->undefined_value(), masm->isolate());
      __ mov(r2, Operand(undefined_sentinel));
      CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(r0);
      __ InvokeFunction(r1, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }
    // Exit the JS frame and remove the parameters (except function), and
    // return.
    // Respect ABI stack constraint.
  }
  __ Jump(lr);

  // r0: result
}
    765 
    766 
// Entry trampoline for ordinary (non-construct) JS calls.
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}
    770 
    771 
// Entry trampoline for construct ([[Construct]]) calls.
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
    775 
    776 
// Compiles the function via the runtime, then tail-calls the resulting code
// object (runtime leaves it in r0; skip the Code header to reach the entry).
void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kLazyCompile);
  // Do a tail-call of the compiled function.
  __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}
    783 
    784 
// Recompiles (optimizes) the function via the runtime, then tail-calls the
// resulting code object, mirroring Generate_LazyCompile above.
void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
  // Do a tail-call of the compiled function.
  __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}
    791 
    792 
    793 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
    794   // For now, we are relying on the fact that make_code_young doesn't do any
    795   // garbage collection which allows us to save/restore the registers without
    796   // worrying about which of them contain pointers. We also don't build an
    797   // internal frame to make the code faster, since we shouldn't have to do stack
    798   // crawls in MakeCodeYoung. This seems a bit fragile.
    799 
    800   // The following registers must be saved and restored when calling through to
    801   // the runtime:
    802   //   r0 - contains return address (beginning of patch sequence)
    803   //   r1 - isolate
    804   FrameScope scope(masm, StackFrame::MANUAL);
    805   __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
    806   __ PrepareCallCFunction(1, 0, r2);
    807   __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
    808   __ CallCFunction(
    809       ExternalReference::get_make_code_young_function(masm->isolate()), 2);
    810   __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
    811   __ mov(pc, r0);
    812 }
    813 
// For every code age C in CODE_AGE_LIST, emit the "make code young again"
// builtins for both marking parities; each simply defers to the common
// helper above.
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
    825 
    826 
    827 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
    828   // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
    829   // that make_code_young doesn't do any garbage collection which allows us to
    830   // save/restore the registers without worrying about which of them contain
    831   // pointers.
    832 
    833   // The following registers must be saved and restored when calling through to
    834   // the runtime:
    835   //   r0 - contains return address (beginning of patch sequence)
    836   //   r1 - isolate
    837   FrameScope scope(masm, StackFrame::MANUAL);
    838   __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
    839   __ PrepareCallCFunction(1, 0, r2);
    840   __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
    841   __ CallCFunction(ExternalReference::get_mark_code_as_executed_function(
    842         masm->isolate()), 2);
    843   __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
    844 
    845   // Perform prologue operations usually performed by the young code stub.
    846   __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
    847   __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
    848 
    849   // Jump to point after the code-age stub.
    850   __ add(r0, r0, Operand(kNoCodeAgeSequenceLength * Assembler::kInstrSize));
    851   __ mov(pc, r0);
    852 }
    853 
    854 
// Marking code as executed twice is implemented as making it young again;
// reuse the common code-age helper.
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}
    858 
    859 
// Notifies the runtime that a stub failed (deopted), preserving all JS
// caller- and callee-saved registers across the call, then drops the state
// word from the stack and jumps to the miss handler in lr.
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved);
    // Notify the runtime; no arguments are passed.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved);
  }

  __ add(sp, sp, Operand(kPointerSize));  // Ignore state
  __ mov(pc, lr);  // Jump to miss handler
}
    877 
    878 
// Stub-failure notification without saving double registers.
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}
    882 
    883 
// Stub-failure notification that also preserves double registers.
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
    887 
    888 
// Notifies the runtime of a deoptimization of the given bailout type, then
// inspects the full-codegen state word left on the stack: NO_REGISTERS just
// pops the state; TOS_REG additionally restores the saved top-of-stack value
// into r0 before returning. Any other state is a bug.
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(r0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> r6.
  __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r6);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS));
  __ b(ne, &with_tos_register);
  __ add(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  // Restore the top-of-stack value saved by the deopt sequence into r0.
  __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
  __ cmp(r6, Operand(FullCodeGenerator::TOS_REG));
  __ b(ne, &unknown_state);
  __ add(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}
    919 
    920 
// Eager deoptimization notification.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
    924 
    925 
// Soft deoptimization notification.
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
    929 
    930 
// Lazy deoptimization notification.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
    934 
    935 
// On-stack replacement: asks the runtime to compile the current function for
// OSR at the current back-edge pc. If the runtime returns a code object, the
// OSR entry offset is read from its deoptimization data and control "returns"
// into the optimized code; otherwise execution continues in unoptimized code.
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Lookup and calculate pc offset.
    __ ldr(r1, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
    __ ldr(r2, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
    __ sub(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ sub(r1, r1, r2);
    __ SmiTag(r1);

    // Pass both function and pc offset as arguments.
    __ push(r0);
    __ push(r1);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 2);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ cmp(r0, Operand(Smi::FromInt(0)));
  __ b(ne, &skip);
  __ Ret();

  __ bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ ldr(r1, MemOperand(r0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ ldr(r1, MemOperand(r1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ add(r0, r0, Operand::SmiUntag(r1));
  __ add(lr, r0, Operand(Code::kHeaderSize - kHeapObjectTag));

  // And "return" to the OSR entry point of the function.
  __ Ret();
}
    980 
    981 
// Fast path after a back-edge stack check: if the stack limit is not hit,
// simply return; otherwise run the stack guard and then tail-jump to the
// OnStackReplacement builtin to attempt OSR.
void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}
    998 
    999 
// Implements Function.prototype.call: ensures at least one argument,
// normalizes the receiver for non-strict, non-native callees, shifts the
// arguments down over the receiver slot, and then tail-calls the target —
// routing function proxies and non-functions through the CALL_FUNCTION_PROXY
// and CALL_NON_FUNCTION builtins via the arguments adaptor.
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r0: actual number of arguments
  { Label done;
    __ cmp(r0, Operand::Zero());
    __ b(ne, &done);
    // No arguments: push undefined as the (dummy) receiver.
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ push(r2);
    __ add(r0, r0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // r0: actual number of arguments
  Label slow, non_function;
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  __ JumpIfSmi(r1, &non_function);
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  // r0: actual number of arguments
  // r1: function
  Label shift_arguments;
  __ mov(r4, Operand::Zero());  // indicate regular JS_FUNCTION
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
    __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                             kSmiTagSize)));
    __ b(ne, &shift_arguments);

    // Do not transform the receiver for native (Compilerhints already in r3).
    __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ b(ne, &shift_arguments);

    // Compute the receiver in non-strict mode.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ ldr(r2, MemOperand(r2, -kPointerSize));
    // r0: actual number of arguments
    // r1: function
    // r2: first argument
    __ JumpIfSmi(r2, &convert_to_object);

    // undefined and null receivers are replaced by the global receiver.
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);
    __ LoadRoot(r3, Heap::kNullValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);

    // Spec-object receivers are used as-is.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &shift_arguments);

    __ bind(&convert_to_object);

    {
      // Enter an internal frame in order to preserve argument count.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(r0);
      __ push(r0);

      __ push(r2);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(r2, r0);

      __ pop(r0);
      __ SmiUntag(r0);

      // Exit the internal frame.
    }

    // Restore the function to r1, and the flag to r4.
    __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    __ mov(r4, Operand::Zero());
    __ jmp(&patch_receiver);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
    __ ldr(r2, FieldMemOperand(cp, kGlobalIndex));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kNativeContextOffset));
    __ ldr(r2, FieldMemOperand(r2, kGlobalIndex));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));

    // Write the normalized receiver back into its stack slot.
    __ bind(&patch_receiver);
    __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r2, MemOperand(r3, -kPointerSize));

    __ jmp(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ mov(r4, Operand(1, RelocInfo::NONE32));  // indicate function proxy
  __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE));
  __ b(eq, &shift_arguments);
  __ bind(&non_function);
  __ mov(r4, Operand(2, RelocInfo::NONE32));  // indicate non-function

  // 3c. Patch the first argument when calling a non-function.  The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
  __ str(r1, MemOperand(r2, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));

    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, -kPointerSize));
    __ str(ip, MemOperand(r2));
    __ sub(r2, r2, Operand(kPointerSize));
    __ cmp(r2, sp);
    __ b(ne, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ sub(r0, r0, Operand(1));
    __ pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  { Label function, non_proxy;
    __ tst(r4, r4);
    __ b(eq, &function);
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(r2, Operand::Zero());
    __ SetCallKind(r5, CALL_AS_METHOD);
    __ cmp(r4, Operand(1));
    __ b(ne, &non_proxy);

    __ push(r1);  // re-add proxy object as additional argument
    __ add(r0, r0, Operand(1));
    __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register r3 without checking arguments.
  // r0: actual number of arguments
  // r1: function
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2,
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(r2);
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ SetCallKind(r5, CALL_AS_METHOD);
  __ cmp(r2, r0);  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET,
          ne);

  ParameterCount expected(0);
  __ InvokeCode(r3, expected, expected, JUMP_FUNCTION,
                NullCallWrapper(), CALL_AS_METHOD);
}
   1188 
   1189 
// Implements Function.prototype.apply: prepares the arguments array via
// APPLY_PREPARE, checks for stack overflow, normalizes the receiver, copies
// each array element onto the stack with Runtime::kGetProperty, and invokes
// the function (or routes function proxies through CALL_FUNCTION_PROXY).
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  // Offsets of the internal-frame expression slots and the incoming
  // arguments relative to fp.
  const int kIndexOffset    =
      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
  const int kLimitOffset    =
      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
  const int kArgsOffset     = 2 * kPointerSize;
  const int kRecvOffset     = 3 * kPointerSize;
  const int kFunctionOffset = 4 * kPointerSize;

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);

    __ ldr(r0, MemOperand(fp, kFunctionOffset));  // get the function
    __ push(r0);
    __ ldr(r0, MemOperand(fp, kArgsOffset));  // get the args array
    __ push(r0);
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real stack
    // limit" is checked.
    Label okay;
    __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
    // Make r2 the space we have left. The stack might already be overflowed
    // here which will cause r2 to become negative.
    __ sub(r2, sp, r2);
    // Check if the arguments will overflow the stack.
    __ cmp(r2, Operand::PointerOffsetFromSmiKey(r0));
    __ b(gt, &okay);  // Signed comparison.

    // Out of stack space.
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ Push(r1, r0);
    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
    // End of stack check.

    // Push current limit and index.
    __ bind(&okay);
    __ push(r0);  // limit
    __ mov(r1, Operand::Zero());  // initial index
    __ push(r1);

    // Get the receiver.
    __ ldr(r0, MemOperand(fp, kRecvOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(ne, &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in r1.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_receiver;
    __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
    __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                             kSmiTagSize)));
    __ b(ne, &push_receiver);

    // Do not transform the receiver for native functions
    // (compiler hints already in r2).
    __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ b(ne, &push_receiver);

    // Compute the receiver in non-strict mode.
    __ JumpIfSmi(r0, &call_to_object);
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    __ cmp(r0, r1);
    __ b(eq, &use_global_receiver);
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ cmp(r0, r1);
    __ b(eq, &use_global_receiver);

    // Check if the receiver is already a JavaScript object.
    // r0: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &push_receiver);

    // Convert the receiver to a regular object.
    // r0: receiver
    __ bind(&call_to_object);
    __ push(r0);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ b(&push_receiver);

    // Use the current global receiver object as the receiver.
    __ bind(&use_global_receiver);
    const int kGlobalOffset =
        Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
    __ ldr(r0, FieldMemOperand(cp, kGlobalOffset));
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kNativeContextOffset));
    __ ldr(r0, FieldMemOperand(r0, kGlobalOffset));
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));

    // Push the receiver.
    // r0: receiver
    __ bind(&push_receiver);
    __ push(r0);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ ldr(r0, MemOperand(fp, kIndexOffset));
    __ b(&entry);

    // Load the current argument from the arguments array and push it to the
    // stack.
    // r0: current argument index
    __ bind(&loop);
    __ ldr(r1, MemOperand(fp, kArgsOffset));
    __ Push(r1, r0);

    // Call the runtime to access the property in the arguments array.
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ push(r0);

    // Advance the index (a smi) to the next argument.
    __ ldr(r0, MemOperand(fp, kIndexOffset));
    __ add(r0, r0, Operand(1 << kSmiTagSize));
    __ str(r0, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ bind(&entry);
    __ ldr(r1, MemOperand(fp, kLimitOffset));
    __ cmp(r0, r1);
    __ b(ne, &loop);

    // Invoke the function.
    Label call_proxy;
    ParameterCount actual(r0);
    __ SmiUntag(r0);
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(ne, &call_proxy);
    __ InvokeFunction(r1, actual, CALL_FUNCTION,
                      NullCallWrapper(), CALL_AS_METHOD);

    frame_scope.GenerateLeaveFrame();
    __ add(sp, sp, Operand(3 * kPointerSize));
    __ Jump(lr);

    // Invoke the function proxy.
    __ bind(&call_proxy);
    __ push(r1);  // add function proxy as last argument
    __ add(r0, r0, Operand(1));
    __ mov(r2, Operand::Zero());
    __ SetCallKind(r5, CALL_AS_METHOD);
    __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Tear down the internal frame and remove function, receiver and args.
  }
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Jump(lr);
}
   1351 
   1352 
// Builds an arguments-adaptor frame: pushes the actual argument count (as a
// smi), the function, the ARGUMENTS_ADAPTOR frame-type marker, fp and lr,
// then re-points fp into the new frame.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r0);
  __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit());
  __ add(fp, sp,
         Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}
   1360 
   1361 
// Tears down the arguments-adaptor frame built above and pops the actual
// arguments plus the receiver, leaving the result (r0) untouched.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ ldr(r1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                              kPointerSize)));
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());
  __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
  __ add(sp, sp, Operand(kPointerSize));  // adjust for receiver
}
   1375 
   1376 
// Adapts the actual argument count to the callee's expected count: copies
// the provided arguments into a fresh adaptor frame, padding with undefined
// when too few were passed, then calls the code entry in r3. Functions
// marked "don't adapt" are tail-called directly.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  //  -- r3 : code entry to call
  //  -- r5 : call kind information
  // -----------------------------------

  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ cmp(r0, r2);
  __ b(lt, &too_few);
  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ b(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0 and copy end address into r2.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
    // adjust for return address and receiver
    __ add(r0, r0, Operand(2 * kPointerSize));
    __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: copy end address
    // r3: code entry to call

    Label copy;
    __ bind(&copy);
    __ ldr(ip, MemOperand(r0, 0));
    __ push(ip);
    __ cmp(r0, r2);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    __ b(&invoke);
  }

  {  // Too few parameters: Actual < expected
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0 and copy end address is fp.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
    __ push(ip);
    __ cmp(r0, fp);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    // Fill the remaining expected arguments with undefined.
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
    // Adjust for frame.
    __ sub(r2, r2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                           2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(ip);
    __ cmp(sp, r2);
    __ b(ne, &fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ Call(r3);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Jump(lr);


  // -------------------------------------------
  // Dont adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(r3);
}
   1485 
   1486 
   1487 #undef __
   1488 
   1489 } }  // namespace v8::internal
   1490 
   1491 #endif  // V8_TARGET_ARCH_ARM
   1492