// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_ARM

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register receiver,
                       Register name,
                       // Number of the cache entry, not scaled.
                       Register offset,
                       Register scratch,
                       Register scratch2,
                       Register offset_scratch) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
  uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());

  // Check the relative positions of the address fields.
  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));
  ASSERT(map_off_addr > key_off_addr);
  ASSERT((map_off_addr - key_off_addr) % 4 == 0);
  ASSERT((map_off_addr - key_off_addr) < (256 * 4));

  Label miss;
  Register base_addr = scratch;
  scratch = no_reg;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
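  // offset + (offset << 1) == offset * 3, so no mul instruction is needed.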
  __ add(offset_scratch, offset, Operand(offset, LSL, 1));

  // Calculate the base address of the entry.
  __ mov(base_addr, Operand(key_offset));
  __ add(base_addr, base_addr, Operand(offset_scratch, LSL, kPointerSizeLog2));

  // Check that the key in the entry matches the name.
  __ ldr(ip, MemOperand(base_addr, 0));
  __ cmp(name, ip);
  __ b(ne, &miss);

  // Check the map matches.
  __ ldr(ip, MemOperand(base_addr, map_off_addr - key_off_addr));
  __ ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ cmp(ip, scratch2);
  __ b(ne, &miss);

  // Get the code entry from the cache.
  Register code = scratch2;
  scratch2 = no_reg;
  __ ldr(code, MemOperand(base_addr, value_off_addr - key_off_addr));

  // Check that the flags match what we're looking for.
  Register flags_reg = base_addr;
  base_addr = no_reg;
  __ ldr(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
  // It's a nice optimization if this constant is encodable in the bic
  // instruction.
  uint32_t mask = Code::kFlagsNotUsedInLookup;
  ASSERT(__ ImmediateFitsAddrMode1Instruction(mask));
  __ bic(flags_reg, flags_reg, Operand(mask));
  __ cmp(flags_reg, Operand(flags));
  __ b(ne, &miss);

#ifdef DEBUG
    if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
      __ jmp(&miss);
    } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
      __ jmp(&miss);
    }
#endif

  // Jump to the first instruction in the code stub.
  __ add(pc, code, Operand(Code::kHeaderSize - kHeapObjectTag));
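  // (Writing to pc performs the jump; the operand skips the Code object's
  // header and clears the heap-object tag.)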

  // Miss: fall through.
  __ bind(&miss);
}


// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be unique and receiver must be a heap object.
static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                             Label* miss_label,
                                             Register receiver,
                                             Handle<Name> name,
                                             Register scratch0,
                                             Register scratch1) {
  ASSERT(name->IsUniqueName());
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ b(ne, miss_label);

  // Check that receiver is a JSObject.
  __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ cmp(scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ b(lt, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ cmp(map, tmp);
  __ b(ne, miss_label);

  // Restore the temporarily used register.
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  NameDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   receiver,
                                                   properties,
                                                   name,
                                                   scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}


void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;

  // Make sure that code is valid. The multiplying code relies on the
  // entry size being 12.
  ASSERT(sizeof(Entry) == 12);
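  // (Each entry holds three 4-byte words: key, value and map.)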

  // Make sure the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(name));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  // Check that the scratch, extra, extra2 and extra3 registers are valid.
  ASSERT(!scratch.is(no_reg));
  ASSERT(!extra.is(no_reg));
  ASSERT(!extra2.is(no_reg));
  ASSERT(!extra3.is(no_reg));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
                      extra2, extra3);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
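  // In effect: primary_index =
  //     (((hash_field + map) >> kHeapObjectTagSize)
  //      ^ (flags >> kHeapObjectTagSize)) & (kPrimaryTableSize - 1).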
  __ ldr(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
  __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ add(scratch, scratch, Operand(ip));
  uint32_t mask = kPrimaryTableSize - 1;
  // We shift out the last two bits because they are not part of the hash and
  // they are always 01 for maps.
  __ mov(scratch, Operand(scratch, LSR, kHeapObjectTagSize));
  // Mask down the eor argument to the minimum to keep the immediate
  // ARM-encodable.
  __ eor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
  // Prefer and_ to ubfx here because ubfx takes 2 cycles.
  __ and_(scratch, scratch, Operand(mask));

  // Probe the primary table.
  ProbeTable(isolate,
             masm,
             flags,
             kPrimary,
             receiver,
             name,
             scratch,
             extra,
             extra2,
             extra3);

  // Primary miss: Compute hash for secondary probe.
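  // In effect: secondary_index =
  //     ((primary_index - (name >> kHeapObjectTagSize))
  //      + (flags >> kHeapObjectTagSize)) & (kSecondaryTableSize - 1).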
  __ sub(scratch, scratch, Operand(name, LSR, kHeapObjectTagSize));
  uint32_t mask2 = kSecondaryTableSize - 1;
  __ add(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
  __ and_(scratch, scratch, Operand(mask2));

  // Probe the secondary table.
  ProbeTable(isolate,
             masm,
             flags,
             kSecondary,
             receiver,
             name,
             scratch,
             extra,
             extra2,
             extra3);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
                      extra2, extra3);
}


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ ldr(prototype,
         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  __ ldr(prototype,
         FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map.  The global functions all have initial maps.
  __ ldr(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  Isolate* isolate = masm->isolate();
  // Check we're still in the same context.
  __ ldr(prototype,
         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ Move(ip, isolate->global_object());
  __ cmp(prototype, ip);
  __ b(ne, miss);
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));
  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            bool inobject,
                                            int index,
                                            Representation representation) {
  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
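  // When double-field tracking is on, doubles never reach this helper (see
  // the ASSERT above); the loads below are plain tagged loads.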
  int offset = index * kPointerSize;
  if (!inobject) {
    // Calculate the offset into the properties array.
    offset = offset + FixedArray::kHeaderSize;
    __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
    src = dst;
  }
  __ ldr(dst, FieldMemOperand(src, offset));
}


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
  __ b(ne, miss_label);

  // Load length directly from the JS array.
  __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Ret();
}


// Generate code to check if an object is a string.  If the object is a
// heap object, its map's instance type is left in the scratch1 register.
// If this is not needed, scratch1 and scratch2 may be the same register.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, smi);

  // Check that the object is a string.
  __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ and_(scratch2, scratch1, Operand(kIsNotStringMask));
  // The cast is to resolve the overload for the argument of 0x0.
  __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag)));
  __ b(ne, non_string_object);
}


// Generate code to load the length from a string object and return the length.
// If the receiver object is not a string or a wrapped string object, the
// execution continues at the miss label. The register containing the
// receiver is potentially clobbered.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss,
                                            bool support_wrappers) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch1 register.
  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss,
                      support_wrappers ? &check_wrapper : miss);

  // Load length directly from the string.
  __ ldr(r0, FieldMemOperand(receiver, String::kLengthOffset));
  __ Ret();

  if (support_wrappers) {
    // Check if the object is a JSValue wrapper.
    __ bind(&check_wrapper);
    __ cmp(scratch1, Operand(JS_VALUE_TYPE));
    __ b(ne, miss);

    // Unwrap the value and check if the wrapped value is a string.
    __ ldr(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
    GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
    __ ldr(r0, FieldMemOperand(scratch1, String::kLengthOffset));
    __ Ret();
  }
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(r0, scratch1);
  __ Ret();
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
static void GenerateCheckPropertyCell(MacroAssembler* masm,
                                      Handle<GlobalObject> global,
                                      Handle<Name> name,
                                      Register scratch,
                                      Label* miss) {
  Handle<Cell> cell = GlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  __ mov(scratch, Operand(cell));
  __ ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch, ip);
  __ b(ne, miss);
}


void BaseStoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<GlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}


// Generate StoreTransition code; the value is passed in the r0 register.
// When leaving generated code after success, the receiver_reg and name_reg
// may be clobbered.  Upon branch to miss_label, the receiver and name
// registers have their original values.
void BaseStoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                    Handle<JSObject> object,
                                                    LookupResult* lookup,
                                                    Handle<Map> transition,
                                                    Handle<Name> name,
                                                    Register receiver_reg,
                                                    Register storage_reg,
                                                    Register value_reg,
                                                    Register scratch1,
                                                    Register scratch2,
                                                    Register scratch3,
                                                    Label* miss_label,
                                                    Label* slow) {
  // r0 : value
  Label exit;

  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ LoadObject(scratch1, constant);
    __ cmp(value_reg, scratch1);
    __ b(ne, miss_label);
  } else if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    Label do_store, heap_number;
    __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow);
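    // storage_reg now holds a freshly allocated HeapNumber; the value is
    // converted (or loaded) into d0 below and stored into it.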

    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(scratch1, value_reg);
    __ vmov(s0, scratch1);
    __ vcvt_f64_s32(d0, s0);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ vstr(d0, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ push(receiver_reg);
    __ mov(r2, Operand(transition));
    __ Push(r2, r0);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  // Update the map of the object.
  __ mov(scratch1, Operand(transition));
  __ str(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kLRHasNotBeenSaved,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  if (details.type() == CONSTANT) {
    ASSERT(value_reg.is(r0));
    __ Ret();
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties are not going to change.
  index -= object->map()->inobject_properties();
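  // A negative index now denotes an in-object slot; a non-negative index is
  // an offset into the out-of-object properties array.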

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ str(storage_reg, FieldMemOperand(receiver_reg, offset));
    } else {
      __ str(value_reg, FieldMemOperand(receiver_reg, offset));
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(receiver_reg,
                          offset,
                          storage_reg,
                          scratch1,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ ldr(scratch1,
           FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ str(storage_reg, FieldMemOperand(scratch1, offset));
    } else {
      __ str(value_reg, FieldMemOperand(scratch1, offset));
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(scratch1,
                          offset,
                          storage_reg,
                          receiver_reg,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register r0).
  ASSERT(value_reg.is(r0));
  __ bind(&exit);
  __ Ret();
}


// Generate StoreField code; the value is passed in the r0 register.
// When leaving generated code after success, the receiver_reg and name_reg
// may be clobbered.  Upon branch to miss_label, the receiver and name
// registers have their original values.
void BaseStoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                               Handle<JSObject> object,
                                               LookupResult* lookup,
                                               Register receiver_reg,
                                               Register name_reg,
                                               Register value_reg,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* miss_label) {
  // r0 : value
  Label exit;

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties are not going to change.
  index -= object->map()->inobject_properties();
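  // As above, a negative index denotes an in-object slot and a non-negative
  // one an offset into the out-of-object properties array.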

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    // Load the double storage.
    if (index < 0) {
      int offset = object->map()->instance_size() + (index * kPointerSize);
      __ ldr(scratch1, FieldMemOperand(receiver_reg, offset));
    } else {
      __ ldr(scratch1,
             FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
      int offset = index * kPointerSize + FixedArray::kHeaderSize;
      __ ldr(scratch1, FieldMemOperand(scratch1, offset));
    }

    // Store the value into the storage.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(scratch2, value_reg);
    __ vmov(s0, scratch2);
    __ vcvt_f64_s32(d0, s0);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ vstr(d0, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
    // Return the value (register r0).
    ASSERT(value_reg.is(r0));
    __ Ret();
    return;
  }

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ str(value_reg, FieldMemOperand(receiver_reg, offset));

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Skip updating write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Pass the now unused name_reg as a scratch register.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(receiver_reg,
                          offset,
                          name_reg,
                          scratch1,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ ldr(scratch1,
           FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ str(value_reg, FieldMemOperand(scratch1, offset));

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Skip updating write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Ok to clobber receiver_reg and name_reg, since we return.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(scratch1,
                          offset,
                          name_reg,
                          receiver_reg,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register r0).
  ASSERT(value_reg.is(r0));
  __ bind(&exit);
  __ Ret();
}


void BaseStoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                                Label* label,
                                                Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ mov(this->name(), Operand(name));
  }
}


static void GenerateCallFunction(MacroAssembler* masm,
                                 Handle<Object> object,
                                 const ParameterCount& arguments,
                                 Label* miss,
                                 Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  -- r0: receiver
  //  -- r1: function to call
  // -----------------------------------

  // Check that the function really is a function.
  __ JumpIfSmi(r1, miss);
  __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
  __ b(ne, miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, arguments.immediate() * kPointerSize));
  }

  // Invoke the function.
  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  __ InvokeFunction(r1, arguments, JUMP_FUNCTION, NullCallWrapper(), call_kind);
}


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
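  // Six words are pushed; after this the stack holds, top down:
  // isolate, interceptor data, holder, receiver, interceptor info, name.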
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ mov(scratch, Operand(interceptor));
  __ push(scratch);
  __ push(receiver);
  __ push(holder);
  __ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
  __ push(scratch);
  __ mov(scratch, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ push(scratch);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
                        masm->isolate());
  __ mov(r0, Operand(6));
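  // (r0 holds argc: the six words pushed by PushInterceptorArguments.)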
  __ mov(r1, Operand(ref));

  CEntryStub stub(1);
  __ CallStub(&stub);
}


static const int kFastApiCallArguments = FunctionCallbackArguments::kArgsLength;

// Reserves space for the extra arguments to the API function in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
                                       Register scratch) {
  __ mov(scratch, Operand(Smi::FromInt(0)));
  for (int i = 0; i < kFastApiCallArguments; i++) {
    __ push(scratch);
  }
}


// Undoes the effects of ReserveSpaceForFastApiCall.
static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
  __ Drop(kFastApiCallArguments);
}


static void GenerateFastApiDirectCall(MacroAssembler* masm,
                                      const CallOptimization& optimization,
                                      int argc) {
  // ----------- S t a t e -------------
  //  -- sp[0]              : holder (set by CheckPrototypes)
  //  -- sp[4]              : callee JS function
  //  -- sp[8]              : call data
  //  -- sp[12]             : isolate
  //  -- sp[16]             : ReturnValue default value
  //  -- sp[20]             : ReturnValue
  //  -- sp[24]             : last JS argument
  //  -- ...
  //  -- sp[(argc + 5) * 4] : first JS argument
  //  -- sp[(argc + 6) * 4] : receiver
  // -----------------------------------
  // Get the function and setup the context.
  Handle<JSFunction> function = optimization.constant_function();
  __ LoadHeapObject(r5, function);
  __ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset));

  // Pass the additional arguments.
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data(api_call_info->data(), masm->isolate());
  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
    __ Move(r0, api_call_info);
    __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset));
  } else {
    __ Move(r6, call_data);
  }
  __ mov(r7, Operand(ExternalReference::isolate_address(masm->isolate())));
  // Store JS function, call data, isolate, ReturnValue default and
  // ReturnValue.
  __ stm(ib, sp, r5.bit() | r6.bit() | r7.bit());
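  // (stm ib stores r5, r6 and r7 at sp[4], sp[8] and sp[12]: callee,
  // call data and isolate. The holder at sp[0] is left untouched.)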
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
  __ str(r5, MemOperand(sp, 4 * kPointerSize));
  __ str(r5, MemOperand(sp, 5 * kPointerSize));

  // Prepare arguments.
  __ add(r2, sp, Operand(5 * kPointerSize));
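  // r2 now points at the last implicit argument, the ReturnValue slot at
  // sp[20]; it becomes v8::Arguments::implicit_args_ below.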

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;

  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  // r0 = v8::Arguments&
  // Arguments is after the return address.
  __ add(r0, sp, Operand(1 * kPointerSize));
  // v8::Arguments::implicit_args_
  __ str(r2, MemOperand(r0, 0 * kPointerSize));
  // v8::Arguments::values_
  __ add(ip, r2, Operand(argc * kPointerSize));
  __ str(ip, MemOperand(r0, 1 * kPointerSize));
  // v8::Arguments::length_ = argc
  __ mov(ip, Operand(argc));
  __ str(ip, MemOperand(r0, 2 * kPointerSize));
  // v8::Arguments::is_construct_call = 0
  __ mov(ip, Operand::Zero());
  __ str(ip, MemOperand(r0, 3 * kPointerSize));

  const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
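  // Unwind the JS arguments, the kFastApiCallArguments implicit slots and
  // the receiver.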
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  bool returns_handle =
      !CallbackTable::ReturnsVoid(masm->isolate(), function_address);
  ApiFunction fun(function_address);
  ExternalReference::Type type =
      returns_handle ?
          ExternalReference::DIRECT_API_CALL :
          ExternalReference::DIRECT_API_CALL_NEW;
  ExternalReference ref = ExternalReference(&fun,
                                            type,
                                            masm->isolate());
  Address thunk_address = returns_handle
      ? FUNCTION_ADDR(&InvokeInvocationCallback)
      : FUNCTION_ADDR(&InvokeFunctionCallback);
  ExternalReference::Type thunk_type =
      returns_handle ?
          ExternalReference::PROFILING_API_CALL :
          ExternalReference::PROFILING_API_CALL_NEW;
  ApiFunction thunk_fun(thunk_address);
  ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
      masm->isolate());

  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallApiFunctionAndReturn(ref,
                              function_address,
                              thunk_ref,
                              r1,
                              kStackUnwindSpace,
                              returns_handle,
                              kFastApiCallArguments + 1);
}


class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name,
                          Code::ExtraICState extra_ic_state)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name),
        extra_ic_state_(extra_ic_state) {}

  void Compile(MacroAssembler* masm,
               Handle<JSObject> object,
               Handle<JSObject> holder,
               Handle<Name> name,
               LookupResult* lookup,
               Register receiver,
               Register scratch1,
               Register scratch2,
               Register scratch3,
               Label* miss) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, miss);
    CallOptimization optimization(lookup);
    if (optimization.is_constant_call()) {
      CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
                       holder, lookup, name, optimization, miss);
    } else {
      CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
                     name, holder, miss);
    }
  }

 private:
  void CompileCacheable(MacroAssembler* masm,
                        Handle<JSObject> object,
                        Register receiver,
                        Register scratch1,
                        Register scratch2,
                        Register scratch3,
                        Handle<JSObject> interceptor_holder,
                        LookupResult* lookup,
                        Handle<Name> name,
                        const CallOptimization& optimization,
                        Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());
    Counters* counters = masm->isolate()->counters();
    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
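    // depth1/depth2 record where along the two chain segments (receiver to
    // interceptor holder, interceptor holder to lookup holder) the expected
    // API receiver type is found; a valid depth on either segment enables
    // the fast API call path below.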
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 = optimization.GetPrototypeDepthOfExpectedType(
          object, interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 = optimization.GetPrototypeDepthOfExpectedType(
            interceptor_holder, Handle<JSObject>(lookup->holder()));
      }
      can_do_fast_api_call =
          depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
    }

    __ IncrementCounter(counters->call_const_interceptor(), 1,
                        scratch1, scratch2);

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
                          scratch1, scratch2);
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from receiver to interceptor's holder
    // haven't changed and thus we can invoke interceptor.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3,
                                        name, depth1, miss);

    // Invoke an interceptor and if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
                        &regular_invoke);

    // Interceptor returned nothing for this property.  Try to use cached
    // constant function.

    // Check that the maps from interceptor's holder to constant function's
    // holder haven't changed and thus we can use cached constant function.
    if (*interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      Handle<JSObject>(lookup->holder()),
                                      scratch1, scratch2, scratch3,
                                      name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // for API (object which is instanceof for the signature).  It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke function.
    if (can_do_fast_api_call) {
      GenerateFastApiDirectCall(masm, optimization, arguments_.immediate());
    } else {
      CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
          ? CALL_AS_FUNCTION
          : CALL_AS_METHOD;
      Handle<JSFunction> function = optimization.constant_function();
      ParameterCount expected(function);
      __ InvokeFunction(function, expected, arguments_,
                        JUMP_FUNCTION, NullCallWrapper(), call_kind);
    }

    // Deferred code for fast API call case: clean preallocated space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm);
      __ b(miss_label);
    }

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm);
    }
  }

  void CompileRegular(MacroAssembler* masm,
                      Handle<JSObject> object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      Handle<Name> name,
                      Handle<JSObject> interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3,
                                        name, miss_label);

    // Call a runtime function to load the interceptor property.
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Save the name_ register across the call.
    __ push(name_);
    PushInterceptorArguments(masm, receiver, holder, name_, interceptor_holder);
    __ CallExternalReference(
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
                          masm->isolate()),
        6);
    // Restore the name_ register.
    __ pop(name_);
    // Leave the internal frame.
  }

  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           Handle<JSObject> holder_obj,
                           Register scratch,
                           Label* interceptor_succeeded) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(holder, name_);
      CompileCallLoadPropertyWithInterceptor(masm,
                                             receiver,
                                             holder,
                                             name_,
                                             holder_obj);
      __ pop(name_);  // Restore the name.
      __ pop(receiver);  // Restore the holder.
    }
    // If interceptor returns no-result sentinel, call the constant function.
    __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r0, scratch);
    __ b(ne, interceptor_succeeded);
  }

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
  Register name_;
  Code::ExtraICState extra_ic_state_;
};


// Calls GenerateCheckPropertyCell for each global object in the prototype chain
// from object to (but not including) holder.
static void GenerateCheckPropertyCells(MacroAssembler* masm,
                                       Handle<JSObject> object,
                                       Handle<JSObject> holder,
                                       Handle<Name> name,
                                       Register scratch,
                                       Label* miss) {
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    if (current->IsGlobalObject()) {
      GenerateCheckPropertyCell(masm,
                                Handle<GlobalObject>::cast(current),
                                name,
                                scratch,
                                miss);
    }
    current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
  }
}


// Convert the int passed in register ival to an IEEE 754 single-precision
// floating point value and store it at memory location
// (dst + 4 * wordoffset). VFP is used for the conversion.
static void StoreIntAsFloat(MacroAssembler* masm,
                            Register dst,
                            Register wordoffset,
                            Register ival,
                            Register scratch1) {
  __ vmov(s0, ival);
  __ add(scratch1, dst, Operand(wordoffset, LSL, 2));
  __ vcvt_f32_s32(s0, s0);
  __ vstr(s0, scratch1, 0);
}


void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ Jump(code, RelocInfo::CODE_TARGET);
}


#undef __
#define __ ACCESS_MASM(masm())


Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<Name> name,
                                       int save_at_depth,
                                       Label* miss,
                                       PrototypeCheckType check) {
  // Make sure that the type feedback oracle harvests the receiver map.
  // TODO(svenpanne) Remove this hack when all ICs are reworked.
  __ mov(scratch1, Operand(Handle<Map>(object->map())));

  Handle<JSObject> first = object;
  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

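  // sp[0] is reserved by the caller for the holder when a fast API call
  // follows (see the stack layout in GenerateFastApiDirectCall).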
  if (save_at_depth == depth) {
    __ str(reg, MemOperand(sp));
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current->property_dictionary()->FindEntry(*name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else {
      Register map_reg = scratch1;
      if (!current.is_identical_to(first) || check == CHECK_ALL_MAPS) {
        Handle<Map> current_map(current->map());
        // CheckMap implicitly loads the map of |reg| into |map_reg|.
        __ CheckMap(reg, map_reg, current_map, miss, DONT_DO_SMI_CHECK);
      } else {
        __ ldr(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
      }

      // Check access rights to the global object.  This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      }
      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (heap()->InNewSpace(*prototype)) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code.  Load it from the map.
        __ ldr(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ mov(reg, Operand(prototype));
      }
    }

    if (save_at_depth == depth) {
      __ str(reg, MemOperand(sp));
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (!holder.is_identical_to(first) || check == CHECK_ALL_MAPS) {
    // Check the holder map.
    __ CheckMap(reg, scratch1, Handle<Map>(holder->map()), miss,
                DONT_DO_SMI_CHECK);
  }

  // Perform security check for access to the global object.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
  if (holder->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // If we've skipped any global objects, it's not enough to verify that
  // their maps haven't changed.  We also need to check that the property
  // cell for the property is still empty.
  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);
   1287 
   1288   // Return the register containing the holder.
   1289   return reg;
   1290 }
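        // The chain walk above is, in pseudo-C++ (a sketch that elides the
        // dictionary-mode negative lookup and the new-space prototype load):
        //   for (cur = object; cur != holder; cur = cur->prototype) {
        //     CheckMap(cur, cur->map(), miss);
        //     if (cur->IsJSGlobalProxy()) CheckAccessGlobalProxy(cur);
        //   }
        //   CheckMap(holder, holder->map(), miss);  // unless skippable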
   1291 
   1292 
   1293 void BaseLoadStubCompiler::HandlerFrontendFooter(Handle<Name> name,
   1294                                                  Label* success,
   1295                                                  Label* miss) {
   1296   if (!miss->is_unused()) {
   1297     __ b(success);
   1298     __ bind(miss);
   1299     TailCallBuiltin(masm(), MissBuiltin(kind()));
   1300   }
   1301 }
   1302 
   1303 
   1304 void BaseStoreStubCompiler::HandlerFrontendFooter(Handle<Name> name,
   1305                                                   Label* success,
   1306                                                   Label* miss) {
   1307   if (!miss->is_unused()) {
   1308     __ b(success);
   1309     GenerateRestoreName(masm(), miss, name);
   1310     TailCallBuiltin(masm(), MissBuiltin(kind()));
   1311   }
   1312 }
   1313 
   1314 
   1315 Register BaseLoadStubCompiler::CallbackHandlerFrontend(
   1316     Handle<JSObject> object,
   1317     Register object_reg,
   1318     Handle<JSObject> holder,
   1319     Handle<Name> name,
   1320     Label* success,
   1321     Handle<ExecutableAccessorInfo> callback) {
   1322   Label miss;
   1323 
   1324   Register reg = HandlerFrontendHeader(object, object_reg, holder, name, &miss);
   1325 
   1326   if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
   1327     ASSERT(!reg.is(scratch2()));
   1328     ASSERT(!reg.is(scratch3()));
   1329     ASSERT(!reg.is(scratch4()));
   1330 
   1331     // Load the properties dictionary.
   1332     Register dictionary = scratch4();
   1333     __ ldr(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset));
   1334 
   1335     // Probe the dictionary.
   1336     Label probe_done;
   1337     NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
   1338                                                      &miss,
   1339                                                      &probe_done,
   1340                                                      dictionary,
   1341                                                      this->name(),
   1342                                                      scratch2(),
   1343                                                      scratch3());
   1344     __ bind(&probe_done);
   1345 
   1346     // If probing finds an entry in the dictionary, scratch3 contains the
   1347     // pointer into the dictionary. Check that the value is the callback.
   1348     Register pointer = scratch3();
   1349     const int kElementsStartOffset = NameDictionary::kHeaderSize +
   1350         NameDictionary::kElementsStartIndex * kPointerSize;
   1351     const int kValueOffset = kElementsStartOffset + kPointerSize;
   1352     __ ldr(scratch2(), FieldMemOperand(pointer, kValueOffset));
   1353     __ cmp(scratch2(), Operand(callback));
   1354     __ b(ne, &miss);
   1355   }
   1356 
   1357   HandlerFrontendFooter(name, success, &miss);
   1358   return reg;
   1359 }
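        // Note on kValueOffset above: NameDictionary entries are (key, value,
        // details) triples stored inline in the backing FixedArray, so the value
        // lives one pointer size past the entry's key slot; kValueOffset folds
        // that into the elements-start offset applied to the entry pointer that
        // the positive lookup leaves in scratch3().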
   1360 
   1361 
   1362 void BaseLoadStubCompiler::NonexistentHandlerFrontend(
   1363     Handle<JSObject> object,
   1364     Handle<JSObject> last,
   1365     Handle<Name> name,
   1366     Label* success,
   1367     Handle<GlobalObject> global) {
   1368   Label miss;
   1369 
   1370   HandlerFrontendHeader(object, receiver(), last, name, &miss);
   1371 
   1372   // If the last object in the prototype chain is a global object,
   1373   // check that the global property cell is empty.
   1374   if (!global.is_null()) {
   1375     GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss);
   1376   }
   1377 
   1378   HandlerFrontendFooter(name, success, &miss);
   1379 }
   1380 
   1381 
   1382 void BaseLoadStubCompiler::GenerateLoadField(Register reg,
   1383                                              Handle<JSObject> holder,
   1384                                              PropertyIndex field,
   1385                                              Representation representation) {
   1386   if (!reg.is(receiver())) __ mov(receiver(), reg);
   1387   if (kind() == Code::LOAD_IC) {
   1388     LoadFieldStub stub(field.is_inobject(holder),
   1389                        field.translate(holder),
   1390                        representation);
   1391     GenerateTailCall(masm(), stub.GetCode(isolate()));
   1392   } else {
   1393     KeyedLoadFieldStub stub(field.is_inobject(holder),
   1394                             field.translate(holder),
   1395                             representation);
   1396     GenerateTailCall(masm(), stub.GetCode(isolate()));
   1397   }
   1398 }
   1399 
   1400 
   1401 void BaseLoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
   1402   // Return the constant value.
   1403   __ LoadObject(r0, value);
   1404   __ Ret();
   1405 }
   1406 
   1407 
   1408 void BaseLoadStubCompiler::GenerateLoadCallback(
   1409     Register reg,
   1410     Handle<ExecutableAccessorInfo> callback) {
   1411   // Build the AccessorInfo::args_ list on the stack and push the property name
   1412   // below the exit frame, so the GC sees them and pointers to them can be stored.
   1413   __ push(receiver());
   1414   __ mov(scratch2(), sp);  // scratch2 = AccessorInfo::args_
   1415   if (heap()->InNewSpace(callback->data())) {
   1416     __ Move(scratch3(), callback);
   1417     __ ldr(scratch3(), FieldMemOperand(scratch3(),
   1418                                        ExecutableAccessorInfo::kDataOffset));
   1419   } else {
   1420     __ Move(scratch3(), Handle<Object>(callback->data(), isolate()));
   1421   }
   1422   __ Push(reg, scratch3());
   1423   __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);
   1424   __ mov(scratch4(), scratch3());
   1425   __ Push(scratch3(), scratch4());
   1426   __ mov(scratch4(),
   1427          Operand(ExternalReference::isolate_address(isolate())));
   1428   __ Push(scratch4(), name());
   1429   __ mov(r0, sp);  // r0 = Handle<Name>
   1430 
   1431   const int kApiStackSpace = 1;
   1432   FrameScope frame_scope(masm(), StackFrame::MANUAL);
   1433   __ EnterExitFrame(false, kApiStackSpace);
   1434 
   1435   // Create AccessorInfo instance on the stack above the exit frame with
   1436   // scratch2 (internal::Object** args_) as the data.
   1437   __ str(scratch2(), MemOperand(sp, 1 * kPointerSize));
   1438   __ add(r1, sp, Operand(1 * kPointerSize));  // r1 = AccessorInfo&
   1439 
   1440   const int kStackUnwindSpace = kFastApiCallArguments + 1;
   1441   Address getter_address = v8::ToCData<Address>(callback->getter());
   1442   bool returns_handle =
   1443       !CallbackTable::ReturnsVoid(isolate(), getter_address);
   1444 
   1445   ApiFunction fun(getter_address);
   1446   ExternalReference::Type type =
   1447       returns_handle ?
   1448           ExternalReference::DIRECT_GETTER_CALL :
   1449           ExternalReference::DIRECT_GETTER_CALL_NEW;
   1450   ExternalReference ref = ExternalReference(&fun, type, isolate());
   1451 
   1452   Address thunk_address = returns_handle
   1453       ? FUNCTION_ADDR(&InvokeAccessorGetter)
   1454       : FUNCTION_ADDR(&InvokeAccessorGetterCallback);
   1455   ExternalReference::Type thunk_type =
   1456       returns_handle ?
   1457           ExternalReference::PROFILING_GETTER_CALL :
   1458           ExternalReference::PROFILING_GETTER_CALL_NEW;
   1459   ApiFunction thunk_fun(thunk_address);
   1460   ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
   1461       isolate());
   1462   __ CallApiFunctionAndReturn(ref,
   1463                               getter_address,
   1464                               thunk_ref,
   1465                               r2,
   1466                               kStackUnwindSpace,
   1467                               returns_handle,
   1468                               5);
   1469 }
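        // Sketch of the stack area built above (derived from the pushes; from
        // sp upwards): name (<- r0), isolate, two undefined slots, callback
        // data, holder (reg), receiver (<- scratch2, the args_ pointer).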
   1470 
   1471 
   1472 void BaseLoadStubCompiler::GenerateLoadInterceptor(
   1473     Register holder_reg,
   1474     Handle<JSObject> object,
   1475     Handle<JSObject> interceptor_holder,
   1476     LookupResult* lookup,
   1477     Handle<Name> name) {
   1478   ASSERT(interceptor_holder->HasNamedInterceptor());
   1479   ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
   1480 
   1481   // So far the most popular follow-ups for interceptor loads are FIELD
   1482   // and CALLBACKS, so inline only those; other cases may be added
   1483   // later.
   1484   bool compile_followup_inline = false;
   1485   if (lookup->IsFound() && lookup->IsCacheable()) {
   1486     if (lookup->IsField()) {
   1487       compile_followup_inline = true;
   1488     } else if (lookup->type() == CALLBACKS &&
   1489                lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
   1490       ExecutableAccessorInfo* callback =
   1491           ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
   1492       compile_followup_inline = callback->getter() != NULL &&
   1493           callback->IsCompatibleReceiver(*object);
   1494     }
   1495   }
   1496 
   1497   if (compile_followup_inline) {
   1498     // Compile the interceptor call, followed by inline code to load the
   1499     // property from further up the prototype chain if the call fails.
   1500     // Check that the maps haven't changed.
   1501     ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
   1502 
   1503     // Preserve the receiver register explicitly whenever it differs from the
   1504     // holder and is needed should the interceptor return without any result:
   1505     // the CALLBACKS case needs the receiver passed into C++ code, and the
   1506     // FIELD case might cause a miss during the prototype check.
   1507     bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
   1508     bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
   1509         (lookup->type() == CALLBACKS || must_perform_prototype_check);
   1510 
   1511     // Save necessary data before invoking an interceptor.
   1512     // Requires a frame to make GC aware of pushed pointers.
   1513     {
   1514       FrameScope frame_scope(masm(), StackFrame::INTERNAL);
   1515       if (must_preserve_receiver_reg) {
   1516         __ Push(receiver(), holder_reg, this->name());
   1517       } else {
   1518         __ Push(holder_reg, this->name());
   1519       }
   1520       // Invoke the interceptor.  Note: the map checks from the receiver to
   1521       // the interceptor's holder have already been compiled (see the caller
   1522       // of this method).
   1523       CompileCallLoadPropertyWithInterceptor(masm(),
   1524                                              receiver(),
   1525                                              holder_reg,
   1526                                              this->name(),
   1527                                              interceptor_holder);
   1528       // Check if the interceptor provided a value for the property.  If so,
   1529       // return immediately.
   1530       Label interceptor_failed;
   1531       __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
   1532       __ cmp(r0, scratch1());
   1533       __ b(eq, &interceptor_failed);
   1534       frame_scope.GenerateLeaveFrame();
   1535       __ Ret();
   1536 
   1537       __ bind(&interceptor_failed);
   1538       __ pop(this->name());
   1539       __ pop(holder_reg);
   1540       if (must_preserve_receiver_reg) {
   1541         __ pop(receiver());
   1542       }
   1543       // Leave the internal frame.
   1544     }
   1545 
   1546     GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
   1547   } else {  // !compile_followup_inline
   1548     // Call the runtime system to load the interceptor.
   1549     // Check that the maps haven't changed.
   1550     PushInterceptorArguments(masm(), receiver(), holder_reg,
   1551                              this->name(), interceptor_holder);
   1552 
   1553     ExternalReference ref =
   1554         ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
   1555                           isolate());
   1556     __ TailCallExternalReference(ref, 6, 1);
   1557   }
   1558 }
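        // JS-visible behaviour this compiles (illustrative): if the interceptor
        // declines the lookup (returns the no-interceptor-result sentinel), the
        // load continues as if the interceptor were absent and finds the FIELD
        // or CALLBACKS property located by |lookup| further up the chain;
        // otherwise the interceptor's value is returned directly.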
   1559 
   1560 
   1561 void CallStubCompiler::GenerateNameCheck(Handle<Name> name, Label* miss) {
   1562   if (kind_ == Code::KEYED_CALL_IC) {
   1563     __ cmp(r2, Operand(name));
   1564     __ b(ne, miss);
   1565   }
   1566 }
   1567 
   1568 
   1569 void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
   1570                                                    Handle<JSObject> holder,
   1571                                                    Handle<Name> name,
   1572                                                    Label* miss) {
   1573   ASSERT(holder->IsGlobalObject());
   1574 
   1575   // Get the number of arguments.
   1576   const int argc = arguments().immediate();
   1577 
   1578   // Get the receiver from the stack.
   1579   __ ldr(r0, MemOperand(sp, argc * kPointerSize));
   1580 
   1581   // Check that the maps haven't changed.
   1582   __ JumpIfSmi(r0, miss);
   1583   CheckPrototypes(object, r0, holder, r3, r1, r4, name, miss);
   1584 }
   1585 
   1586 
   1587 void CallStubCompiler::GenerateLoadFunctionFromCell(
   1588     Handle<Cell> cell,
   1589     Handle<JSFunction> function,
   1590     Label* miss) {
   1591   // Get the value from the cell.
   1592   __ mov(r3, Operand(cell));
   1593   __ ldr(r1, FieldMemOperand(r3, Cell::kValueOffset));
   1594 
   1595   // Check that the cell contains the same function.
   1596   if (heap()->InNewSpace(*function)) {
   1597     // We can't embed a pointer to a function in new space so we have
   1598     // to verify that the shared function info is unchanged. This has
   1599     // the nice side effect that multiple closures based on the same
   1600     // function can all use this call IC. Before we load through the
   1601     // function, we have to verify that it still is a function.
   1602     __ JumpIfSmi(r1, miss);
   1603     __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
   1604     __ b(ne, miss);
   1605 
   1606     // Check the shared function info. Make sure it hasn't changed.
   1607     __ Move(r3, Handle<SharedFunctionInfo>(function->shared()));
   1608     __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
   1609     __ cmp(r4, r3);
   1610   } else {
   1611     __ cmp(r1, Operand(function));
   1612   }
   1613   __ b(ne, miss);
   1614 }
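        // Illustrative example (a sketch, not from the original source) of the
        // shared-function-info check paying off:
        //   function make() { return function () { return 42; }; }
        //   var f = make(), g = make();  // distinct closures, one shared info
        // A call IC compiled against |f| also works for |g|, since only the
        // SharedFunctionInfo is embedded in the check.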
   1615 
   1616 
   1617 void CallStubCompiler::GenerateMissBranch() {
   1618   Handle<Code> code =
   1619       isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
   1620                                                kind_,
   1621                                                extra_state_);
   1622   __ Jump(code, RelocInfo::CODE_TARGET);
   1623 }
   1624 
   1625 
   1626 Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
   1627                                                 Handle<JSObject> holder,
   1628                                                 PropertyIndex index,
   1629                                                 Handle<Name> name) {
   1630   // ----------- S t a t e -------------
   1631   //  -- r2    : name
   1632   //  -- lr    : return address
   1633   // -----------------------------------
   1634   Label miss;
   1635 
   1636   GenerateNameCheck(name, &miss);
   1637 
   1638   const int argc = arguments().immediate();
   1639 
   1640   // Get the receiver of the function from the stack into r0.
   1641   __ ldr(r0, MemOperand(sp, argc * kPointerSize));
   1642   // Check that the receiver isn't a smi.
   1643   __ JumpIfSmi(r0, &miss);
   1644 
   1645   // Do the right check and compute the holder register.
   1646   Register reg = CheckPrototypes(object, r0, holder, r1, r3, r4, name, &miss);
   1647   GenerateFastPropertyLoad(masm(), r1, reg, index.is_inobject(holder),
   1648                            index.translate(holder), Representation::Tagged());
   1649 
   1650   GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
   1651 
   1652   // Handle call cache miss.
   1653   __ bind(&miss);
   1654   GenerateMissBranch();
   1655 
   1656   // Return the generated code.
   1657   return GetCode(Code::FIELD, name);
   1658 }
   1659 
   1660 
   1661 Handle<Code> CallStubCompiler::CompileArrayCodeCall(
   1662     Handle<Object> object,
   1663     Handle<JSObject> holder,
   1664     Handle<Cell> cell,
   1665     Handle<JSFunction> function,
   1666     Handle<String> name,
   1667     Code::StubType type) {
   1668   Label miss;
   1669 
   1670   // Check that the function is still the Array function.
   1671   const int argc = arguments().immediate();
   1672   GenerateNameCheck(name, &miss);
   1673   Register receiver = r1;
   1674 
   1675   if (cell.is_null()) {
   1676     __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
   1677 
   1678     // Check that the receiver isn't a smi.
   1679     __ JumpIfSmi(receiver, &miss);
   1680 
   1681     // Check that the maps haven't changed.
   1682     CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, r3, r0,
   1683                     r4, name, &miss);
   1684   } else {
   1685     ASSERT(cell->value() == *function);
   1686     GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
   1687                                 &miss);
   1688     GenerateLoadFunctionFromCell(cell, function, &miss);
   1689   }
   1690 
   1691   Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
   1692   site->set_transition_info(Smi::FromInt(GetInitialFastElementsKind()));
   1693   Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
   1694   __ mov(r0, Operand(argc));
   1695   __ mov(r2, Operand(site_feedback_cell));
   1696   __ mov(r1, Operand(function));
   1697 
   1698   ArrayConstructorStub stub(isolate());
   1699   __ TailCallStub(&stub);
   1700 
   1701   __ bind(&miss);
   1702   GenerateMissBranch();
   1703 
   1704   // Return the generated code.
   1705   return GetCode(type, name);
   1706 }
   1707 
   1708 
   1709 Handle<Code> CallStubCompiler::CompileArrayPushCall(
   1710     Handle<Object> object,
   1711     Handle<JSObject> holder,
   1712     Handle<Cell> cell,
   1713     Handle<JSFunction> function,
   1714     Handle<String> name,
   1715     Code::StubType type) {
   1716   // ----------- S t a t e -------------
   1717   //  -- r2    : name
   1718   //  -- lr    : return address
   1719   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   1720   //  -- ...
   1721   //  -- sp[argc * 4]           : receiver
   1722   // -----------------------------------
   1723 
   1724   // If object is not an array, bail out to regular call.
   1725   if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
   1726 
   1727   Label miss;
   1728   GenerateNameCheck(name, &miss);
   1729 
   1730   Register receiver = r1;
   1731   // Get the receiver from the stack
   1732   const int argc = arguments().immediate();
   1733   __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
   1734 
   1735   // Check that the receiver isn't a smi.
   1736   __ JumpIfSmi(receiver, &miss);
   1737 
   1738   // Check that the maps haven't changed.
   1739   CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, r3, r0, r4,
   1740                   name, &miss);
   1741 
   1742   if (argc == 0) {
   1743     // Nothing to do, just return the length.
   1744     __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1745     __ Drop(argc + 1);
   1746     __ Ret();
   1747   } else {
   1748     Label call_builtin;
   1749 
   1750     if (argc == 1) {  // Otherwise fall through to call the builtin.
   1751       Label attempt_to_grow_elements, with_write_barrier, check_double;
   1752 
   1753       Register elements = r6;
   1754       Register end_elements = r5;
   1755       // Get the elements array of the object.
   1756       __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
   1757 
   1758       // Check that the elements are in fast mode and writable.
   1759       __ CheckMap(elements,
   1760                   r0,
   1761                   Heap::kFixedArrayMapRootIndex,
   1762                   &check_double,
   1763                   DONT_DO_SMI_CHECK);
   1764 
   1765       // Get the array's length into r0 and calculate new length.
   1766       __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1767       __ add(r0, r0, Operand(Smi::FromInt(argc)));
   1768 
   1769       // Get the elements' length.
   1770       __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
   1771 
   1772       // Check if we could survive without allocation.
   1773       __ cmp(r0, r4);
   1774       __ b(gt, &attempt_to_grow_elements);
   1775 
   1776       // Check if value is a smi.
   1777       __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize));
   1778       __ JumpIfNotSmi(r4, &with_write_barrier);
   1779 
   1780       // Save new length.
   1781       __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1782 
   1783       // Store the value.
   1784       // We may need a register holding the address of the stored slot below,
   1785       // so let the pre-indexed store write that address back into end_elements.
   1786       __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(r0));
   1787       const int kEndElementsOffset =
   1788           FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
   1789       __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
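              // Worked example for argc == 1: if the new length smi in r0
              // encodes n, the pre-indexed store above targets
              //   elements + n * kPointerSize + FixedArray::kHeaderSize
              //            - kHeapObjectTag - kPointerSize,
              // i.e. exactly the slot of element n - 1, the appended value.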
   1790 
   1791       // Return the new length (a smi, already in r0).
   1792       __ Drop(argc + 1);
   1793       __ Ret();
   1794 
   1795       __ bind(&check_double);
   1796 
   1797       // Check that the elements are in fast mode and writable.
   1798       __ CheckMap(elements,
   1799                   r0,
   1800                   Heap::kFixedDoubleArrayMapRootIndex,
   1801                   &call_builtin,
   1802                   DONT_DO_SMI_CHECK);
   1803 
   1804       // Get the array's length into r0 and calculate new length.
   1805       __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1806       __ add(r0, r0, Operand(Smi::FromInt(argc)));
   1807 
   1808       // Get the elements' length.
   1809       __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
   1810 
   1811       // Check if we could survive without allocation.
   1812       __ cmp(r0, r4);
   1813       __ b(gt, &call_builtin);
   1814 
   1815       __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize));
   1816       __ StoreNumberToDoubleElements(r4, r0, elements, r5, d0,
   1817                                      &call_builtin, argc * kDoubleSize);
   1818 
   1819       // Save new length.
   1820       __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1821 
   1822       // Return the new length (a smi, already in r0).
   1823       __ Drop(argc + 1);
   1824       __ Ret();
   1825 
   1826       __ bind(&with_write_barrier);
   1827 
   1828       __ ldr(r3, FieldMemOperand(receiver, HeapObject::kMapOffset));
   1829 
   1830       if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
   1831         Label fast_object, not_fast_object;
   1832         __ CheckFastObjectElements(r3, r7, &not_fast_object);
   1833         __ jmp(&fast_object);
   1834         // In case of fast smi-only, convert to fast object, otherwise bail out.
   1835         __ bind(&not_fast_object);
   1836         __ CheckFastSmiElements(r3, r7, &call_builtin);
   1837 
   1838         __ ldr(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
   1839         __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
   1840         __ cmp(r7, ip);
   1841         __ b(eq, &call_builtin);
   1842         // r1: receiver
   1843         // r3: map
   1844         Label try_holey_map;
   1845         __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
   1846                                                FAST_ELEMENTS,
   1847                                                r3,
   1848                                                r7,
   1849                                                &try_holey_map);
   1850         __ mov(r2, receiver);
   1851         ElementsTransitionGenerator::
   1852             GenerateMapChangeElementsTransition(masm(),
   1853                                                 DONT_TRACK_ALLOCATION_SITE,
   1854                                                 NULL);
   1855         __ jmp(&fast_object);
   1856 
   1857         __ bind(&try_holey_map);
   1858         __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
   1859                                                FAST_HOLEY_ELEMENTS,
   1860                                                r3,
   1861                                                r7,
   1862                                                &call_builtin);
   1863         __ mov(r2, receiver);
   1864         ElementsTransitionGenerator::
   1865             GenerateMapChangeElementsTransition(masm(),
   1866                                                 DONT_TRACK_ALLOCATION_SITE,
   1867                                                 NULL);
   1868         __ bind(&fast_object);
   1869       } else {
   1870         __ CheckFastObjectElements(r3, r3, &call_builtin);
   1871       }
   1872 
   1873       // Save new length.
   1874       __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1875 
   1876       // Store the value.
   1877       // We need end_elements to hold the address of the stored slot for the
   1878       // write barrier below, so let the pre-indexed store write it back.
   1879       __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(r0));
   1880       __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
   1881 
   1882       __ RecordWrite(elements,
   1883                      end_elements,
   1884                      r4,
   1885                      kLRHasNotBeenSaved,
   1886                      kDontSaveFPRegs,
   1887                      EMIT_REMEMBERED_SET,
   1888                      OMIT_SMI_CHECK);
   1889       __ Drop(argc + 1);
   1890       __ Ret();
   1891 
   1892       __ bind(&attempt_to_grow_elements);
   1893       // r0: array's length + 1.
   1894       // r4: elements' length.
   1895 
   1896       if (!FLAG_inline_new) {
   1897         __ b(&call_builtin);
   1898       }
   1899 
   1900       __ ldr(r2, MemOperand(sp, (argc - 1) * kPointerSize));
   1901       // Growing elements that are SMI-only requires special handling in case
   1902       // the new element is non-Smi. For now, delegate to the builtin.
   1903       Label no_fast_elements_check;
   1904       __ JumpIfSmi(r2, &no_fast_elements_check);
   1905       __ ldr(r7, FieldMemOperand(receiver, HeapObject::kMapOffset));
   1906       __ CheckFastObjectElements(r7, r7, &call_builtin);
   1907       __ bind(&no_fast_elements_check);
   1908 
   1909       ExternalReference new_space_allocation_top =
   1910           ExternalReference::new_space_allocation_top_address(isolate());
   1911       ExternalReference new_space_allocation_limit =
   1912           ExternalReference::new_space_allocation_limit_address(isolate());
   1913 
   1914       const int kAllocationDelta = 4;
   1915       // Load top and check if it is the end of elements.
   1916       __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(r0));
   1917       __ add(end_elements, end_elements, Operand(kEndElementsOffset));
   1918       __ mov(r7, Operand(new_space_allocation_top));
   1919       __ ldr(r3, MemOperand(r7));
   1920       __ cmp(end_elements, r3);
   1921       __ b(ne, &call_builtin);
   1922 
   1923       __ mov(r9, Operand(new_space_allocation_limit));
   1924       __ ldr(r9, MemOperand(r9));
   1925       __ add(r3, r3, Operand(kAllocationDelta * kPointerSize));
   1926       __ cmp(r3, r9);
   1927       __ b(hi, &call_builtin);
   1928 
   1929       // There is room; grow the elements backing store in place.
   1930       // Update new_space_allocation_top.
   1931       __ str(r3, MemOperand(r7));
   1932       // Push the argument.
   1933       __ str(r2, MemOperand(end_elements));
   1934       // Fill the rest with holes.
   1935       __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
   1936       for (int i = 1; i < kAllocationDelta; i++) {
   1937         __ str(r3, MemOperand(end_elements, i * kPointerSize));
   1938       }
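              // After this loop the grown region is (sketch, with
              // kAllocationDelta == 4):  [arg, hole, hole, hole] -- one stored
              // element plus three holes that later pushes can fill without
              // another allocation.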
   1939 
   1940       // Update elements' and array's sizes.
   1941       __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1942       __ add(r4, r4, Operand(Smi::FromInt(kAllocationDelta)));
   1943       __ str(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
   1944 
   1945       // Elements are in new space, so write barrier is not required.
   1946       __ Drop(argc + 1);
   1947       __ Ret();
   1948     }
   1949     __ bind(&call_builtin);
   1950     __ TailCallExternalReference(
   1951         ExternalReference(Builtins::c_ArrayPush, isolate()), argc + 1, 1);
   1952   }
   1953 
   1954   // Handle call cache miss.
   1955   __ bind(&miss);
   1956   GenerateMissBranch();
   1957 
   1958   // Return the generated code.
   1959   return GetCode(type, name);
   1960 }
   1961 
   1962 
   1963 Handle<Code> CallStubCompiler::CompileArrayPopCall(
   1964     Handle<Object> object,
   1965     Handle<JSObject> holder,
   1966     Handle<Cell> cell,
   1967     Handle<JSFunction> function,
   1968     Handle<String> name,
   1969     Code::StubType type) {
   1970   // ----------- S t a t e -------------
   1971   //  -- r2    : name
   1972   //  -- lr    : return address
   1973   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   1974   //  -- ...
   1975   //  -- sp[argc * 4]           : receiver
   1976   // -----------------------------------
   1977 
   1978   // If object is not an array, bail out to regular call.
   1979   if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
   1980 
   1981   Label miss, return_undefined, call_builtin;
   1982   Register receiver = r1;
   1983   Register elements = r3;
   1984   GenerateNameCheck(name, &miss);
   1985 
   1986   // Get the receiver from the stack
   1987   const int argc = arguments().immediate();
   1988   __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
   1989   // Check that the receiver isn't a smi.
   1990   __ JumpIfSmi(receiver, &miss);
   1991 
   1992   // Check that the maps haven't changed.
   1993   CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, elements,
   1994                   r4, r0, name, &miss);
   1995 
   1996   // Get the elements array of the object.
   1997   __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
   1998 
   1999   // Check that the elements are in fast mode and writable.
   2000   __ CheckMap(elements,
   2001               r0,
   2002               Heap::kFixedArrayMapRootIndex,
   2003               &call_builtin,
   2004               DONT_DO_SMI_CHECK);
   2005 
   2006   // Get the array's length into r4 and calculate new length.
   2007   __ ldr(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
   2008   __ sub(r4, r4, Operand(Smi::FromInt(1)), SetCC);
   2009   __ b(lt, &return_undefined);
   2010 
   2011   // Get the last element.
   2012   __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
   2013   // We can't address the last element in one operation. Compute the more
   2014   // expensive shift first, and use an offset later on.
   2015   __ add(elements, elements, Operand::PointerOffsetFromSmiKey(r4));
   2016   __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
   2017   __ cmp(r0, r6);
   2018   __ b(eq, &call_builtin);
   2019 
   2020   // Set the array's length.
   2021   __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
   2022 
   2023   // Fill with the hole.
   2024   __ str(r6, FieldMemOperand(elements, FixedArray::kHeaderSize));
   2025   __ Drop(argc + 1);
   2026   __ Ret();
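          // Illustrative: [1, 2, 3].pop() takes this fast path and returns 3.
          // An array with a trailing hole (e.g. [1,,]) bails to the builtin
          // above, since the hole may require a prototype-chain lookup.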
   2027 
   2028   __ bind(&return_undefined);
   2029   __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
   2030   __ Drop(argc + 1);
   2031   __ Ret();
   2032 
   2033   __ bind(&call_builtin);
   2034   __ TailCallExternalReference(
   2035       ExternalReference(Builtins::c_ArrayPop, isolate()), argc + 1, 1);
   2036 
   2037   // Handle call cache miss.
   2038   __ bind(&miss);
   2039   GenerateMissBranch();
   2040 
   2041   // Return the generated code.
   2042   return GetCode(type, name);
   2043 }
   2044 
   2045 
   2046 Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
   2047     Handle<Object> object,
   2048     Handle<JSObject> holder,
   2049     Handle<Cell> cell,
   2050     Handle<JSFunction> function,
   2051     Handle<String> name,
   2052     Code::StubType type) {
   2053   // ----------- S t a t e -------------
   2054   //  -- r2                     : function name
   2055   //  -- lr                     : return address
   2056   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   2057   //  -- ...
   2058   //  -- sp[argc * 4]           : receiver
   2059   // -----------------------------------
   2060 
   2061   // If object is not a string, bail out to regular call.
   2062   if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
   2063 
   2064   const int argc = arguments().immediate();
   2065   Label miss;
   2066   Label name_miss;
   2067   Label index_out_of_range;
   2068   Label* index_out_of_range_label = &index_out_of_range;
   2069 
   2070   if (kind_ == Code::CALL_IC &&
   2071       (CallICBase::StringStubState::decode(extra_state_) ==
   2072        DEFAULT_STRING_STUB)) {
   2073     index_out_of_range_label = &miss;
   2074   }
   2075   GenerateNameCheck(name, &name_miss);
   2076 
   2077   // Check that the maps starting from the prototype haven't changed.
   2078   GenerateDirectLoadGlobalFunctionPrototype(masm(),
   2079                                             Context::STRING_FUNCTION_INDEX,
   2080                                             r0,
   2081                                             &miss);
   2082   ASSERT(!object.is_identical_to(holder));
   2083   CheckPrototypes(
   2084       Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
   2085       r0, holder, r1, r3, r4, name, &miss);
   2086 
   2087   Register receiver = r1;
   2088   Register index = r4;
   2089   Register result = r0;
   2090   __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
   2091   if (argc > 0) {
   2092     __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
   2093   } else {
   2094     __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
   2095   }
   2096 
   2097   StringCharCodeAtGenerator generator(receiver,
   2098                                       index,
   2099                                       result,
   2100                                       &miss,  // When not a string.
   2101                                       &miss,  // When not a number.
   2102                                       index_out_of_range_label,
   2103                                       STRING_INDEX_IS_NUMBER);
   2104   generator.GenerateFast(masm());
   2105   __ Drop(argc + 1);
   2106   __ Ret();
   2107 
   2108   StubRuntimeCallHelper call_helper;
   2109   generator.GenerateSlow(masm(), call_helper);
   2110 
   2111   if (index_out_of_range.is_linked()) {
   2112     __ bind(&index_out_of_range);
   2113     __ LoadRoot(r0, Heap::kNanValueRootIndex);
   2114     __ Drop(argc + 1);
   2115     __ Ret();
   2116   }
   2117 
   2118   __ bind(&miss);
   2119   // Restore function name in r2.
   2120   __ Move(r2, name);
   2121   __ bind(&name_miss);
   2122   GenerateMissBranch();
   2123 
   2124   // Return the generated code.
   2125   return GetCode(type, name);
   2126 }
   2127 
   2128 
   2129 Handle<Code> CallStubCompiler::CompileStringCharAtCall(
   2130     Handle<Object> object,
   2131     Handle<JSObject> holder,
   2132     Handle<Cell> cell,
   2133     Handle<JSFunction> function,
   2134     Handle<String> name,
   2135     Code::StubType type) {
   2136   // ----------- S t a t e -------------
   2137   //  -- r2                     : function name
   2138   //  -- lr                     : return address
   2139   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   2140   //  -- ...
   2141   //  -- sp[argc * 4]           : receiver
   2142   // -----------------------------------
   2143 
   2144   // If object is not a string, bail out to regular call.
   2145   if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
   2146 
   2147   const int argc = arguments().immediate();
   2148   Label miss;
   2149   Label name_miss;
   2150   Label index_out_of_range;
   2151   Label* index_out_of_range_label = &index_out_of_range;
   2152   if (kind_ == Code::CALL_IC &&
   2153       (CallICBase::StringStubState::decode(extra_state_) ==
   2154        DEFAULT_STRING_STUB)) {
   2155     index_out_of_range_label = &miss;
   2156   }
   2157   GenerateNameCheck(name, &name_miss);
   2158 
   2159   // Check that the maps starting from the prototype haven't changed.
   2160   GenerateDirectLoadGlobalFunctionPrototype(masm(),
   2161                                             Context::STRING_FUNCTION_INDEX,
   2162                                             r0,
   2163                                             &miss);
   2164   ASSERT(!object.is_identical_to(holder));
   2165   CheckPrototypes(
   2166       Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
   2167       r0, holder, r1, r3, r4, name, &miss);
   2168 
   2169   Register receiver = r0;
   2170   Register index = r4;
   2171   Register scratch = r3;
   2172   Register result = r0;
   2173   __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
   2174   if (argc > 0) {
   2175     __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
   2176   } else {
   2177     __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
   2178   }
   2179 
   2180   StringCharAtGenerator generator(receiver,
   2181                                   index,
   2182                                   scratch,
   2183                                   result,
   2184                                   &miss,  // When not a string.
   2185                                   &miss,  // When not a number.
   2186                                   index_out_of_range_label,
   2187                                   STRING_INDEX_IS_NUMBER);
   2188   generator.GenerateFast(masm());
   2189   __ Drop(argc + 1);
   2190   __ Ret();
   2191 
   2192   StubRuntimeCallHelper call_helper;
   2193   generator.GenerateSlow(masm(), call_helper);
   2194 
   2195   if (index_out_of_range.is_linked()) {
   2196     __ bind(&index_out_of_range);
   2197     __ LoadRoot(r0, Heap::kempty_stringRootIndex);
   2198     __ Drop(argc + 1);
   2199     __ Ret();
   2200   }
   2201 
   2202   __ bind(&miss);
   2203   // Restore function name in r2.
   2204   __ Move(r2, name);
   2205   __ bind(&name_miss);
   2206   GenerateMissBranch();
   2207 
   2208   // Return the generated code.
   2209   return GetCode(type, name);
   2210 }
   2211 
   2212 
   2213 Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
   2214     Handle<Object> object,
   2215     Handle<JSObject> holder,
   2216     Handle<Cell> cell,
   2217     Handle<JSFunction> function,
   2218     Handle<String> name,
   2219     Code::StubType type) {
   2220   // ----------- S t a t e -------------
   2221   //  -- r2                     : function name
   2222   //  -- lr                     : return address
   2223   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   2224   //  -- ...
   2225   //  -- sp[argc * 4]           : receiver
   2226   // -----------------------------------
   2227 
   2228   const int argc = arguments().immediate();
   2229 
   2230   // If the object is not a JSObject or we got an unexpected number of
   2231   // arguments, bail out to the regular call.
   2232   if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
   2233 
   2234   Label miss;
   2235   GenerateNameCheck(name, &miss);
   2236 
   2237   if (cell.is_null()) {
   2238     __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
   2239 
   2240     __ JumpIfSmi(r1, &miss);
   2241 
   2242     CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
   2243                     name, &miss);
   2244   } else {
   2245     ASSERT(cell->value() == *function);
   2246     GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
   2247                                 &miss);
   2248     GenerateLoadFunctionFromCell(cell, function, &miss);
   2249   }
   2250 
   2251   // Load the char code argument.
   2252   Register code = r1;
   2253   __ ldr(code, MemOperand(sp, 0 * kPointerSize));
   2254 
   2255   // Check the code is a smi.
   2256   Label slow;
   2257   __ JumpIfNotSmi(code, &slow);
   2258 
   2259   // Convert the smi code to uint16.
   2260   __ and_(code, code, Operand(Smi::FromInt(0xffff)));
   2261 
   2262   StringCharFromCodeGenerator generator(code, r0);
   2263   generator.GenerateFast(masm());
   2264   __ Drop(argc + 1);
   2265   __ Ret();
   2266 
   2267   StubRuntimeCallHelper call_helper;
   2268   generator.GenerateSlow(masm(), call_helper);
   2269 
   2270   // Tail call the full function. We do not have to patch the receiver
   2271   // because the function makes no use of it.
   2272   __ bind(&slow);
   2273   ParameterCount expected(function);
   2274   __ InvokeFunction(function, expected, arguments(),
   2275                     JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
   2276 
   2277   __ bind(&miss);
   2278   // r2: function name.
   2279   GenerateMissBranch();
   2280 
   2281   // Return the generated code.
   2282   return GetCode(type, name);
   2283 }
   2284 
   2285 
   2286 Handle<Code> CallStubCompiler::CompileMathFloorCall(
   2287     Handle<Object> object,
   2288     Handle<JSObject> holder,
   2289     Handle<Cell> cell,
   2290     Handle<JSFunction> function,
   2291     Handle<String> name,
   2292     Code::StubType type) {
   2293   // ----------- S t a t e -------------
   2294   //  -- r2                     : function name
   2295   //  -- lr                     : return address
   2296   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   2297   //  -- ...
   2298   //  -- sp[argc * 4]           : receiver
   2299   // -----------------------------------
   2300 
   2301   const int argc = arguments().immediate();
   2302   // If the object is not a JSObject or we got an unexpected number of
   2303   // arguments, bail out to the regular call.
   2304   if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
   2305 
   2306   Label miss, slow;
   2307   GenerateNameCheck(name, &miss);
   2308 
   2309   if (cell.is_null()) {
   2310     __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
   2311     __ JumpIfSmi(r1, &miss);
   2312     CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
   2313                     name, &miss);
   2314   } else {
   2315     ASSERT(cell->value() == *function);
   2316     GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
   2317                                 &miss);
   2318     GenerateLoadFunctionFromCell(cell, function, &miss);
   2319   }
   2320 
   2321   // Load the (only) argument into r0.
   2322   __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
   2323 
   2324   // If the argument is a smi, just return.
   2325   __ SmiTst(r0);
   2326   __ Drop(argc + 1, eq);
   2327   __ Ret(eq);
   2328 
   2329   __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
   2330 
   2331   Label smi_check, just_return;
   2332 
   2333   // Load the HeapNumber value.
   2334   // We will need access to the value in the core registers, so we load it
   2335   // with ldrd and move it to the FPU. This also spares a sub instruction for
   2336   // updating the HeapNumber value address, since vldr requires an offset
   2337   // that is a multiple of 4.
   2338   __ Ldrd(r4, r5, FieldMemOperand(r0, HeapNumber::kValueOffset));
   2339   __ vmov(d1, r4, r5);
   2340 
   2341   // Check for NaN, infinities and -0; they are invariant under Math.floor,
   2342   // so just return the original argument.
   2344   __ Sbfx(r3, r5, HeapNumber::kExponentShift, HeapNumber::kExponentBits);
   2345   __ cmp(r3, Operand(-1));
   2346   __ b(eq, &just_return);
   2347   __ eor(r3, r5, Operand(0x80000000u));
   2348   __ orr(r3, r3, r4, SetCC);
   2349   __ b(eq, &just_return);
   2350   // Test for values that can be exactly represented as a
   2351   // signed 32-bit integer.
   2352   __ TryDoubleToInt32Exact(r0, d1, d2);
   2353   // If the conversion was exact, check that the result fits in a smi.
   2354   __ b(eq, &smi_check);
   2355   __ cmp(r5, Operand(0));
   2356 
   2357   // If the input is in (+0, +inf), the cmp has cleared overflow and negative
   2358   // (V=0 and N=0), so the two following instructions won't execute and we
   2359   // fall through to smi_check to see whether the result fits into a smi.
   2360 
   2361   // If the input is in (-inf, -0), subtract one, and go to slow if that
   2362   // overflows.  Otherwise fall through to the smi check.
   2363   // Hint: if x is a negative, non-integer number,
   2364   // floor(x) == round_to_zero(x) - 1.
   2365   __ sub(r0, r0, Operand(1), SetCC, mi);
   2366   __ b(vs, &slow);
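          // Worked example: for x = -2.5 the truncation gives r0 == -2
          // (inexact), r5 is negative, so the conditional sub yields
          // -3 == floor(-2.5). For x = 2.5 the sub is skipped and the
          // truncated 2 == floor(2.5) falls through to the smi check.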
   2367 
   2368   __ bind(&smi_check);
   2369   // Check whether the result fits into a smi. If we had an overflow, the
   2370   // result is either 0x80000000 or 0x7FFFFFFF and won't fit into a smi;
   2371   // in that case, branch to slow.
   2372   __ SmiTag(r0, SetCC);
   2373   __ b(vs, &slow);
   2374 
   2375   __ bind(&just_return);
   2376   __ Drop(argc + 1);
   2377   __ Ret();
   2378 
   2379   __ bind(&slow);
   2380   // Tail call the full function. We do not have to patch the receiver
   2381   // because the function makes no use of it.
   2382   ParameterCount expected(function);
   2383   __ InvokeFunction(function, expected, arguments(),
   2384                     JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
   2385 
   2386   __ bind(&miss);
   2387   // r2: function name.
   2388   GenerateMissBranch();
   2389 
   2390   // Return the generated code.
   2391   return GetCode(type, name);
   2392 }
   2393 
   2394 
   2395 Handle<Code> CallStubCompiler::CompileMathAbsCall(
   2396     Handle<Object> object,
   2397     Handle<JSObject> holder,
   2398     Handle<Cell> cell,
   2399     Handle<JSFunction> function,
   2400     Handle<String> name,
   2401     Code::StubType type) {
   2402   // ----------- S t a t e -------------
   2403   //  -- r2                     : function name
   2404   //  -- lr                     : return address
   2405   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   2406   //  -- ...
   2407   //  -- sp[argc * 4]           : receiver
   2408   // -----------------------------------
   2409 
   2410   const int argc = arguments().immediate();
   2411   // If the object is not a JSObject or we got an unexpected number of
   2412   // arguments, bail out to the regular call.
   2413   if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
   2414 
   2415   Label miss;
   2416   GenerateNameCheck(name, &miss);
   2417   if (cell.is_null()) {
   2418     __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
   2419     __ JumpIfSmi(r1, &miss);
   2420     CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
   2421                     name, &miss);
   2422   } else {
   2423     ASSERT(cell->value() == *function);
   2424     GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
   2425                                 &miss);
   2426     GenerateLoadFunctionFromCell(cell, function, &miss);
   2427   }
   2428 
   2429   // Load the (only) argument into r0.
   2430   __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
   2431 
   2432   // Check if the argument is a smi.
   2433   Label not_smi;
   2434   __ JumpIfNotSmi(r0, &not_smi);
   2435 
   2436   // Do bitwise not or do nothing depending on the sign of the
   2437   // argument.
   2438   __ eor(r1, r0, Operand(r0, ASR, kBitsPerInt - 1));
   2439 
   2440   // Add 1 or do nothing depending on the sign of the argument.
   2441   __ sub(r0, r1, Operand(r0, ASR, kBitsPerInt - 1), SetCC);
   2442 
   2443   // If the result is still negative, go to the slow case.
   2444   // This only happens for the most negative smi.
   2445   Label slow;
   2446   __ b(mi, &slow);
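          // Worked example: for a negative smi the shift produces mask == -1,
          // so r1 == x ^ -1 == ~x and r0 == ~x - (-1) == ~x + 1 == -x; this
          // works directly on the tagged value because smi negation commutes
          // with the tag. For x >= 0 the mask is 0 and nothing changes. Only
          // the most negative smi overflows back to negative, hence the mi
          // branch to the slow case.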
   2447 
   2448   // Smi case done.
   2449   __ Drop(argc + 1);
   2450   __ Ret();
   2451 
   2452   // Check if the argument is a heap number and load its exponent and
   2453   // sign.
   2454   __ bind(&not_smi);
   2455   __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
   2456   __ ldr(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));
   2457 
   2458   // Check the sign of the argument. If the argument is positive,
   2459   // just return it.
   2460   Label negative_sign;
   2461   __ tst(r1, Operand(HeapNumber::kSignMask));
   2462   __ b(ne, &negative_sign);
   2463   __ Drop(argc + 1);
   2464   __ Ret();
   2465 
   2466   // If the argument is negative, clear the sign, and return a new
   2467   // number.
   2468   __ bind(&negative_sign);
   2469   __ eor(r1, r1, Operand(HeapNumber::kSignMask));
   2470   __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
   2471   __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
   2472   __ AllocateHeapNumber(r0, r4, r5, r6, &slow);
   2473   __ str(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));
   2474   __ str(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
   2475   __ Drop(argc + 1);
   2476   __ Ret();
   2477 
   2478   // Tail call the full function. We do not have to patch the receiver
   2479   // because the function makes no use of it.
   2480   __ bind(&slow);
   2481   ParameterCount expected(function);
   2482   __ InvokeFunction(function, expected, arguments(),
   2483                     JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
   2484 
   2485   __ bind(&miss);
   2486   // r2: function name.
   2487   GenerateMissBranch();
   2488 
   2489   // Return the generated code.
   2490   return GetCode(type, name);
   2491 }
   2492 
   2493 
   2494 Handle<Code> CallStubCompiler::CompileFastApiCall(
   2495     const CallOptimization& optimization,
   2496     Handle<Object> object,
   2497     Handle<JSObject> holder,
   2498     Handle<Cell> cell,
   2499     Handle<JSFunction> function,
   2500     Handle<String> name) {
   2501   Counters* counters = isolate()->counters();
   2502 
   2503   ASSERT(optimization.is_simple_api_call());
   2504   // Bail out if the object is a global object, as we don't want to
   2505   // repatch it to the global receiver.
   2506   if (object->IsGlobalObject()) return Handle<Code>::null();
   2507   if (!cell.is_null()) return Handle<Code>::null();
   2508   if (!object->IsJSObject()) return Handle<Code>::null();
   2509   int depth = optimization.GetPrototypeDepthOfExpectedType(
   2510       Handle<JSObject>::cast(object), holder);
   2511   if (depth == kInvalidProtoDepth) return Handle<Code>::null();
   2512 
   2513   Label miss, miss_before_stack_reserved;
   2514   GenerateNameCheck(name, &miss_before_stack_reserved);
   2515 
   2516   // Get the receiver from the stack.
   2517   const int argc = arguments().immediate();
   2518   __ ldr(r1, MemOperand(sp, argc * kPointerSize));
   2519 
   2520   // Check that the receiver isn't a smi.
   2521   __ JumpIfSmi(r1, &miss_before_stack_reserved);
   2522 
   2523   __ IncrementCounter(counters->call_const(), 1, r0, r3);
   2524   __ IncrementCounter(counters->call_const_fast_api(), 1, r0, r3);
   2525 
   2526   ReserveSpaceForFastApiCall(masm(), r0);
   2527 
   2528   // Check that the maps haven't changed and find a Holder as a side effect.
   2529   CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4, name,
   2530                   depth, &miss);
   2531 
   2532   GenerateFastApiDirectCall(masm(), optimization, argc);
   2533 
   2534   __ bind(&miss);
   2535   FreeSpaceForFastApiCall(masm());
   2536 
   2537   __ bind(&miss_before_stack_reserved);
   2538   GenerateMissBranch();
   2539 
   2540   // Return the generated code.
   2541   return GetCode(function);
   2542 }
   2543 
   2544 
   2545 void CallStubCompiler::CompileHandlerFrontend(Handle<Object> object,
   2546                                               Handle<JSObject> holder,
   2547                                               Handle<Name> name,
   2548                                               CheckType check,
   2549                                               Label* success) {
   2550   // ----------- S t a t e -------------
   2551   //  -- r2    : name
   2552   //  -- lr    : return address
   2553   // -----------------------------------
   2554   Label miss;
   2555   GenerateNameCheck(name, &miss);
   2556 
  // Get the receiver from the stack.
   2558   const int argc = arguments().immediate();
   2559   __ ldr(r1, MemOperand(sp, argc * kPointerSize));
   2560 
   2561   // Check that the receiver isn't a smi.
   2562   if (check != NUMBER_CHECK) {
   2563     __ JumpIfSmi(r1, &miss);
   2564   }
   2565 
  // Make sure that it's okay not to patch the on-stack receiver
   2567   // unless we're doing a receiver map check.
   2568   ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
   2569   switch (check) {
   2570     case RECEIVER_MAP_CHECK:
   2571       __ IncrementCounter(isolate()->counters()->call_const(), 1, r0, r3);
   2572 
   2573       // Check that the maps haven't changed.
   2574       CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
   2575                       name, &miss);
   2576 
   2577       // Patch the receiver on the stack with the global proxy if
   2578       // necessary.
   2579       if (object->IsGlobalObject()) {
   2580         __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
   2581         __ str(r3, MemOperand(sp, argc * kPointerSize));
   2582       }
   2583       break;
   2584 
   2585     case STRING_CHECK:
   2586       // Check that the object is a string.
   2587       __ CompareObjectType(r1, r3, r3, FIRST_NONSTRING_TYPE);
   2588       __ b(ge, &miss);
   2589       // Check that the maps starting from the prototype haven't changed.
   2590       GenerateDirectLoadGlobalFunctionPrototype(
   2591           masm(), Context::STRING_FUNCTION_INDEX, r0, &miss);
   2592       CheckPrototypes(
   2593           Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
   2594           r0, holder, r3, r1, r4, name, &miss);
   2595       break;
   2596 
   2597     case SYMBOL_CHECK:
   2598       // Check that the object is a symbol.
   2599       __ CompareObjectType(r1, r1, r3, SYMBOL_TYPE);
   2600       __ b(ne, &miss);
   2601       // Check that the maps starting from the prototype haven't changed.
   2602       GenerateDirectLoadGlobalFunctionPrototype(
   2603           masm(), Context::SYMBOL_FUNCTION_INDEX, r0, &miss);
   2604       CheckPrototypes(
   2605           Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
   2606           r0, holder, r3, r1, r4, name, &miss);
   2607       break;
   2608 
   2609     case NUMBER_CHECK: {
   2610       Label fast;
   2611       // Check that the object is a smi or a heap number.
   2612       __ JumpIfSmi(r1, &fast);
   2613       __ CompareObjectType(r1, r0, r0, HEAP_NUMBER_TYPE);
   2614       __ b(ne, &miss);
   2615       __ bind(&fast);
   2616       // Check that the maps starting from the prototype haven't changed.
   2617       GenerateDirectLoadGlobalFunctionPrototype(
   2618           masm(), Context::NUMBER_FUNCTION_INDEX, r0, &miss);
   2619       CheckPrototypes(
   2620           Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
   2621           r0, holder, r3, r1, r4, name, &miss);
   2622       break;
   2623     }
   2624     case BOOLEAN_CHECK: {
   2625       Label fast;
   2626       // Check that the object is a boolean.
   2627       __ LoadRoot(ip, Heap::kTrueValueRootIndex);
   2628       __ cmp(r1, ip);
   2629       __ b(eq, &fast);
   2630       __ LoadRoot(ip, Heap::kFalseValueRootIndex);
   2631       __ cmp(r1, ip);
   2632       __ b(ne, &miss);
   2633       __ bind(&fast);
   2634       // Check that the maps starting from the prototype haven't changed.
   2635       GenerateDirectLoadGlobalFunctionPrototype(
   2636           masm(), Context::BOOLEAN_FUNCTION_INDEX, r0, &miss);
   2637       CheckPrototypes(
   2638           Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
   2639           r0, holder, r3, r1, r4, name, &miss);
   2640       break;
   2641     }
   2642   }
   2643 
   2644   __ b(success);
   2645 
   2646   // Handle call cache miss.
   2647   __ bind(&miss);
   2648   GenerateMissBranch();
   2649 }
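
// For the primitive receiver cases above, the stub never wraps the value
// in an object; it only verifies that the relevant prototype chain is
// unchanged. Rough pseudo-C for the STRING_CHECK arm (the symbol, number
// and boolean arms differ only in the type test and function index):
//
//   if (instance_type(receiver) >= FIRST_NONSTRING_TYPE) goto miss;
//   proto = native_context[STRING_FUNCTION_INDEX]->instance_prototype;
//   CheckPrototypes(proto, ..., holder, &miss);  // maps as at compile time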
   2650 
   2651 
   2652 void CallStubCompiler::CompileHandlerBackend(Handle<JSFunction> function) {
   2653   CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
   2654       ? CALL_AS_FUNCTION
   2655       : CALL_AS_METHOD;
   2656   ParameterCount expected(function);
   2657   __ InvokeFunction(function, expected, arguments(),
   2658                     JUMP_FUNCTION, NullCallWrapper(), call_kind);
   2659 }
   2660 
   2661 
   2662 Handle<Code> CallStubCompiler::CompileCallConstant(
   2663     Handle<Object> object,
   2664     Handle<JSObject> holder,
   2665     Handle<Name> name,
   2666     CheckType check,
   2667     Handle<JSFunction> function) {
   2668   if (HasCustomCallGenerator(function)) {
   2669     Handle<Code> code = CompileCustomCall(object, holder,
   2670                                           Handle<Cell>::null(),
   2671                                           function, Handle<String>::cast(name),
   2672                                           Code::CONSTANT);
   2673     // A null handle means bail out to the regular compiler code below.
   2674     if (!code.is_null()) return code;
   2675   }
   2676 
   2677   Label success;
   2678 
   2679   CompileHandlerFrontend(object, holder, name, check, &success);
   2680   __ bind(&success);
   2681   CompileHandlerBackend(function);
   2682 
   2683   // Return the generated code.
   2684   return GetCode(function);
   2685 }
   2686 
   2687 
   2688 Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
   2689                                                       Handle<JSObject> holder,
   2690                                                       Handle<Name> name) {
   2691   // ----------- S t a t e -------------
   2692   //  -- r2    : name
   2693   //  -- lr    : return address
   2694   // -----------------------------------
   2695   Label miss;
   2696   GenerateNameCheck(name, &miss);
   2697 
   2698   // Get the number of arguments.
   2699   const int argc = arguments().immediate();
   2700   LookupResult lookup(isolate());
   2701   LookupPostInterceptor(holder, name, &lookup);
   2702 
   2703   // Get the receiver from the stack.
   2704   __ ldr(r1, MemOperand(sp, argc * kPointerSize));
   2705 
   2706   CallInterceptorCompiler compiler(this, arguments(), r2, extra_state_);
   2707   compiler.Compile(masm(), object, holder, name, &lookup, r1, r3, r4, r0,
   2708                    &miss);
   2709 
   2710   // Move returned value, the function to call, to r1.
   2711   __ mov(r1, r0);
   2712   // Restore receiver.
   2713   __ ldr(r0, MemOperand(sp, argc * kPointerSize));
   2714 
   2715   GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
   2716 
   2717   // Handle call cache miss.
   2718   __ bind(&miss);
   2719   GenerateMissBranch();
   2720 
   2721   // Return the generated code.
   2722   return GetCode(Code::INTERCEPTOR, name);
   2723 }
   2724 
   2725 
   2726 Handle<Code> CallStubCompiler::CompileCallGlobal(
   2727     Handle<JSObject> object,
   2728     Handle<GlobalObject> holder,
   2729     Handle<PropertyCell> cell,
   2730     Handle<JSFunction> function,
   2731     Handle<Name> name) {
   2732   // ----------- S t a t e -------------
   2733   //  -- r2    : name
   2734   //  -- lr    : return address
   2735   // -----------------------------------
   2736   if (HasCustomCallGenerator(function)) {
   2737     Handle<Code> code = CompileCustomCall(
   2738         object, holder, cell, function, Handle<String>::cast(name),
   2739         Code::NORMAL);
   2740     // A null handle means bail out to the regular compiler code below.
   2741     if (!code.is_null()) return code;
   2742   }
   2743 
   2744   Label miss;
   2745   GenerateNameCheck(name, &miss);
   2746 
   2747   // Get the number of arguments.
   2748   const int argc = arguments().immediate();
   2749   GenerateGlobalReceiverCheck(object, holder, name, &miss);
   2750   GenerateLoadFunctionFromCell(cell, function, &miss);
   2751 
   2752   // Patch the receiver on the stack with the global proxy if
   2753   // necessary.
   2754   if (object->IsGlobalObject()) {
   2755     __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
   2756     __ str(r3, MemOperand(sp, argc * kPointerSize));
   2757   }
   2758 
   2759   // Set up the context (function already in r1).
   2760   __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
   2761 
   2762   // Jump to the cached code (tail call).
   2763   Counters* counters = isolate()->counters();
   2764   __ IncrementCounter(counters->call_global_inline(), 1, r3, r4);
   2765   ParameterCount expected(function->shared()->formal_parameter_count());
   2766   CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
   2767       ? CALL_AS_FUNCTION
   2768       : CALL_AS_METHOD;
   2769   // We call indirectly through the code field in the function to
   2770   // allow recompilation to take effect without changing any of the
   2771   // call sites.
   2772   __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
   2773   __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION,
   2774                 NullCallWrapper(), call_kind);
   2775 
   2776   // Handle call cache miss.
   2777   __ bind(&miss);
   2778   __ IncrementCounter(counters->call_global_inline_miss(), 1, r1, r3);
   2779   GenerateMissBranch();
   2780 
   2781   // Return the generated code.
   2782   return GetCode(Code::NORMAL, name);
   2783 }
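
// Pseudo-C view of the indirection above:
//
//   code_entry = function->code_entry;   // JSFunction::kCodeEntryOffset
//   tail_call code_entry(expected_arity, actual_arity);
//
// Because every call reloads the entry from the function, recompilation
// only has to update that one field instead of patching each call site.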
   2784 
   2785 
   2786 Handle<Code> StoreStubCompiler::CompileStoreCallback(
   2787     Handle<JSObject> object,
   2788     Handle<JSObject> holder,
   2789     Handle<Name> name,
   2790     Handle<ExecutableAccessorInfo> callback) {
   2791   Label success;
   2792   HandlerFrontend(object, receiver(), holder, name, &success);
   2793   __ bind(&success);
   2794 
   2795   // Stub never generated for non-global objects that require access checks.
   2796   ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
   2797 
   2798   __ push(receiver());  // receiver
   2799   __ mov(ip, Operand(callback));  // callback info
   2800   __ push(ip);
   2801   __ mov(ip, Operand(name));
   2802   __ Push(ip, value());
   2803 
   2804   // Do tail-call to the runtime system.
   2805   ExternalReference store_callback_property =
   2806       ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
   2807   __ TailCallExternalReference(store_callback_property, 4, 1);
   2808 
   2809   // Return the generated code.
   2810   return GetCode(kind(), Code::CALLBACKS, name);
   2811 }
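
// Stack layout handed to IC_Utility(IC::kStoreCallbackProperty) by the
// pushes above, bottom to top:
//
//   [ receiver | callback info | name | value ]
//
// The 4 and 1 in TailCallExternalReference are the argument count to pop
// and the size of the result, in words.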
   2812 
   2813 
   2814 #undef __
   2815 #define __ ACCESS_MASM(masm)
   2816 
   2817 
   2818 void StoreStubCompiler::GenerateStoreViaSetter(
   2819     MacroAssembler* masm,
   2820     Handle<JSFunction> setter) {
   2821   // ----------- S t a t e -------------
   2822   //  -- r0    : value
   2823   //  -- r1    : receiver
   2824   //  -- r2    : name
   2825   //  -- lr    : return address
   2826   // -----------------------------------
   2827   {
   2828     FrameScope scope(masm, StackFrame::INTERNAL);
   2829 
   2830     // Save value register, so we can restore it later.
   2831     __ push(r0);
   2832 
   2833     if (!setter.is_null()) {
   2834       // Call the JavaScript setter with receiver and value on the stack.
   2835       __ Push(r1, r0);
   2836       ParameterCount actual(1);
   2837       ParameterCount expected(setter);
   2838       __ InvokeFunction(setter, expected, actual,
   2839                         CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
   2840     } else {
   2841       // If we generate a global code snippet for deoptimization only, remember
   2842       // the place to continue after deoptimization.
   2843       masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
   2844     }
   2845 
   2846     // We have to return the passed value, not the return value of the setter.
   2847     __ pop(r0);
   2848 
   2849     // Restore context register.
   2850     __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   2851   }
   2852   __ Ret();
   2853 }
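
// The push/pop of r0 around the setter call implements JavaScript's
// assignment semantics: the value of an assignment expression is the
// assigned value, never the setter's return value. Pseudo-C sketch:
//
//   saved = value;
//   CallSetter(receiver, value);   // setter's return value is ignored
//   return saved;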
   2854 
   2855 
   2856 #undef __
   2857 #define __ ACCESS_MASM(masm())
   2858 
   2859 
   2860 Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
   2861     Handle<JSObject> object,
   2862     Handle<Name> name) {
   2863   Label miss;
   2864 
   2865   // Check that the map of the object hasn't changed.
   2866   __ CheckMap(receiver(), scratch1(), Handle<Map>(object->map()), &miss,
   2867               DO_SMI_CHECK);
   2868 
   2869   // Perform global security token check if needed.
   2870   if (object->IsJSGlobalProxy()) {
   2871     __ CheckAccessGlobalProxy(receiver(), scratch1(), &miss);
   2872   }
   2873 
   2874   // Stub is never generated for non-global objects that require access
   2875   // checks.
   2876   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
   2877 
   2878   __ Push(receiver(), this->name(), value());
   2879 
   2880   __ mov(scratch1(), Operand(Smi::FromInt(strict_mode())));
   2881   __ push(scratch1());  // strict mode
   2882 
   2883   // Do tail-call to the runtime system.
   2884   ExternalReference store_ic_property =
   2885       ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
   2886   __ TailCallExternalReference(store_ic_property, 4, 1);
   2887 
   2888   // Handle store cache miss.
   2889   __ bind(&miss);
   2890   TailCallBuiltin(masm(), MissBuiltin(kind()));
   2891 
   2892   // Return the generated code.
   2893   return GetCode(kind(), Code::INTERCEPTOR, name);
   2894 }
   2895 
   2896 
   2897 Handle<Code> StoreStubCompiler::CompileStoreGlobal(
   2898     Handle<GlobalObject> object,
   2899     Handle<PropertyCell> cell,
   2900     Handle<Name> name) {
   2901   Label miss;
   2902 
   2903   // Check that the map of the global has not changed.
   2904   __ ldr(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset));
   2905   __ cmp(scratch1(), Operand(Handle<Map>(object->map())));
   2906   __ b(ne, &miss);
   2907 
  // Check that the value in the cell is not the hole. If it is, this
  // cell could have been deleted, and reintroducing the global would
  // require updating the property details in the global object's property
  // dictionary. We bail out to the runtime system to do that.
   2912   __ mov(scratch1(), Operand(cell));
   2913   __ LoadRoot(scratch2(), Heap::kTheHoleValueRootIndex);
   2914   __ ldr(scratch3(), FieldMemOperand(scratch1(), Cell::kValueOffset));
   2915   __ cmp(scratch3(), scratch2());
   2916   __ b(eq, &miss);
   2917 
   2918   // Store the value in the cell.
   2919   __ str(value(), FieldMemOperand(scratch1(), Cell::kValueOffset));
   2920   // Cells are always rescanned, so no write barrier here.
   2921 
   2922   Counters* counters = isolate()->counters();
   2923   __ IncrementCounter(
   2924       counters->named_store_global_inline(), 1, scratch1(), scratch2());
   2925   __ Ret();
   2926 
   2927   // Handle store cache miss.
   2928   __ bind(&miss);
   2929   __ IncrementCounter(
   2930       counters->named_store_global_inline_miss(), 1, scratch1(), scratch2());
   2931   TailCallBuiltin(masm(), MissBuiltin(kind()));
   2932 
   2933   // Return the generated code.
   2934   return GetICCode(kind(), Code::NORMAL, name);
   2935 }
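
// Pseudo-C sketch of the fast path above (illustration only):
//
//   if (receiver->map != expected_map) goto miss;
//   if (cell->value == the_hole) goto miss;  // deleted; the runtime must
//                                            // recreate the property details
//   cell->value = value;                     // no barrier: cells are always
//                                            // rescanned
//   return value;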
   2936 
   2937 
   2938 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(
   2939     Handle<JSObject> object,
   2940     Handle<JSObject> last,
   2941     Handle<Name> name,
   2942     Handle<GlobalObject> global) {
   2943   Label success;
   2944 
   2945   NonexistentHandlerFrontend(object, last, name, &success, global);
   2946 
   2947   __ bind(&success);
  // Return undefined if the maps of the full prototype chain are still the
   2949   // same and no global property with this name contains a value.
   2950   __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
   2951   __ Ret();
   2952 
   2953   // Return the generated code.
   2954   return GetCode(kind(), Code::NONEXISTENT, name);
   2955 }
   2956 
   2957 
   2958 Register* LoadStubCompiler::registers() {
   2959   // receiver, name, scratch1, scratch2, scratch3, scratch4.
   2960   static Register registers[] = { r0, r2, r3, r1, r4, r5 };
   2961   return registers;
   2962 }
   2963 
   2964 
   2965 Register* KeyedLoadStubCompiler::registers() {
   2966   // receiver, name, scratch1, scratch2, scratch3, scratch4.
   2967   static Register registers[] = { r1, r0, r2, r3, r4, r5 };
   2968   return registers;
   2969 }
   2970 
   2971 
   2972 Register* StoreStubCompiler::registers() {
   2973   // receiver, name, value, scratch1, scratch2, scratch3.
   2974   static Register registers[] = { r1, r2, r0, r3, r4, r5 };
   2975   return registers;
   2976 }
   2977 
   2978 
   2979 Register* KeyedStoreStubCompiler::registers() {
   2980   // receiver, name, value, scratch1, scratch2, scratch3.
   2981   static Register registers[] = { r2, r1, r0, r3, r4, r5 };
   2982   return registers;
   2983 }
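
// The four register conventions above, side by side:
//
//              Load   KeyedLoad   Store   KeyedStore
//   receiver    r0       r1        r1        r2
//   name/key    r2       r0        r2        r1
//   value       --       --        r0        r0
//
// Note that r0 holds the receiver for named loads, the key for keyed loads
// and the value for stores, matching the IC calling conventions on ARM.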
   2984 
   2985 
   2986 void KeyedLoadStubCompiler::GenerateNameCheck(Handle<Name> name,
   2987                                               Register name_reg,
   2988                                               Label* miss) {
   2989   __ cmp(name_reg, Operand(name));
   2990   __ b(ne, miss);
   2991 }
   2992 
   2993 
   2994 void KeyedStoreStubCompiler::GenerateNameCheck(Handle<Name> name,
   2995                                                Register name_reg,
   2996                                                Label* miss) {
   2997   __ cmp(name_reg, Operand(name));
   2998   __ b(ne, miss);
   2999 }
   3000 
   3001 
   3002 #undef __
   3003 #define __ ACCESS_MASM(masm)
   3004 
   3005 
   3006 void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
   3007                                              Handle<JSFunction> getter) {
   3008   // ----------- S t a t e -------------
   3009   //  -- r0    : receiver
   3010   //  -- r2    : name
   3011   //  -- lr    : return address
   3012   // -----------------------------------
   3013   {
   3014     FrameScope scope(masm, StackFrame::INTERNAL);
   3015 
   3016     if (!getter.is_null()) {
   3017       // Call the JavaScript getter with the receiver on the stack.
   3018       __ push(r0);
   3019       ParameterCount actual(0);
   3020       ParameterCount expected(getter);
   3021       __ InvokeFunction(getter, expected, actual,
   3022                         CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
   3023     } else {
   3024       // If we generate a global code snippet for deoptimization only, remember
   3025       // the place to continue after deoptimization.
   3026       masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
   3027     }
   3028 
   3029     // Restore context register.
   3030     __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   3031   }
   3032   __ Ret();
   3033 }
   3034 
   3035 
   3036 #undef __
   3037 #define __ ACCESS_MASM(masm())
   3038 
   3039 
   3040 Handle<Code> LoadStubCompiler::CompileLoadGlobal(
   3041     Handle<JSObject> object,
   3042     Handle<GlobalObject> global,
   3043     Handle<PropertyCell> cell,
   3044     Handle<Name> name,
   3045     bool is_dont_delete) {
   3046   Label success, miss;
   3047 
   3048   __ CheckMap(
   3049       receiver(), scratch1(), Handle<Map>(object->map()), &miss, DO_SMI_CHECK);
   3050   HandlerFrontendHeader(
   3051       object, receiver(), Handle<JSObject>::cast(global), name, &miss);
   3052 
   3053   // Get the value from the cell.
   3054   __ mov(r3, Operand(cell));
   3055   __ ldr(r4, FieldMemOperand(r3, Cell::kValueOffset));
   3056 
   3057   // Check for deleted property if property can actually be deleted.
   3058   if (!is_dont_delete) {
   3059     __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
   3060     __ cmp(r4, ip);
   3061     __ b(eq, &miss);
   3062   }
   3063 
   3064   HandlerFrontendFooter(name, &success, &miss);
   3065   __ bind(&success);
   3066 
   3067   Counters* counters = isolate()->counters();
   3068   __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3);
   3069   __ mov(r0, r4);
   3070   __ Ret();
   3071 
   3072   // Return the generated code.
   3073   return GetICCode(kind(), Code::NORMAL, name);
   3074 }
   3075 
   3076 
   3077 Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
   3078     MapHandleList* receiver_maps,
   3079     CodeHandleList* handlers,
   3080     Handle<Name> name,
   3081     Code::StubType type,
   3082     IcCheckType check) {
   3083   Label miss;
   3084 
   3085   if (check == PROPERTY) {
   3086     GenerateNameCheck(name, this->name(), &miss);
   3087   }
   3088 
   3089   __ JumpIfSmi(receiver(), &miss);
   3090   Register map_reg = scratch1();
   3091 
   3092   int receiver_count = receiver_maps->length();
   3093   int number_of_handled_maps = 0;
   3094   __ ldr(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
   3095   for (int current = 0; current < receiver_count; ++current) {
   3096     Handle<Map> map = receiver_maps->at(current);
   3097     if (!map->is_deprecated()) {
   3098       number_of_handled_maps++;
   3099       __ mov(ip, Operand(receiver_maps->at(current)));
   3100       __ cmp(map_reg, ip);
   3101       __ Jump(handlers->at(current), RelocInfo::CODE_TARGET, eq);
   3102     }
   3103   }
   3104   ASSERT(number_of_handled_maps != 0);
   3105 
   3106   __ bind(&miss);
   3107   TailCallBuiltin(masm(), MissBuiltin(kind()));
   3108 
   3109   // Return the generated code.
   3110   InlineCacheState state =
   3111       number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
   3112   return GetICCode(kind(), type, name, state);
   3113 }
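
// The dispatch emitted above is a linear chain of map compares (pseudo-C
// sketch, illustration only):
//
//   map = receiver->map;              // loaded once into map_reg
//   if (map == map_0) goto handler_0;
//   if (map == map_1) goto handler_1;
//   ...
//   goto miss;
//
// Deprecated maps get no compare at all: objects still carrying one are
// expected to be migrated to a live map, so a handler for the old map
// would be dead weight.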
   3114 
   3115 
   3116 Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
   3117     MapHandleList* receiver_maps,
   3118     CodeHandleList* handler_stubs,
   3119     MapHandleList* transitioned_maps) {
   3120   Label miss;
   3121   __ JumpIfSmi(receiver(), &miss);
   3122 
   3123   int receiver_count = receiver_maps->length();
   3124   __ ldr(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset));
   3125   for (int i = 0; i < receiver_count; ++i) {
   3126     __ mov(ip, Operand(receiver_maps->at(i)));
   3127     __ cmp(scratch1(), ip);
   3128     if (transitioned_maps->at(i).is_null()) {
   3129       __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq);
   3130     } else {
   3131       Label next_map;
   3132       __ b(ne, &next_map);
   3133       __ mov(transition_map(), Operand(transitioned_maps->at(i)));
   3134       __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, al);
   3135       __ bind(&next_map);
   3136     }
   3137   }
   3138 
   3139   __ bind(&miss);
   3140   TailCallBuiltin(masm(), MissBuiltin(kind()));
   3141 
   3142   // Return the generated code.
   3143   return GetICCode(
   3144       kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
   3145 }
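
// Same linear map dispatch as CompilePolymorphicIC above, with one
// extension: when a transition map is present, it is loaded into
// transition_map() right before the jump so the handler can migrate the
// receiver to the new elements kind as part of the store.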
   3146 
   3147 
   3148 #undef __
   3149 #define __ ACCESS_MASM(masm)
   3150 
   3151 
   3152 void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
   3153     MacroAssembler* masm) {
   3154   // ---------- S t a t e --------------
   3155   //  -- lr     : return address
   3156   //  -- r0     : key
   3157   //  -- r1     : receiver
   3158   // -----------------------------------
   3159   Label slow, miss_force_generic;
   3160 
   3161   Register key = r0;
   3162   Register receiver = r1;
   3163 
   3164   __ UntagAndJumpIfNotSmi(r2, key, &miss_force_generic);
   3165   __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
   3166   __ LoadFromNumberDictionary(&slow, r4, key, r0, r2, r3, r5);
   3167   __ Ret();
   3168 
   3169   __ bind(&slow);
   3170   __ IncrementCounter(
   3171       masm->isolate()->counters()->keyed_load_external_array_slow(),
   3172       1, r2, r3);
   3173 
   3174   // ---------- S t a t e --------------
   3175   //  -- lr     : return address
   3176   //  -- r0     : key
   3177   //  -- r1     : receiver
   3178   // -----------------------------------
   3179   TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);
   3180 
   3181   // Miss case, call the runtime.
   3182   __ bind(&miss_force_generic);
   3183 
   3184   // ---------- S t a t e --------------
   3185   //  -- lr     : return address
   3186   //  -- r0     : key
   3187   //  -- r1     : receiver
   3188   // -----------------------------------
   3189   TailCallBuiltin(masm, Builtins::kKeyedLoadIC_MissForceGeneric);
   3190 }
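
// LoadFromNumberDictionary above inlines the first few rounds of the
// open-addressed hash probe and only falls through to &slow when they all
// miss. A rough sketch of the probing (probe count, hashing and accessors
// live in the macro assembler and dictionary code; names here are
// illustrative):
//
//   hash = ComputeIntegerHash(key, seed);
//   for (i = 0; i < kInlinedProbes; i++) {
//     entry = (hash + probe_offset(i)) & capacity_mask;
//     if (KeyAt(entry) == key) return ValueAt(entry);
//   }
//   goto slow;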
   3191 
   3192 
   3193 static void GenerateSmiKeyCheck(MacroAssembler* masm,
   3194                                 Register key,
   3195                                 Register scratch0,
   3196                                 DwVfpRegister double_scratch0,
   3197                                 LowDwVfpRegister double_scratch1,
   3198                                 Label* fail) {
   3199   Label key_ok;
  // Check for a smi or a smi inside a heap number.  We convert the heap
  // number and check that the conversion is exact and that the result
  // fits into the smi range.
   3203   __ JumpIfSmi(key, &key_ok);
   3204   __ CheckMap(key,
   3205               scratch0,
   3206               Heap::kHeapNumberMapRootIndex,
   3207               fail,
   3208               DONT_DO_SMI_CHECK);
   3209   __ sub(ip, key, Operand(kHeapObjectTag));
   3210   __ vldr(double_scratch0, ip, HeapNumber::kValueOffset);
   3211   __ TryDoubleToInt32Exact(scratch0, double_scratch0, double_scratch1);
   3212   __ b(ne, fail);
   3213   __ TrySmiTag(key, scratch0, fail);
   3214   __ bind(&key_ok);
   3215 }
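
// Worked example of the check above: a key boxed as the HeapNumber 3.0
// converts exactly to int32 3 and is re-tagged as the smi 6 (3 << 1),
// while 3.5 and 2^31 fail TryDoubleToInt32Exact and TrySmiTag respectively
// and branch to `fail`. Pseudo-C:
//
//   double d = key->Number();
//   int32_t i = (int32_t)d;
//   if ((double)i != d) goto fail;        // conversion not exact
//   if (((i << 1) >> 1) != i) goto fail;  // does not fit in a 31-bit smi
//   key = i << 1;                         // smi-tag: low bit is 0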
   3216 
   3217 
   3218 void KeyedStoreStubCompiler::GenerateStoreExternalArray(
   3219     MacroAssembler* masm,
   3220     ElementsKind elements_kind) {
   3221   // ---------- S t a t e --------------
   3222   //  -- r0     : value
   3223   //  -- r1     : key
   3224   //  -- r2     : receiver
   3225   //  -- lr     : return address
   3226   // -----------------------------------
   3227   Label slow, check_heap_number, miss_force_generic;
   3228 
   3229   // Register usage.
   3230   Register value = r0;
   3231   Register key = r1;
   3232   Register receiver = r2;
  // r3 holds the elements array, then the external array's backing store.
   3234 
  // This stub is meant to be tail-jumped to; the receiver must already
  // have been verified by the caller not to be a smi.
   3237 
   3238   // Check that the key is a smi or a heap number convertible to a smi.
   3239   GenerateSmiKeyCheck(masm, key, r4, d1, d2, &miss_force_generic);
   3240 
   3241   __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
   3242 
  // Check that the index is in range.
   3244   __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
   3245   __ cmp(key, ip);
   3246   // Unsigned comparison catches both negative and too-large values.
   3247   __ b(hs, &miss_force_generic);
   3248 
   3249   // Handle both smis and HeapNumbers in the fast path. Go to the
   3250   // runtime for all other kinds of values.
   3251   // r3: external array.
   3252   if (elements_kind == EXTERNAL_PIXEL_ELEMENTS) {
   3253     // Double to pixel conversion is only implemented in the runtime for now.
   3254     __ UntagAndJumpIfNotSmi(r5, value, &slow);
   3255   } else {
   3256     __ UntagAndJumpIfNotSmi(r5, value, &check_heap_number);
   3257   }
   3258   __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
   3259 
   3260   // r3: base pointer of external storage.
   3261   // r5: value (integer).
   3262   switch (elements_kind) {
   3263     case EXTERNAL_PIXEL_ELEMENTS:
   3264       // Clamp the value to [0..255].
   3265       __ Usat(r5, 8, Operand(r5));
   3266       __ strb(r5, MemOperand(r3, key, LSR, 1));
   3267       break;
   3268     case EXTERNAL_BYTE_ELEMENTS:
   3269     case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
   3270       __ strb(r5, MemOperand(r3, key, LSR, 1));
   3271       break;
   3272     case EXTERNAL_SHORT_ELEMENTS:
   3273     case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
   3274       __ strh(r5, MemOperand(r3, key, LSL, 0));
   3275       break;
   3276     case EXTERNAL_INT_ELEMENTS:
   3277     case EXTERNAL_UNSIGNED_INT_ELEMENTS:
   3278       __ str(r5, MemOperand(r3, key, LSL, 1));
   3279       break;
   3280     case EXTERNAL_FLOAT_ELEMENTS:
   3281       // Perform int-to-float conversion and store to memory.
   3282       __ SmiUntag(r4, key);
   3283       StoreIntAsFloat(masm, r3, r4, r5, r7);
   3284       break;
   3285     case EXTERNAL_DOUBLE_ELEMENTS:
   3286       __ vmov(s2, r5);
   3287       __ vcvt_f64_s32(d0, s2);
   3288       __ add(r3, r3, Operand(key, LSL, 2));
   3289       // r3: effective address of the double element
   3290       __ vstr(d0, r3, 0);
   3291       break;
   3292     case FAST_ELEMENTS:
   3293     case FAST_SMI_ELEMENTS:
   3294     case FAST_DOUBLE_ELEMENTS:
   3295     case FAST_HOLEY_ELEMENTS:
   3296     case FAST_HOLEY_SMI_ELEMENTS:
   3297     case FAST_HOLEY_DOUBLE_ELEMENTS:
   3298     case DICTIONARY_ELEMENTS:
   3299     case NON_STRICT_ARGUMENTS_ELEMENTS:
   3300       UNREACHABLE();
   3301       break;
   3302   }
   3303 
  // Entry registers are intact; r0 holds the value, which is the return value.
   3305   __ Ret();
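  // Note on the addressing modes above: `key` is still a smi, i.e. twice
  // the element index, so LSR #1 yields the byte offset, LSL #0 the
  // halfword offset (2 * index) and LSL #1 the word offset (4 * index).
  // For EXTERNAL_PIXEL_ELEMENTS, Usat saturates the untagged value to
  // [0, 255] before the store; in pseudo-C:
  //
  //   uint8_t clamped = v < 0 ? 0 : (v > 255 ? 255 : (uint8_t)v);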
   3306 
   3307   if (elements_kind != EXTERNAL_PIXEL_ELEMENTS) {
   3308     // r3: external array.
   3309     __ bind(&check_heap_number);
   3310     __ CompareObjectType(value, r5, r6, HEAP_NUMBER_TYPE);
   3311     __ b(ne, &slow);
   3312 
   3313     __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
   3314 
   3315     // r3: base pointer of external storage.
   3316 
   3317     // The WebGL specification leaves the behavior of storing NaN and
   3318     // +/-Infinity into integer arrays basically undefined. For more
   3319     // reproducible behavior, convert these to zero.
   3320 
   3321     if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
      // vldr requires the offset to be a multiple of 4, so we cannot
      // fold -kHeapObjectTag into it.
   3324       __ sub(r5, r0, Operand(kHeapObjectTag));
   3325       __ vldr(d0, r5, HeapNumber::kValueOffset);
   3326       __ add(r5, r3, Operand(key, LSL, 1));
   3327       __ vcvt_f32_f64(s0, d0);
   3328       __ vstr(s0, r5, 0);
   3329     } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
   3330       __ sub(r5, r0, Operand(kHeapObjectTag));
   3331       __ vldr(d0, r5, HeapNumber::kValueOffset);
   3332       __ add(r5, r3, Operand(key, LSL, 2));
   3333       __ vstr(d0, r5, 0);
   3334     } else {
      // Hoisted load.  vldr requires the offset to be a multiple of 4, so
      // we cannot fold -kHeapObjectTag into it.
   3337       __ sub(r5, value, Operand(kHeapObjectTag));
   3338       __ vldr(d0, r5, HeapNumber::kValueOffset);
   3339       __ ECMAToInt32(r5, d0, r6, r7, r9, d1);
   3340 
   3341       switch (elements_kind) {
   3342         case EXTERNAL_BYTE_ELEMENTS:
   3343         case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
   3344           __ strb(r5, MemOperand(r3, key, LSR, 1));
   3345           break;
   3346         case EXTERNAL_SHORT_ELEMENTS:
   3347         case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
   3348           __ strh(r5, MemOperand(r3, key, LSL, 0));
   3349           break;
   3350         case EXTERNAL_INT_ELEMENTS:
   3351         case EXTERNAL_UNSIGNED_INT_ELEMENTS:
   3352           __ str(r5, MemOperand(r3, key, LSL, 1));
   3353           break;
   3354         case EXTERNAL_PIXEL_ELEMENTS:
   3355         case EXTERNAL_FLOAT_ELEMENTS:
   3356         case EXTERNAL_DOUBLE_ELEMENTS:
   3357         case FAST_ELEMENTS:
   3358         case FAST_SMI_ELEMENTS:
   3359         case FAST_DOUBLE_ELEMENTS:
   3360         case FAST_HOLEY_ELEMENTS:
   3361         case FAST_HOLEY_SMI_ELEMENTS:
   3362         case FAST_HOLEY_DOUBLE_ELEMENTS:
   3363         case DICTIONARY_ELEMENTS:
   3364         case NON_STRICT_ARGUMENTS_ELEMENTS:
   3365           UNREACHABLE();
   3366           break;
   3367       }
   3368     }
   3369 
    // Entry registers are intact; r0 holds the value, which is the return
    // value.
   3372     __ Ret();
   3373   }
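  // ECMAToInt32 above implements the ES5 ToInt32 conversion: NaN and
  // +/-Infinity become 0 and out-of-range values wrap modulo 2^32, which
  // keeps the integer stores reproducible for any double. For example:
  //
  //   ToInt32(NaN)        == 0
  //   ToInt32(+Infinity)  == 0
  //   ToInt32(4294967296) == 0    // 2^32 wraps to 0
  //   ToInt32(-1.5)       == -1   // truncates toward zero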
   3374 
  // Slow case; value, key and receiver are still in r0, r1 and r2.
   3376   __ bind(&slow);
   3377   __ IncrementCounter(
   3378       masm->isolate()->counters()->keyed_load_external_array_slow(),
   3379       1, r2, r3);
   3380 
   3381   // ---------- S t a t e --------------
   3382   //  -- lr     : return address
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
   3385   // -----------------------------------
   3386   TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow);
   3387 
   3388   // Miss case, call the runtime.
   3389   __ bind(&miss_force_generic);
   3390 
   3391   // ---------- S t a t e --------------
   3392   //  -- lr     : return address
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
   3395   // -----------------------------------
   3396   TailCallBuiltin(masm, Builtins::kKeyedStoreIC_MissForceGeneric);
   3397 }
   3398 
   3399 
   3400 void KeyedStoreStubCompiler::GenerateStoreFastElement(
   3401     MacroAssembler* masm,
   3402     bool is_js_array,
   3403     ElementsKind elements_kind,
   3404     KeyedAccessStoreMode store_mode) {
   3405   // ----------- S t a t e -------------
   3406   //  -- r0    : value
   3407   //  -- r1    : key
   3408   //  -- r2    : receiver
   3409   //  -- lr    : return address
  //  -- r3    : scratch (elements)
  //  -- r4    : scratch
   3412   // -----------------------------------
   3413   Label miss_force_generic, transition_elements_kind, grow, slow;
   3414   Label finish_store, check_capacity;
   3415 
   3416   Register value_reg = r0;
   3417   Register key_reg = r1;
   3418   Register receiver_reg = r2;
   3419   Register scratch = r4;
   3420   Register elements_reg = r3;
   3421   Register length_reg = r5;
   3422   Register scratch2 = r6;
   3423 
  // This stub is meant to be tail-jumped to; the receiver must already
  // have been verified by the caller not to be a smi.
   3426 
   3427   // Check that the key is a smi or a heap number convertible to a smi.
   3428   GenerateSmiKeyCheck(masm, key_reg, r4, d1, d2, &miss_force_generic);
   3429 
   3430   if (IsFastSmiElementsKind(elements_kind)) {
   3431     __ JumpIfNotSmi(value_reg, &transition_elements_kind);
   3432   }
   3433 
   3434   // Check that the key is within bounds.
   3435   __ ldr(elements_reg,
   3436          FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
   3437   if (is_js_array) {
   3438     __ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
   3439   } else {
   3440     __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
   3441   }
   3442   // Compare smis.
   3443   __ cmp(key_reg, scratch);
   3444   if (is_js_array && IsGrowStoreMode(store_mode)) {
   3445     __ b(hs, &grow);
   3446   } else {
   3447     __ b(hs, &miss_force_generic);
   3448   }
   3449 
  // Make sure elements is a fast element array and not a copy-on-write
  // (COW) array.
   3451   __ CheckMap(elements_reg,
   3452               scratch,
   3453               Heap::kFixedArrayMapRootIndex,
   3454               &miss_force_generic,
   3455               DONT_DO_SMI_CHECK);
   3456 
   3457   __ bind(&finish_store);
   3458   if (IsFastSmiElementsKind(elements_kind)) {
   3459     __ add(scratch,
   3460            elements_reg,
   3461            Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   3462     __ add(scratch, scratch, Operand::PointerOffsetFromSmiKey(key_reg));
   3463     __ str(value_reg, MemOperand(scratch));
   3464   } else {
   3465     ASSERT(IsFastObjectElementsKind(elements_kind));
   3466     __ add(scratch,
   3467            elements_reg,
   3468            Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   3469     __ add(scratch, scratch, Operand::PointerOffsetFromSmiKey(key_reg));
   3470     __ str(value_reg, MemOperand(scratch));
   3471     __ mov(receiver_reg, value_reg);
   3472     __ RecordWrite(elements_reg,  // Object.
   3473                    scratch,       // Address.
   3474                    receiver_reg,  // Value.
   3475                    kLRHasNotBeenSaved,
   3476                    kDontSaveFPRegs);
   3477   }
   3478   // value_reg (r0) is preserved.
   3479   // Done.
   3480   __ Ret();
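  // Only the object-store path above needs RecordWrite: smis are tagged
  // immediates rather than heap pointers, so the GC never has to trace
  // them. A rough sketch of what the write barrier guards (illustration
  // only, details live in the incremental marker):
  //
  //   elements[i] = value;
  //   if (is_heap_object(value) && needs_remembering(elements, value)) {
  //     remember(slot);   // remembered set / marking bookkeeping
  //   }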
   3481 
   3482   __ bind(&miss_force_generic);
   3483   TailCallBuiltin(masm, Builtins::kKeyedStoreIC_MissForceGeneric);
   3484 
   3485   __ bind(&transition_elements_kind);
   3486   TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Miss);
   3487 
   3488   if (is_js_array && IsGrowStoreMode(store_mode)) {
   3489     // Grow the array by a single element if possible.
   3490     __ bind(&grow);
   3491 
    // Make sure the array is growing by only a single element; anything
    // else must be handled by the runtime. Flags are already set by the
    // previous compare.
   3494     __ b(ne, &miss_force_generic);
   3495 
   3496     // Check for the empty array, and preallocate a small backing store if
   3497     // possible.
   3498     __ ldr(length_reg,
   3499            FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
   3500     __ ldr(elements_reg,
   3501            FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
   3502     __ CompareRoot(elements_reg, Heap::kEmptyFixedArrayRootIndex);
   3503     __ b(ne, &check_capacity);
   3504 
   3505     int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
   3506     __ Allocate(size, elements_reg, scratch, scratch2, &slow, TAG_OBJECT);
   3507 
   3508     __ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex);
   3509     __ str(scratch, FieldMemOperand(elements_reg, JSObject::kMapOffset));
   3510     __ mov(scratch, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
   3511     __ str(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
   3512     __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
   3513     for (int i = 1; i < JSArray::kPreallocatedArrayElements; ++i) {
   3514       __ str(scratch, FieldMemOperand(elements_reg, FixedArray::SizeFor(i)));
   3515     }
   3516 
   3517     // Store the element at index zero.
   3518     __ str(value_reg, FieldMemOperand(elements_reg, FixedArray::SizeFor(0)));
   3519 
   3520     // Install the new backing store in the JSArray.
   3521     __ str(elements_reg,
   3522            FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
   3523     __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg,
   3524                         scratch, kLRHasNotBeenSaved, kDontSaveFPRegs,
   3525                         EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
   3526 
   3527     // Increment the length of the array.
   3528     __ mov(length_reg, Operand(Smi::FromInt(1)));
   3529     __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
   3530     __ Ret();
   3531 
   3532     __ bind(&check_capacity);
    // Check for COW elements; in general they are not handled by this stub.
   3534     __ CheckMap(elements_reg,
   3535                 scratch,
   3536                 Heap::kFixedCOWArrayMapRootIndex,
   3537                 &miss_force_generic,
   3538                 DONT_DO_SMI_CHECK);
   3539 
   3540     __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
   3541     __ cmp(length_reg, scratch);
   3542     __ b(hs, &slow);
   3543 
   3544     // Grow the array and finish the store.
   3545     __ add(length_reg, length_reg, Operand(Smi::FromInt(1)));
   3546     __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
   3547     __ jmp(&finish_store);
   3548 
   3549     __ bind(&slow);
   3550     TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow);
   3551   }
   3552 }
   3553 
   3554 
   3555 void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
   3556     MacroAssembler* masm,
   3557     bool is_js_array,
   3558     KeyedAccessStoreMode store_mode) {
   3559   // ----------- S t a t e -------------
   3560   //  -- r0    : value
   3561   //  -- r1    : key
   3562   //  -- r2    : receiver
   3563   //  -- lr    : return address
   3564   //  -- r3    : scratch (elements backing store)
   3565   //  -- r4    : scratch
   3566   //  -- r5    : scratch
   3567   // -----------------------------------
   3568   Label miss_force_generic, transition_elements_kind, grow, slow;
   3569   Label finish_store, check_capacity;
   3570 
   3571   Register value_reg = r0;
   3572   Register key_reg = r1;
   3573   Register receiver_reg = r2;
   3574   Register elements_reg = r3;
   3575   Register scratch1 = r4;
   3576   Register scratch2 = r5;
   3577   Register length_reg = r7;
   3578 
  // This stub is meant to be tail-jumped to; the receiver must already
  // have been verified by the caller not to be a smi.
   3581 
   3582   // Check that the key is a smi or a heap number convertible to a smi.
   3583   GenerateSmiKeyCheck(masm, key_reg, r4, d1, d2, &miss_force_generic);
   3584 
   3585   __ ldr(elements_reg,
   3586          FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
   3587 
   3588   // Check that the key is within bounds.
   3589   if (is_js_array) {
   3590     __ ldr(scratch1, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
   3591   } else {
   3592     __ ldr(scratch1,
   3593            FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
   3594   }
  // Compare smis; the unsigned comparison catches both negative and
  // out-of-bounds indexes.
   3597   __ cmp(key_reg, scratch1);
   3598   if (IsGrowStoreMode(store_mode)) {
   3599     __ b(hs, &grow);
   3600   } else {
   3601     __ b(hs, &miss_force_generic);
   3602   }
   3603 
   3604   __ bind(&finish_store);
   3605   __ StoreNumberToDoubleElements(value_reg, key_reg, elements_reg,
   3606                                  scratch1, d0, &transition_elements_kind);
   3607   __ Ret();
   3608 
   3609   // Handle store cache miss, replacing the ic with the generic stub.
   3610   __ bind(&miss_force_generic);
   3611   TailCallBuiltin(masm, Builtins::kKeyedStoreIC_MissForceGeneric);
   3612 
   3613   __ bind(&transition_elements_kind);
   3614   TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Miss);
   3615 
   3616   if (is_js_array && IsGrowStoreMode(store_mode)) {
   3617     // Grow the array by a single element if possible.
   3618     __ bind(&grow);
   3619 
    // Make sure the array is growing by only a single element; anything
    // else must be handled by the runtime. Flags are already set by the
    // previous compare.
   3622     __ b(ne, &miss_force_generic);
   3623 
   3624     // Transition on values that can't be stored in a FixedDoubleArray.
   3625     Label value_is_smi;
   3626     __ JumpIfSmi(value_reg, &value_is_smi);
   3627     __ ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
   3628     __ CompareRoot(scratch1, Heap::kHeapNumberMapRootIndex);
   3629     __ b(ne, &transition_elements_kind);
   3630     __ bind(&value_is_smi);
   3631 
   3632     // Check for the empty array, and preallocate a small backing store if
   3633     // possible.
   3634     __ ldr(length_reg,
   3635            FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
   3636     __ ldr(elements_reg,
   3637            FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
   3638     __ CompareRoot(elements_reg, Heap::kEmptyFixedArrayRootIndex);
   3639     __ b(ne, &check_capacity);
   3640 
   3641     int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
   3642     __ Allocate(size, elements_reg, scratch1, scratch2, &slow, TAG_OBJECT);
   3643 
   3644     // Initialize the new FixedDoubleArray.
   3645     __ LoadRoot(scratch1, Heap::kFixedDoubleArrayMapRootIndex);
   3646     __ str(scratch1, FieldMemOperand(elements_reg, JSObject::kMapOffset));
   3647     __ mov(scratch1,
   3648            Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
   3649     __ str(scratch1,
   3650            FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset));
   3651 
   3652     __ mov(scratch1, elements_reg);
   3653     __ StoreNumberToDoubleElements(value_reg, key_reg, scratch1,
   3654                                    scratch2, d0, &transition_elements_kind);
   3655 
   3656     __ mov(scratch1, Operand(kHoleNanLower32));
   3657     __ mov(scratch2, Operand(kHoleNanUpper32));
   3658     for (int i = 1; i < JSArray::kPreallocatedArrayElements; i++) {
   3659       int offset = FixedDoubleArray::OffsetOfElementAt(i);
   3660       __ str(scratch1, FieldMemOperand(elements_reg, offset));
   3661       __ str(scratch2, FieldMemOperand(elements_reg, offset + kPointerSize));
   3662     }
   3663 
   3664     // Install the new backing store in the JSArray.
   3665     __ str(elements_reg,
   3666            FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
   3667     __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg,
   3668                         scratch1, kLRHasNotBeenSaved, kDontSaveFPRegs,
   3669                         EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
   3670 
   3671     // Increment the length of the array.
   3672     __ mov(length_reg, Operand(Smi::FromInt(1)));
   3673     __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
   3674     __ ldr(elements_reg,
   3675            FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
   3676     __ Ret();
   3677 
   3678     __ bind(&check_capacity);
   3679     // Make sure that the backing store can hold additional elements.
   3680     __ ldr(scratch1,
   3681            FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset));
   3682     __ cmp(length_reg, scratch1);
   3683     __ b(hs, &slow);
   3684 
   3685     // Grow the array and finish the store.
   3686     __ add(length_reg, length_reg, Operand(Smi::FromInt(1)));
   3687     __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
   3688     __ jmp(&finish_store);
   3689 
   3690     __ bind(&slow);
   3691     TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow);
   3692   }
   3693 }
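
// The kHoleNanLower32/kHoleNanUpper32 pair written into the preallocated
// slots above is the bit pattern V8 reserves to mean "no element" in a
// FixedDoubleArray: a NaN payload that ordinary arithmetic never produces.
// A reader can therefore detect a hole by comparing just the upper word,
// roughly:
//
//   bool is_hole = (upper_word_of(element_bits) == kHoleNanUpper32);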
   3694 
   3695 
   3696 #undef __
   3697 
   3698 } }  // namespace v8::internal
   3699 
   3700 #endif  // V8_TARGET_ARCH_ARM
   3701