// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_ARM

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


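// Probe a single table (primary or secondary) of the megamorphic stub
// cache. Each entry is a {name, code, map} triple; on a hit control jumps
// directly into the cached code object, and on a miss execution falls
// through so the caller can try the next table or enter the runtime.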
static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register receiver,
                       Register name,
                       // Number of the cache entry, not scaled.
                       Register offset,
                       Register scratch,
                       Register scratch2,
                       Register offset_scratch) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
  uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());

  // Check the relative positions of the address fields.
  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));
  ASSERT(map_off_addr > key_off_addr);
  ASSERT((map_off_addr - key_off_addr) % 4 == 0);
  ASSERT((map_off_addr - key_off_addr) < (256 * 4));

  Label miss;
  Register base_addr = scratch;
  scratch = no_reg;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
  __ add(offset_scratch, offset, Operand(offset, LSL, 1));

  // Calculate the base address of the entry.
  __ mov(base_addr, Operand(key_offset));
  __ add(base_addr, base_addr, Operand(offset_scratch, LSL, kPointerSizeLog2));
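  // base_addr now points at the entry's key slot:
  //   key_offset + entry_index * 3 * kPointerSize,
  // i.e. entry_index * 12 bytes into the table, matching the
  // sizeof(Entry) == 12 assumption checked in GenerateProbe.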

  // Check that the key in the entry matches the name.
  __ ldr(ip, MemOperand(base_addr, 0));
  __ cmp(name, ip);
  __ b(ne, &miss);

  // Check that the map matches.
  __ ldr(ip, MemOperand(base_addr, map_off_addr - key_off_addr));
  __ ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ cmp(ip, scratch2);
  __ b(ne, &miss);

  // Get the code entry from the cache.
  Register code = scratch2;
  scratch2 = no_reg;
  __ ldr(code, MemOperand(base_addr, value_off_addr - key_off_addr));

  // Check that the flags match what we're looking for.
  Register flags_reg = base_addr;
  base_addr = no_reg;
  __ ldr(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
  // It's a nice optimization if this constant is encodable in the bic insn.
  uint32_t mask = Code::kFlagsNotUsedInLookup;
  ASSERT(__ ImmediateFitsAddrMode1Instruction(mask));
  __ bic(flags_reg, flags_reg, Operand(mask));
  __ cmp(flags_reg, Operand(flags));
  __ b(ne, &miss);

#ifdef DEBUG
  if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
    __ jmp(&miss);
  } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
    __ jmp(&miss);
  }
#endif

  // Jump to the first instruction in the code stub.
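  // Writing to pc performs the branch; Code::kHeaderSize - kHeapObjectTag
  // converts the tagged Code pointer into the address of the code object's
  // first instruction.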
  __ add(pc, code, Operand(Code::kHeaderSize - kHeapObjectTag));

  // Miss: fall through.
  __ bind(&miss);
}


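// Prove that |name| is not present on |receiver| by probing the receiver's
// property dictionary. Jumps to |miss_label| if the receiver has a named
// interceptor, requires an access check, is not a JSObject, or if its
// properties array is not a dictionary; falls through once the negative
// lookup succeeds.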
void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                                    Label* miss_label,
                                                    Register receiver,
                                                    Handle<Name> name,
                                                    Register scratch0,
                                                    Register scratch1) {
  ASSERT(name->IsUniqueName());
  ASSERT(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ b(ne, miss_label);

  // Check that the receiver is a JSObject.
  __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ cmp(scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ b(lt, miss_label);

  // Load the properties array.
  Register properties = scratch0;
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ cmp(map, tmp);
  __ b(ne, miss_label);

  // Restore the temporarily used register.
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  NameDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   receiver,
                                                   properties,
                                                   name,
                                                   scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}


void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;

  // Make sure that the code is valid. The multiplying code relies on the
  // entry size being 12.
  ASSERT(sizeof(Entry) == 12);

  // Make sure that the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(name));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  // Check that the scratch, extra, extra2 and extra3 registers are valid.
  ASSERT(!scratch.is(no_reg));
  ASSERT(!extra.is(no_reg));
  ASSERT(!extra2.is(no_reg));
  ASSERT(!extra3.is(no_reg));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
                      extra2, extra3);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ ldr(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
  __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ add(scratch, scratch, Operand(ip));
  uint32_t mask = kPrimaryTableSize - 1;
  // We shift out the last two bits because they are not part of the hash and
  // they are always 01 for maps.
  __ mov(scratch, Operand(scratch, LSR, kHeapObjectTagSize));
  // Mask down the eor argument to the minimum to keep the immediate
  // ARM-encodable.
  __ eor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
  // Prefer and_ to ubfx here because ubfx takes 2 cycles.
  __ and_(scratch, scratch, Operand(mask));
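  // scratch now holds the unscaled primary table index, in effect:
  //   ((hash_field + map) >> kHeapObjectTagSize
  //    ^ (flags >> kHeapObjectTagSize)) & (kPrimaryTableSize - 1).
  // ProbeTable scales this index by the 12-byte entry size.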

  // Probe the primary table.
  ProbeTable(isolate,
             masm,
             flags,
             kPrimary,
             receiver,
             name,
             scratch,
             extra,
             extra2,
             extra3);

  // Primary miss: Compute hash for secondary probe.
  __ sub(scratch, scratch, Operand(name, LSR, kHeapObjectTagSize));
  uint32_t mask2 = kSecondaryTableSize - 1;
  __ add(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
  __ and_(scratch, scratch, Operand(mask2));
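  // scratch now holds the unscaled secondary table index, in effect:
  //   (primary_index - (name >> kHeapObjectTagSize)
  //    + (flags >> kHeapObjectTagSize)) & (kSecondaryTableSize - 1).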

  // Probe the secondary table.
  ProbeTable(isolate,
             masm,
             flags,
             kSecondary,
             receiver,
             name,
             scratch,
             extra,
             extra2,
             extra3);

  // Cache miss: fall through and let the caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
                      extra2, extra3);
}


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ ldr(prototype,
         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  __ ldr(prototype,
         FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map.  The global functions all have initial maps.
  __ ldr(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  Isolate* isolate = masm->isolate();
  // Check we're still in the same context.
  __ ldr(prototype,
         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ Move(ip, isolate->global_object());
  __ cmp(prototype, ip);
  __ b(ne, miss);
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));
  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            bool inobject,
                                            int index,
                                            Representation representation) {
  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
  int offset = index * kPointerSize;
  if (!inobject) {
    // Calculate the offset into the properties array.
    offset = offset + FixedArray::kHeaderSize;
    __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
    src = dst;
  }
  __ ldr(dst, FieldMemOperand(src, offset));
}


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
  __ b(ne, miss_label);

  // Load length directly from the JS array.
  __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Ret();
}


// Generate code to check if an object is a string.  If the object is a
// heap object, its map's instance type is left in the scratch1 register.
// If this is not needed, scratch1 and scratch2 may be the same register.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, smi);

  // Check that the object is a string.
  __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ and_(scratch2, scratch1, Operand(kIsNotStringMask));
  // The cast is to resolve the overload for the argument of 0x0.
  __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag)));
  __ b(ne, non_string_object);
}


// Generate code to load the length from a string object and return the length.
// If the receiver object is not a string or a wrapped string object, execution
// continues at the miss label. The register containing the receiver is
// potentially clobbered.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label check_wrapper;

  // Check if the object is a string, leaving the instance type in the
  // scratch1 register.
  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss, &check_wrapper);

  // Load length directly from the string.
  __ ldr(r0, FieldMemOperand(receiver, String::kLengthOffset));
  __ Ret();

  // Check if the object is a JSValue wrapper.
  __ bind(&check_wrapper);
  __ cmp(scratch1, Operand(JS_VALUE_TYPE));
  __ b(ne, miss);

  // Unwrap the value and check if the wrapped value is a string.
  __ ldr(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
  GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
  __ ldr(r0, FieldMemOperand(scratch1, String::kLengthOffset));
  __ Ret();
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(r0, scratch1);
  __ Ret();
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
                                             Handle<JSGlobalObject> global,
                                             Handle<Name> name,
                                             Register scratch,
                                             Label* miss) {
  Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  __ mov(scratch, Operand(cell));
  __ ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch, ip);
  __ b(ne, miss);
}


void StoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}


// Generate StoreTransition code; the value is passed in the r0 register.
// When leaving generated code after success, the receiver_reg and name_reg
// may be clobbered.  Upon branch to miss_label, the receiver and name
// registers have their original values.
void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                Handle<JSObject> object,
                                                LookupResult* lookup,
                                                Handle<Map> transition,
                                                Handle<Name> name,
                                                Register receiver_reg,
                                                Register storage_reg,
                                                Register value_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                Label* miss_label,
                                                Label* slow) {
  // r0 : value
  Label exit;

  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ Move(scratch1, constant);
    __ cmp(value_reg, scratch1);
    __ b(ne, miss_label);
  } else if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    Label do_store, heap_number;
    __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow);

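    // The incoming value is either a smi or a heap number; both paths
    // converge at |do_store| with the unboxed double in d0, which is then
    // written into the freshly allocated heap number in storage_reg.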
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(scratch1, value_reg);
    __ vmov(s0, scratch1);
    __ vcvt_f64_s32(d0, s0);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ vstr(d0, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ push(receiver_reg);
    __ mov(r2, Operand(transition));
    __ Push(r2, r0);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  // Update the map of the object.
  __ mov(scratch1, Operand(transition));
  __ str(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kLRHasNotBeenSaved,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  if (details.type() == CONSTANT) {
    ASSERT(value_reg.is(r0));
    __ Ret();
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ str(storage_reg, FieldMemOperand(receiver_reg, offset));
    } else {
      __ str(value_reg, FieldMemOperand(receiver_reg, offset));
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(receiver_reg,
                          offset,
                          storage_reg,
                          scratch1,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ ldr(scratch1,
           FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ str(storage_reg, FieldMemOperand(scratch1, offset));
    } else {
      __ str(value_reg, FieldMemOperand(scratch1, offset));
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(scratch1,
                          offset,
                          storage_reg,
                          receiver_reg,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register r0).
  ASSERT(value_reg.is(r0));
  __ bind(&exit);
  __ Ret();
}


// Generate StoreField code; the value is passed in the r0 register.
// When leaving generated code after success, the receiver_reg and name_reg
// may be clobbered.  Upon branch to miss_label, the receiver and name
// registers have their original values.
void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Register receiver_reg,
                                           Register name_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* miss_label) {
  // r0 : value
  Label exit;

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    // Load the double storage.
    if (index < 0) {
      int offset = object->map()->instance_size() + (index * kPointerSize);
      __ ldr(scratch1, FieldMemOperand(receiver_reg, offset));
    } else {
      __ ldr(scratch1,
             FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
      int offset = index * kPointerSize + FixedArray::kHeaderSize;
      __ ldr(scratch1, FieldMemOperand(scratch1, offset));
    }

    // Store the value into the storage.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(scratch2, value_reg);
    __ vmov(s0, scratch2);
    __ vcvt_f64_s32(d0, s0);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ vstr(d0, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
    // Return the value (register r0).
    ASSERT(value_reg.is(r0));
    __ Ret();
    return;
  }

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ str(value_reg, FieldMemOperand(receiver_reg, offset));

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Skip updating the write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Pass the now unused name_reg as a scratch register.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(receiver_reg,
                          offset,
                          name_reg,
                          scratch1,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ ldr(scratch1,
           FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ str(value_reg, FieldMemOperand(scratch1, offset));

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Skip updating the write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Ok to clobber receiver_reg and name_reg, since we return.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(scratch1,
                          offset,
                          name_reg,
                          receiver_reg,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register r0).
  ASSERT(value_reg.is(r0));
  __ bind(&exit);
  __ Ret();
}


void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                            Label* label,
                                            Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ mov(this->name(), Operand(name));
  }
}


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
  STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
  STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
  STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ mov(scratch, Operand(interceptor));
  __ push(scratch);
  __ push(receiver);
  __ push(holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj,
    IC::UtilityId id) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallExternalReference(
      ExternalReference(IC_Utility(id), masm->isolate()),
      StubCache::kInterceptorArgsLength);
}


static const int kFastApiCallArguments = FunctionCallbackArguments::kArgsLength;

// Reserves space for the extra arguments to the API function in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
                                       Register scratch) {
  __ mov(scratch, Operand(Smi::FromInt(0)));
  for (int i = 0; i < kFastApiCallArguments; i++) {
    __ push(scratch);
  }
}


// Undoes the effects of ReserveSpaceForFastApiCall.
static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
  __ Drop(kFastApiCallArguments);
}


static void GenerateFastApiDirectCall(MacroAssembler* masm,
                                      const CallOptimization& optimization,
                                      int argc,
                                      bool restore_context) {
  // ----------- S t a t e -------------
  //  -- sp[0] - sp[24]     : FunctionCallbackInfo, incl.
  //                        :  holder (set by CheckPrototypes)
  //  -- sp[28]             : last JS argument
  //  -- ...
  //  -- sp[(argc + 6) * 4] : first JS argument
  //  -- sp[(argc + 7) * 4] : receiver
  // -----------------------------------
  typedef FunctionCallbackArguments FCA;
  // Save calling context.
  __ str(cp, MemOperand(sp, FCA::kContextSaveIndex * kPointerSize));
  // Get the function and set up the context.
  Handle<JSFunction> function = optimization.constant_function();
  __ Move(r5, function);
  __ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset));
  __ str(r5, MemOperand(sp, FCA::kCalleeIndex * kPointerSize));

  // Construct the FunctionCallbackInfo.
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data(api_call_info->data(), masm->isolate());
  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
    __ Move(r0, api_call_info);
    __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset));
  } else {
    __ Move(r6, call_data);
  }
  // Store call data.
  __ str(r6, MemOperand(sp, FCA::kDataIndex * kPointerSize));
  // Store isolate.
  __ mov(r5, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ str(r5, MemOperand(sp, FCA::kIsolateIndex * kPointerSize));
  // Store ReturnValue default and ReturnValue.
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
  __ str(r5, MemOperand(sp, FCA::kReturnValueOffset * kPointerSize));
  __ str(r5, MemOperand(sp, FCA::kReturnValueDefaultValueIndex * kPointerSize));

  // Prepare arguments.
  __ mov(r2, sp);

  // Allocate the FunctionCallbackInfo structure in the arguments' space, since
  // it's not controlled by the GC.
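  // Four words, one for each FunctionCallbackInfo field written below:
  // implicit_args_, values_, length_ and is_construct_call.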
  const int kApiStackSpace = 4;

  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  // r0 = FunctionCallbackInfo&
  // The arguments are after the return address.
  __ add(r0, sp, Operand(1 * kPointerSize));
  // FunctionCallbackInfo::implicit_args_
  __ str(r2, MemOperand(r0, 0 * kPointerSize));
  // FunctionCallbackInfo::values_
  __ add(ip, r2, Operand((kFastApiCallArguments - 1 + argc) * kPointerSize));
  __ str(ip, MemOperand(r0, 1 * kPointerSize));
  // FunctionCallbackInfo::length_ = argc
  __ mov(ip, Operand(argc));
  __ str(ip, MemOperand(r0, 2 * kPointerSize));
  // FunctionCallbackInfo::is_construct_call = 0
  __ mov(ip, Operand::Zero());
  __ str(ip, MemOperand(r0, 3 * kPointerSize));

  const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
  ExternalReference ref = ExternalReference(&fun,
                                            type,
                                            masm->isolate());
  Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
  ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
  ApiFunction thunk_fun(thunk_address);
  ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
      masm->isolate());

  AllowExternalCallThatCantCauseGC scope(masm);
  MemOperand context_restore_operand(
      fp, (2 + FCA::kContextSaveIndex) * kPointerSize);
  MemOperand return_value_operand(fp,
                                  (2 + FCA::kReturnValueOffset) * kPointerSize);

  __ CallApiFunctionAndReturn(ref,
                              function_address,
                              thunk_ref,
                              r1,
                              kStackUnwindSpace,
                              return_value_operand,
                              restore_context ?
                                  &context_restore_operand : NULL);
}


// Generate a call to the API function.
static void GenerateFastApiCall(MacroAssembler* masm,
                                const CallOptimization& optimization,
                                Register receiver,
                                Register scratch,
                                int argc,
                                Register* values) {
  ASSERT(optimization.is_simple_api_call());
  ASSERT(!receiver.is(scratch));

  typedef FunctionCallbackArguments FCA;
  const int stack_space = kFastApiCallArguments + argc + 1;
  // Assign stack space for the call arguments.
  __ sub(sp, sp, Operand(stack_space * kPointerSize));
  // Write the holder to the stack frame.
  __ str(receiver, MemOperand(sp, FCA::kHolderIndex * kPointerSize));
  // Write the receiver to the stack frame.
  int index = stack_space - 1;
  __ str(receiver, MemOperand(sp, index * kPointerSize));
  // Write the arguments to the stack frame.
  for (int i = 0; i < argc; i++) {
    ASSERT(!receiver.is(values[i]));
    ASSERT(!scratch.is(values[i]));
    __ str(values[i], MemOperand(sp, index-- * kPointerSize));
  }

  GenerateFastApiDirectCall(masm, optimization, argc, true);
}


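// Compiles call ICs for objects with a named interceptor. If the lookup
// behind the interceptor resolves to a cached constant function, the
// interceptor is invoked first and the constant function is only called
// when the interceptor produces no result (CompileCacheable); otherwise
// the interceptor property load is delegated to the runtime
// (CompileRegular).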
class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  CallInterceptorCompiler(CallStubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name,
                          ExtraICState extra_ic_state)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name),
        extra_ic_state_(extra_ic_state) {}

  void Compile(MacroAssembler* masm,
               Handle<JSObject> object,
               Handle<JSObject> holder,
               Handle<Name> name,
               LookupResult* lookup,
               Register receiver,
               Register scratch1,
               Register scratch2,
               Register scratch3,
               Label* miss) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, miss);
    CallOptimization optimization(lookup);
    if (optimization.is_constant_call()) {
      CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
                       holder, lookup, name, optimization, miss);
    } else {
      CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
                     name, holder, miss);
    }
  }

 private:
  void CompileCacheable(MacroAssembler* masm,
                        Handle<JSObject> object,
                        Register receiver,
                        Register scratch1,
                        Register scratch2,
                        Register scratch3,
                        Handle<JSObject> interceptor_holder,
                        LookupResult* lookup,
                        Handle<Name> name,
                        const CallOptimization& optimization,
                        Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());
    Counters* counters = masm->isolate()->counters();
    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 = optimization.GetPrototypeDepthOfExpectedType(
          object, interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 = optimization.GetPrototypeDepthOfExpectedType(
            interceptor_holder, Handle<JSObject>(lookup->holder()));
      }
      can_do_fast_api_call =
          depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
    }

    __ IncrementCounter(counters->call_const_interceptor(), 1,
                        scratch1, scratch2);

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
                          scratch1, scratch2);
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from the receiver to the interceptor's holder
    // haven't changed and thus we can invoke the interceptor.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(
            IC::CurrentTypeOf(object, masm->isolate()), receiver,
            interceptor_holder, scratch1, scratch2, scratch3,
            name, depth1, miss);

    // Invoke the interceptor and, if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
                        &regular_invoke);

    // The interceptor returned nothing for this property.  Try to use the
    // cached constant function.

    // Check that the maps from the interceptor's holder to the constant
    // function's holder haven't changed and thus we can use the cached
    // constant function.
    if (*interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(
          IC::CurrentTypeOf(interceptor_holder, masm->isolate()), holder,
          handle(lookup->holder()), scratch1, scratch2, scratch3,
          name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // for API (object which is instanceof for the signature).  It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke the function.
    if (can_do_fast_api_call) {
      GenerateFastApiDirectCall(
          masm, optimization, arguments_.immediate(), false);
    } else {
      Handle<JSFunction> function = optimization.constant_function();
      __ Move(r0, receiver);
      stub_compiler_->GenerateJumpFunction(object, function);
    }

    // Deferred code for the fast API call case: clean up the preallocated
    // space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm);
      __ b(miss_label);
    }

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm);
    }
  }

  void CompileRegular(MacroAssembler* masm,
                      Handle<JSObject> object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      Handle<Name> name,
                      Handle<JSObject> interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(
            IC::CurrentTypeOf(object, masm->isolate()), receiver,
            interceptor_holder, scratch1, scratch2, scratch3, name, miss_label);

    // Call a runtime function to load the interceptor property.
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Save the name_ register across the call.
    __ push(name_);

    CompileCallLoadPropertyWithInterceptor(
        masm, receiver, holder, name_, interceptor_holder,
        IC::kLoadPropertyWithInterceptorForCall);

    // Restore the name_ register.
    __ pop(name_);
    // Leave the internal frame.
  }

  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           Handle<JSObject> holder_obj,
                           Register scratch,
                           Label* interceptor_succeeded) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(receiver);
      __ Push(holder, name_);
      CompileCallLoadPropertyWithInterceptor(
          masm, receiver, holder, name_, holder_obj,
          IC::kLoadPropertyWithInterceptorOnly);
      __ pop(name_);
      __ pop(holder);
      __ pop(receiver);
    }
    // If the interceptor returned the no-result sentinel, fall through so
    // that the cached constant function is used; otherwise branch to
    // |interceptor_succeeded|.
    __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r0, scratch);
    __ b(ne, interceptor_succeeded);
  }

  CallStubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
  Register name_;
  ExtraICState extra_ic_state_;
};


void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ Jump(code, RelocInfo::CODE_TARGET);
}


#undef __
#define __ ACCESS_MASM(masm())


Register StubCompiler::CheckPrototypes(Handle<Type> type,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<Name> name,
                                       int save_at_depth,
                                       Label* miss,
                                       PrototypeCheckType check) {
  Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));
  // Make sure that the type feedback oracle harvests the receiver map.
  // TODO(svenpanne) Remove this hack when all ICs are reworked.
  __ mov(scratch1, Operand(receiver_map));

  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  typedef FunctionCallbackArguments FCA;
  if (save_at_depth == depth) {
    __ str(reg, MemOperand(sp, FCA::kHolderIndex * kPointerSize));
  }

  Handle<JSObject> current = Handle<JSObject>::null();
  if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant());
  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
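  // Invariant: |reg| holds the object currently being checked and
  // |current_map| its map; each iteration validates one link of the chain
  // and then advances |reg| to its prototype.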
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap() &&
        !current_map->IsJSGlobalProxyMap()) {
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current.is_null() ||
             current->property_dictionary()->FindEntry(*name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else {
      Register map_reg = scratch1;
      if (depth != 1 || check == CHECK_ALL_MAPS) {
        // CheckMap implicitly loads the map of |reg| into |map_reg|.
        __ CheckMap(reg, map_reg, current_map, miss, DONT_DO_SMI_CHECK);
      } else {
        __ ldr(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
      }

      // Check access rights to the global object.  This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current_map->IsJSGlobalProxyMap()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(
            masm(), Handle<JSGlobalObject>::cast(current), name,
            scratch2, miss);
      }

      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (heap()->InNewSpace(*prototype)) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code.  Load it from the map.
        __ ldr(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ mov(reg, Operand(prototype));
      }
    }

    if (save_at_depth == depth) {
      __ str(reg, MemOperand(sp, FCA::kHolderIndex * kPointerSize));
    }

    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (depth != 0 || check == CHECK_ALL_MAPS) {
    // Check the holder map.
    __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK);
  }

  // Perform security check for access to the global object.
  ASSERT(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // Return the register containing the holder.
  return reg;
}


void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ b(&success);
    __ bind(miss);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ b(&success);
    GenerateRestoreName(masm(), miss, name);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


Register LoadStubCompiler::CallbackHandlerFrontend(
    Handle<Type> type,
    Register object_reg,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<Object> callback) {
  Label miss;

  Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    ASSERT(!reg.is(scratch2()));
    ASSERT(!reg.is(scratch3()));
    ASSERT(!reg.is(scratch4()));

    // Load the properties dictionary.
    Register dictionary = scratch4();
    __ ldr(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset));

    // Probe the dictionary.
    Label probe_done;
    NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
                                                     &miss,
                                                     &probe_done,
                                                     dictionary,
                                                     this->name(),
                                                     scratch2(),
                                                     scratch3());
   1298                                                      scratch3());
   1299     __ bind(&probe_done);
   1300 
   1301     // If probing finds an entry in the dictionary, scratch3 contains a
   1302     // pointer to the entry. Check that the value is the callback.
   1303     Register pointer = scratch3();
   1304     const int kElementsStartOffset = NameDictionary::kHeaderSize +
   1305         NameDictionary::kElementsStartIndex * kPointerSize;
   1306     const int kValueOffset = kElementsStartOffset + kPointerSize;
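            // NameDictionary entries are (key, value, details) triples in the
            // elements array, so the value lives one pointer past the key.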
   1307     __ ldr(scratch2(), FieldMemOperand(pointer, kValueOffset));
   1308     __ cmp(scratch2(), Operand(callback));
   1309     __ b(ne, &miss);
   1310   }
   1311 
   1312   HandlerFrontendFooter(name, &miss);
   1313   return reg;
   1314 }
   1315 
   1316 
   1317 void LoadStubCompiler::GenerateLoadField(Register reg,
   1318                                          Handle<JSObject> holder,
   1319                                          PropertyIndex field,
   1320                                          Representation representation) {
   1321   if (!reg.is(receiver())) __ mov(receiver(), reg);
   1322   if (kind() == Code::LOAD_IC) {
   1323     LoadFieldStub stub(field.is_inobject(holder),
   1324                        field.translate(holder),
   1325                        representation);
   1326     GenerateTailCall(masm(), stub.GetCode(isolate()));
   1327   } else {
   1328     KeyedLoadFieldStub stub(field.is_inobject(holder),
   1329                             field.translate(holder),
   1330                             representation);
   1331     GenerateTailCall(masm(), stub.GetCode(isolate()));
   1332   }
   1333 }
   1334 
   1335 
   1336 void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
   1337   // Return the constant value.
   1338   __ Move(r0, value);
   1339   __ Ret();
   1340 }
   1341 
   1342 
   1343 void LoadStubCompiler::GenerateLoadCallback(
   1344     const CallOptimization& call_optimization) {
   1345   GenerateFastApiCall(
   1346       masm(), call_optimization, receiver(), scratch3(), 0, NULL);
   1347 }
   1348 
   1349 
   1350 void LoadStubCompiler::GenerateLoadCallback(
   1351     Register reg,
   1352     Handle<ExecutableAccessorInfo> callback) {
   1353   // Build the AccessorInfo::args_ list on the stack and push the property
   1354   // name below the exit frame, so the GC can see them via stack scanning.
   1355   STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
   1356   STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
   1357   STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
   1358   STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
   1359   STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
   1360   STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
   1361   STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
   1362   ASSERT(!scratch2().is(reg));
   1363   ASSERT(!scratch3().is(reg));
   1364   ASSERT(!scratch4().is(reg));
   1365   __ push(receiver());
   1366   if (heap()->InNewSpace(callback->data())) {
   1367     __ Move(scratch3(), callback);
   1368     __ ldr(scratch3(), FieldMemOperand(scratch3(),
   1369                                        ExecutableAccessorInfo::kDataOffset));
   1370   } else {
   1371     __ Move(scratch3(), Handle<Object>(callback->data(), isolate()));
   1372   }
   1373   __ push(scratch3());
   1374   __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);
   1375   __ mov(scratch4(), scratch3());
   1376   __ Push(scratch3(), scratch4());
   1377   __ mov(scratch4(),
   1378          Operand(ExternalReference::isolate_address(isolate())));
   1379   __ Push(scratch4(), reg);
   1380   __ mov(scratch2(), sp);  // scratch2 = PropertyAccessorInfo::args_
   1381   __ push(name());
   1382   __ mov(r0, sp);  // r0 = Handle<Name>
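          // The stack now holds, from the top down (matching the
          // PropertyCallbackArguments indices asserted above):
          //   sp[0]: name                        <-- r0
          //   sp[1]: holder (args_[0])           <-- scratch2
          //   sp[2]: isolate
          //   sp[3]: return value default (undefined)
          //   sp[4]: return value (undefined)
          //   sp[5]: data
          //   sp[6]: receiver (this)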
   1383 
   1384   const int kApiStackSpace = 1;
   1385   FrameScope frame_scope(masm(), StackFrame::MANUAL);
   1386   __ EnterExitFrame(false, kApiStackSpace);
   1387 
   1388   // Create a PropertyAccessorInfo instance on the stack above the exit frame
   1389   // with scratch2 (internal::Object** args_) as the data.
   1390   __ str(scratch2(), MemOperand(sp, 1 * kPointerSize));
   1391   __ add(r1, sp, Operand(1 * kPointerSize));  // r1 = AccessorInfo&
   1392 
   1393   const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
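          // The extra slot accounts for the name handle pushed below args_.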
   1394   Address getter_address = v8::ToCData<Address>(callback->getter());
   1395 
   1396   ApiFunction fun(getter_address);
   1397   ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
   1398   ExternalReference ref = ExternalReference(&fun, type, isolate());
   1399 
   1400   Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
   1401   ExternalReference::Type thunk_type =
   1402       ExternalReference::PROFILING_GETTER_CALL;
   1403   ApiFunction thunk_fun(thunk_address);
   1404   ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
   1405       isolate());
   1406   __ CallApiFunctionAndReturn(ref,
   1407                               getter_address,
   1408                               thunk_ref,
   1409                               r2,
   1410                               kStackUnwindSpace,
   1411                               MemOperand(fp, 6 * kPointerSize),
   1412                               NULL);
   1413 }
   1414 
   1415 
   1416 void LoadStubCompiler::GenerateLoadInterceptor(
   1417     Register holder_reg,
   1418     Handle<Object> object,
   1419     Handle<JSObject> interceptor_holder,
   1420     LookupResult* lookup,
   1421     Handle<Name> name) {
   1422   ASSERT(interceptor_holder->HasNamedInterceptor());
   1423   ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
   1424 
   1425   // So far the most popular follow-ups for interceptor loads are FIELD
   1426   // and CALLBACKS, so inline only those; other cases may be added
   1427   // later.
   1428   bool compile_followup_inline = false;
   1429   if (lookup->IsFound() && lookup->IsCacheable()) {
   1430     if (lookup->IsField()) {
   1431       compile_followup_inline = true;
   1432     } else if (lookup->type() == CALLBACKS &&
   1433                lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
   1434       ExecutableAccessorInfo* callback =
   1435           ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
   1436       compile_followup_inline = callback->getter() != NULL &&
   1437           callback->IsCompatibleReceiver(*object);
   1438     }
   1439   }
   1440 
   1441   if (compile_followup_inline) {
   1442     // Compile the interceptor call, followed by inline code to load the
   1443     // property from further up the prototype chain if the call fails.
   1444     // Check that the maps haven't changed.
   1445     ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
   1446 
   1447     // Preserve the receiver register explicitly whenever it is different
   1448     // from the holder and is needed in case the interceptor returns without
   1449     // a result: the CALLBACKS case needs the receiver passed into C++ code,
   1450     // and the FIELD case might cause a miss during the prototype check.
   1451     bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
   1452     bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
   1453         (lookup->type() == CALLBACKS || must_perform_prototype_check);
   1454 
   1455     // Save necessary data before invoking an interceptor.
   1456     // Requires a frame to make GC aware of pushed pointers.
   1457     {
   1458       FrameScope frame_scope(masm(), StackFrame::INTERNAL);
   1459       if (must_preserve_receiver_reg) {
   1460         __ Push(receiver(), holder_reg, this->name());
   1461       } else {
   1462         __ Push(holder_reg, this->name());
   1463       }
   1464       // Invoke an interceptor.  Note: map checks from the receiver to
   1465       // the interceptor's holder have been compiled before (see a caller
   1466       // of this method).
   1467       CompileCallLoadPropertyWithInterceptor(
   1468           masm(), receiver(), holder_reg, this->name(), interceptor_holder,
   1469           IC::kLoadPropertyWithInterceptorOnly);
   1470 
   1471       // Check if the interceptor provided a value for the property.  If
   1472       // so, return immediately.
   1473       Label interceptor_failed;
   1474       __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
   1475       __ cmp(r0, scratch1());
   1476       __ b(eq, &interceptor_failed);
   1477       frame_scope.GenerateLeaveFrame();
   1478       __ Ret();
   1479 
   1480       __ bind(&interceptor_failed);
   1481       __ pop(this->name());
   1482       __ pop(holder_reg);
   1483       if (must_preserve_receiver_reg) {
   1484         __ pop(receiver());
   1485       }
   1486       // Leave the internal frame.
   1487     }
   1488 
   1489     GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
   1490   } else {  // !compile_followup_inline
   1491     // Call the runtime system to load the interceptor.
   1492     // Check that the maps haven't changed.
   1493     PushInterceptorArguments(masm(), receiver(), holder_reg,
   1494                              this->name(), interceptor_holder);
   1495 
   1496     ExternalReference ref =
   1497         ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
   1498                           isolate());
   1499     __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
   1500   }
   1501 }
   1502 
   1503 
   1504 void CallStubCompiler::GenerateNameCheck(Handle<Name> name, Label* miss) {
   1505   if (kind_ == Code::KEYED_CALL_IC) {
   1506     __ cmp(r2, Operand(name));
   1507     __ b(ne, miss);
   1508   }
   1509 }
   1510 
   1511 
   1512 void CallStubCompiler::GenerateFunctionCheck(Register function,
   1513                                              Register scratch,
   1514                                              Label* miss) {
   1515   __ JumpIfSmi(function, miss);
   1516   __ CompareObjectType(function, scratch, scratch, JS_FUNCTION_TYPE);
   1517   __ b(ne, miss);
   1518 }
   1519 
   1520 
   1521 void CallStubCompiler::GenerateLoadFunctionFromCell(
   1522     Handle<Cell> cell,
   1523     Handle<JSFunction> function,
   1524     Label* miss) {
   1525   // Get the value from the cell.
   1526   __ mov(r3, Operand(cell));
   1527   __ ldr(r1, FieldMemOperand(r3, Cell::kValueOffset));
   1528 
   1529   // Check that the cell contains the same function.
   1530   if (heap()->InNewSpace(*function)) {
   1531     // We can't embed a pointer to a function in new space, so we have
   1532     // to verify that the shared function info is unchanged. This has
   1533     // the nice side effect that multiple closures based on the same
   1534     // function can all use this call IC. Before we load through the
   1535     // function, we have to verify that it is still a function.
   1536     GenerateFunctionCheck(r1, r3, miss);
   1537 
   1538     // Check the shared function info. Make sure it hasn't changed.
   1539     __ Move(r3, Handle<SharedFunctionInfo>(function->shared()));
   1540     __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
   1541     __ cmp(r4, r3);
   1542   } else {
   1543     __ cmp(r1, Operand(function));
   1544   }
   1545   __ b(ne, miss);
   1546 }
   1547 
   1548 
   1549 void CallStubCompiler::GenerateMissBranch() {
   1550   Handle<Code> code =
   1551       isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
   1552                                                kind_,
   1553                                                extra_state());
   1554   __ Jump(code, RelocInfo::CODE_TARGET);
   1555 }
   1556 
   1557 
   1558 Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
   1559                                                 Handle<JSObject> holder,
   1560                                                 PropertyIndex index,
   1561                                                 Handle<Name> name) {
   1562   Label miss;
   1563 
   1564   Register reg = HandlerFrontendHeader(
   1565       object, holder, name, RECEIVER_MAP_CHECK, &miss);
   1566   GenerateFastPropertyLoad(masm(), r1, reg, index.is_inobject(holder),
   1567                            index.translate(holder), Representation::Tagged());
   1568   GenerateJumpFunction(object, r1, &miss);
   1569 
   1570   HandlerFrontendFooter(&miss);
   1571 
   1572   // Return the generated code.
   1573   return GetCode(Code::FAST, name);
   1574 }
   1575 
   1576 
   1577 Handle<Code> CallStubCompiler::CompileArrayCodeCall(
   1578     Handle<Object> object,
   1579     Handle<JSObject> holder,
   1580     Handle<Cell> cell,
   1581     Handle<JSFunction> function,
   1582     Handle<String> name,
   1583     Code::StubType type) {
   1584   Label miss;
   1585 
   1586   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
   1587   if (!cell.is_null()) {
   1588     ASSERT(cell->value() == *function);
   1589     GenerateLoadFunctionFromCell(cell, function, &miss);
   1590   }
   1591 
   1592   Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
   1593   site->SetElementsKind(GetInitialFastElementsKind());
   1594   Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
   1595   const int argc = arguments().immediate();
   1596   __ mov(r0, Operand(argc));
   1597   __ mov(r2, Operand(site_feedback_cell));
   1598   __ mov(r1, Operand(function));
   1599 
   1600   ArrayConstructorStub stub(isolate());
   1601   __ TailCallStub(&stub);
   1602 
   1603   HandlerFrontendFooter(&miss);
   1604 
   1605   // Return the generated code.
   1606   return GetCode(type, name);
   1607 }
   1608 
   1609 
   1610 Handle<Code> CallStubCompiler::CompileArrayPushCall(
   1611     Handle<Object> object,
   1612     Handle<JSObject> holder,
   1613     Handle<Cell> cell,
   1614     Handle<JSFunction> function,
   1615     Handle<String> name,
   1616     Code::StubType type) {
   1617   // If the object is not an array, or is observed or sealed, bail out to a
   1618   // regular call.
   1619   if (!object->IsJSArray() ||
   1620       !cell.is_null() ||
   1621       Handle<JSArray>::cast(object)->map()->is_observed() ||
   1622       !Handle<JSArray>::cast(object)->map()->is_extensible()) {
   1623     return Handle<Code>::null();
   1624   }
   1625 
   1626   Label miss;
   1627 
   1628   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
   1629   Register receiver = r0;
   1630   Register scratch = r1;
   1631 
   1632   const int argc = arguments().immediate();
   1633   if (argc == 0) {
   1634     // Nothing to do, just return the length.
   1635     __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1636     __ Drop(argc + 1);
   1637     __ Ret();
   1638   } else {
   1639     Label call_builtin;
   1640 
   1641     if (argc == 1) {  // Otherwise fall through to call the builtin.
   1642       Label attempt_to_grow_elements, with_write_barrier, check_double;
   1643 
   1644       Register elements = r6;
   1645       Register end_elements = r5;
   1646       // Get the elements array of the object.
   1647       __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
   1648 
   1649       // Check that the elements are in fast mode and writable.
   1650       __ CheckMap(elements,
   1651                   scratch,
   1652                   Heap::kFixedArrayMapRootIndex,
   1653                   &check_double,
   1654                   DONT_DO_SMI_CHECK);
   1655 
   1656       // Get the array's length into scratch and calculate new length.
   1657       __ ldr(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1658       __ add(scratch, scratch, Operand(Smi::FromInt(argc)));
   1659 
   1660       // Get the elements' length.
   1661       __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
   1662 
   1663       // Check if we could survive without allocation.
   1664       __ cmp(scratch, r4);
   1665       __ b(gt, &attempt_to_grow_elements);
   1666 
   1667       // Check if value is a smi.
   1668       __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize));
   1669       __ JumpIfNotSmi(r4, &with_write_barrier);
   1670 
   1671       // Save new length.
   1672       __ str(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1673 
   1674       // Store the value.
   1675       // We may need a register containing the address end_elements below,
   1676       // so have the store write the address back into end_elements.
   1677       __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(scratch));
   1678       const int kEndElementsOffset =
   1679           FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
   1680       __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
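              // The pre-indexed store above writes element index
              // (new_length - argc), i.e. the first of the argc slots just
              // added, and leaves that address in end_elements.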
   1681 
   1682       // The stored value is a smi, so no write barrier is needed.
   1683       __ Drop(argc + 1);
   1684       __ mov(r0, scratch);
   1685       __ Ret();
   1686 
   1687       __ bind(&check_double);
   1688 
   1689       // Check that the elements are in fast mode and writable.
   1690       __ CheckMap(elements,
   1691                   scratch,
   1692                   Heap::kFixedDoubleArrayMapRootIndex,
   1693                   &call_builtin,
   1694                   DONT_DO_SMI_CHECK);
   1695 
   1696       // Get the array's length into scratch and calculate new length.
   1697       __ ldr(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1698       __ add(scratch, scratch, Operand(Smi::FromInt(argc)));
   1699 
   1700       // Get the elements' length.
   1701       __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
   1702 
   1703       // Check if we could survive without allocation.
   1704       __ cmp(scratch, r4);
   1705       __ b(gt, &call_builtin);
   1706 
   1707       __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize));
   1708       __ StoreNumberToDoubleElements(r4, scratch, elements, r5, d0,
   1709                                      &call_builtin, argc * kDoubleSize);
   1710 
   1711       // Save new length.
   1712       __ str(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1713 
   1714       __ Drop(argc + 1);
   1715       __ mov(r0, scratch);
   1716       __ Ret();
   1717 
   1718       __ bind(&with_write_barrier);
   1719 
   1720       __ ldr(r3, FieldMemOperand(receiver, HeapObject::kMapOffset));
   1721 
   1722       if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
   1723         Label fast_object, not_fast_object;
   1724         __ CheckFastObjectElements(r3, r9, &not_fast_object);
   1725         __ jmp(&fast_object);
   1726         // In case of fast smi-only, convert to fast object, otherwise bail out.
   1727         __ bind(&not_fast_object);
   1728         __ CheckFastSmiElements(r3, r9, &call_builtin);
   1729 
   1730         __ ldr(r9, FieldMemOperand(r4, HeapObject::kMapOffset));
   1731         __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
   1732         __ cmp(r9, ip);
   1733         __ b(eq, &call_builtin);
   1734         // r0: receiver
   1735         // r3: map
   1736         Label try_holey_map;
   1737         __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
   1738                                                FAST_ELEMENTS,
   1739                                                r3,
   1740                                                r9,
   1741                                                &try_holey_map);
   1742         __ mov(r2, receiver);
   1743         ElementsTransitionGenerator::
   1744             GenerateMapChangeElementsTransition(masm(),
   1745                                                 DONT_TRACK_ALLOCATION_SITE,
   1746                                                 NULL);
   1747         __ jmp(&fast_object);
   1748 
   1749         __ bind(&try_holey_map);
   1750         __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
   1751                                                FAST_HOLEY_ELEMENTS,
   1752                                                r3,
   1753                                                r9,
   1754                                                &call_builtin);
   1755         __ mov(r2, receiver);
   1756         ElementsTransitionGenerator::
   1757             GenerateMapChangeElementsTransition(masm(),
   1758                                                 DONT_TRACK_ALLOCATION_SITE,
   1759                                                 NULL);
   1760         __ bind(&fast_object);
   1761       } else {
   1762         __ CheckFastObjectElements(r3, r3, &call_builtin);
   1763       }
   1764 
   1765       // Save new length.
   1766       __ str(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1767 
   1768       // Store the value.
   1769       // We may need a register containing the address end_elements below,
   1770       // so have the store write the address back into end_elements.
   1771       __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(scratch));
   1772       __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
   1773 
   1774       __ RecordWrite(elements,
   1775                      end_elements,
   1776                      r4,
   1777                      kLRHasNotBeenSaved,
   1778                      kDontSaveFPRegs,
   1779                      EMIT_REMEMBERED_SET,
   1780                      OMIT_SMI_CHECK);
   1781       __ Drop(argc + 1);
   1782       __ mov(r0, scratch);
   1783       __ Ret();
   1784 
   1785       __ bind(&attempt_to_grow_elements);
   1786       // scratch: array's length + 1.
   1787 
   1788       if (!FLAG_inline_new) {
   1789         __ b(&call_builtin);
   1790       }
   1791 
   1792       __ ldr(r2, MemOperand(sp, (argc - 1) * kPointerSize));
   1793       // Growing elements that are SMI-only requires special handling in case
   1794       // the new element is non-Smi. For now, delegate to the builtin.
   1795       Label no_fast_elements_check;
   1796       __ JumpIfSmi(r2, &no_fast_elements_check);
   1797       __ ldr(r9, FieldMemOperand(receiver, HeapObject::kMapOffset));
   1798       __ CheckFastObjectElements(r9, r9, &call_builtin);
   1799       __ bind(&no_fast_elements_check);
   1800 
   1801       ExternalReference new_space_allocation_top =
   1802           ExternalReference::new_space_allocation_top_address(isolate());
   1803       ExternalReference new_space_allocation_limit =
   1804           ExternalReference::new_space_allocation_limit_address(isolate());
   1805 
   1806       const int kAllocationDelta = 4;
   1807       // Load top and check if it is the end of elements.
   1808       __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(scratch));
   1809       __ add(end_elements, end_elements, Operand(kEndElementsOffset));
   1810       __ mov(r4, Operand(new_space_allocation_top));
   1811       __ ldr(r3, MemOperand(r4));
   1812       __ cmp(end_elements, r3);
   1813       __ b(ne, &call_builtin);
   1814 
   1815       __ mov(r9, Operand(new_space_allocation_limit));
   1816       __ ldr(r9, MemOperand(r9));
   1817       __ add(r3, r3, Operand(kAllocationDelta * kPointerSize));
   1818       __ cmp(r3, r9);
   1819       __ b(hi, &call_builtin);
   1820 
   1821       // We fit and could grow elements.
   1822       // Update new_space_allocation_top.
   1823       __ str(r3, MemOperand(r4));
   1824       // Push the argument.
   1825       __ str(r2, MemOperand(end_elements));
   1826       // Fill the rest with holes.
   1827       __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
   1828       for (int i = 1; i < kAllocationDelta; i++) {
   1829         __ str(r3, MemOperand(end_elements, i * kPointerSize));
   1830       }
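              // The elements backing store has been grown in place by
              // kAllocationDelta slots: one holds the pushed argument and the
              // other three are hole-filled spares.  This works only because
              // the old backing store ended exactly at the new-space
              // allocation top.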
   1831 
   1832       // Update elements' and array's sizes.
   1833       __ str(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1834       __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
   1835       __ add(r4, r4, Operand(Smi::FromInt(kAllocationDelta)));
   1836       __ str(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
   1837 
   1838       // Elements are in new space, so write barrier is not required.
   1839       __ Drop(argc + 1);
   1840       __ mov(r0, scratch);
   1841       __ Ret();
   1842     }
   1843     __ bind(&call_builtin);
   1844     __ TailCallExternalReference(
   1845         ExternalReference(Builtins::c_ArrayPush, isolate()), argc + 1, 1);
   1846   }
   1847 
   1848   HandlerFrontendFooter(&miss);
   1849 
   1850   // Return the generated code.
   1851   return GetCode(type, name);
   1852 }
   1853 
   1854 
   1855 Handle<Code> CallStubCompiler::CompileArrayPopCall(
   1856     Handle<Object> object,
   1857     Handle<JSObject> holder,
   1858     Handle<Cell> cell,
   1859     Handle<JSFunction> function,
   1860     Handle<String> name,
   1861     Code::StubType type) {
   1862   // If the object is not an array, or is observed or sealed, bail out to a
   1863   // regular call.
   1864   if (!object->IsJSArray() ||
   1865       !cell.is_null() ||
   1866       Handle<JSArray>::cast(object)->map()->is_observed() ||
   1867       !Handle<JSArray>::cast(object)->map()->is_extensible()) {
   1868     return Handle<Code>::null();
   1869   }
   1870 
   1871   Label miss, return_undefined, call_builtin;
   1872   Register receiver = r0;
   1873   Register scratch = r1;
   1874   Register elements = r3;
   1875 
   1876   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
   1877 
   1878   // Get the elements array of the object.
   1879   __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
   1880 
   1881   // Check that the elements are in fast mode and writable.
   1882   __ CheckMap(elements,
   1883               scratch,
   1884               Heap::kFixedArrayMapRootIndex,
   1885               &call_builtin,
   1886               DONT_DO_SMI_CHECK);
   1887 
   1888   // Get the array's length into r4 and calculate new length.
   1889   __ ldr(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1890   __ sub(r4, r4, Operand(Smi::FromInt(1)), SetCC);
   1891   __ b(lt, &return_undefined);
   1892 
   1893   // Get the last element.
   1894   __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
   1895   // We can't address the last element in one operation. Compute the more
   1896   // expensive shift first, and use an offset later on.
   1897   __ add(elements, elements, Operand::PointerOffsetFromSmiKey(r4));
   1898   __ ldr(scratch, FieldMemOperand(elements, FixedArray::kHeaderSize));
   1899   __ cmp(scratch, r6);
   1900   __ b(eq, &call_builtin);
   1901 
   1902   // Set the array's length.
   1903   __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1904 
   1905   // Fill with the hole.
   1906   __ str(r6, FieldMemOperand(elements, FixedArray::kHeaderSize));
   1907   const int argc = arguments().immediate();
   1908   __ Drop(argc + 1);
   1909   __ mov(r0, scratch);
   1910   __ Ret();
   1911 
   1912   __ bind(&return_undefined);
   1913   __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
   1914   __ Drop(argc + 1);
   1915   __ Ret();
   1916 
   1917   __ bind(&call_builtin);
   1918   __ TailCallExternalReference(
   1919       ExternalReference(Builtins::c_ArrayPop, isolate()), argc + 1, 1);
   1920 
   1921   HandlerFrontendFooter(&miss);
   1922 
   1923   // Return the generated code.
   1924   return GetCode(type, name);
   1925 }
   1926 
   1927 
   1928 Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
   1929     Handle<Object> object,
   1930     Handle<JSObject> holder,
   1931     Handle<Cell> cell,
   1932     Handle<JSFunction> function,
   1933     Handle<String> name,
   1934     Code::StubType type) {
   1935   // If object is not a string, bail out to regular call.
   1936   if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
   1937 
   1938   Label miss;
   1939   Label name_miss;
   1940   Label index_out_of_range;
   1941   Label* index_out_of_range_label = &index_out_of_range;
   1942 
   1943   if (kind_ == Code::CALL_IC &&
   1944       (CallICBase::StringStubState::decode(extra_state()) ==
   1945        DEFAULT_STRING_STUB)) {
   1946     index_out_of_range_label = &miss;
   1947   }
   1948 
   1949   HandlerFrontendHeader(object, holder, name, STRING_CHECK, &name_miss);
   1950 
   1951   Register receiver = r0;
   1952   Register index = r4;
   1953   Register result = r1;
   1954   const int argc = arguments().immediate();
   1955   __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
   1956   if (argc > 0) {
   1957     __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
   1958   } else {
   1959     __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
   1960   }
   1961 
   1962   StringCharCodeAtGenerator generator(receiver,
   1963                                       index,
   1964                                       result,
   1965                                       &miss,  // When not a string.
   1966                                       &miss,  // When not a number.
   1967                                       index_out_of_range_label,
   1968                                       STRING_INDEX_IS_NUMBER);
   1969   generator.GenerateFast(masm());
   1970   __ Drop(argc + 1);
   1971   __ mov(r0, result);
   1972   __ Ret();
   1973 
   1974   StubRuntimeCallHelper call_helper;
   1975   generator.GenerateSlow(masm(), call_helper);
   1976 
   1977   if (index_out_of_range.is_linked()) {
   1978     __ bind(&index_out_of_range);
   1979     __ LoadRoot(r0, Heap::kNanValueRootIndex);
   1980     __ Drop(argc + 1);
   1981     __ Ret();
   1982   }
   1983 
   1984   __ bind(&miss);
   1985   // Restore function name in r2.
   1986   __ Move(r2, name);
   1987   HandlerFrontendFooter(&name_miss);
   1988 
   1989   // Return the generated code.
   1990   return GetCode(type, name);
   1991 }
   1992 
   1993 
   1994 Handle<Code> CallStubCompiler::CompileStringCharAtCall(
   1995     Handle<Object> object,
   1996     Handle<JSObject> holder,
   1997     Handle<Cell> cell,
   1998     Handle<JSFunction> function,
   1999     Handle<String> name,
   2000     Code::StubType type) {
   2001   // If object is not a string, bail out to regular call.
   2002   if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
   2003 
   2004   const int argc = arguments().immediate();
   2005   Label miss;
   2006   Label name_miss;
   2007   Label index_out_of_range;
   2008   Label* index_out_of_range_label = &index_out_of_range;
   2009   if (kind_ == Code::CALL_IC &&
   2010       (CallICBase::StringStubState::decode(extra_state()) ==
   2011        DEFAULT_STRING_STUB)) {
   2012     index_out_of_range_label = &miss;
   2013   }
   2014 
   2015   HandlerFrontendHeader(object, holder, name, STRING_CHECK, &name_miss);
   2016 
   2017   Register receiver = r0;
   2018   Register index = r4;
   2019   Register scratch = r3;
   2020   Register result = r1;
   2021   if (argc > 0) {
   2022     __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
   2023   } else {
   2024     __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
   2025   }
   2026 
   2027   StringCharAtGenerator generator(receiver,
   2028                                   index,
   2029                                   scratch,
   2030                                   result,
   2031                                   &miss,  // When not a string.
   2032                                   &miss,  // When not a number.
   2033                                   index_out_of_range_label,
   2034                                   STRING_INDEX_IS_NUMBER);
   2035   generator.GenerateFast(masm());
   2036   __ Drop(argc + 1);
   2037   __ mov(r0, result);
   2038   __ Ret();
   2039 
   2040   StubRuntimeCallHelper call_helper;
   2041   generator.GenerateSlow(masm(), call_helper);
   2042 
   2043   if (index_out_of_range.is_linked()) {
   2044     __ bind(&index_out_of_range);
   2045     __ LoadRoot(r0, Heap::kempty_stringRootIndex);
   2046     __ Drop(argc + 1);
   2047     __ Ret();
   2048   }
   2049 
   2050   __ bind(&miss);
   2051   // Restore function name in r2.
   2052   __ Move(r2, name);
   2053   HandlerFrontendFooter(&name_miss);
   2054 
   2055   // Return the generated code.
   2056   return GetCode(type, name);
   2057 }
   2058 
   2059 
   2060 Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
   2061     Handle<Object> object,
   2062     Handle<JSObject> holder,
   2063     Handle<Cell> cell,
   2064     Handle<JSFunction> function,
   2065     Handle<String> name,
   2066     Code::StubType type) {
   2067   const int argc = arguments().immediate();
   2068 
   2069   // If the object is not a JSObject or we got an unexpected number of
   2070   // arguments, bail out to the regular call.
   2071   if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
   2072 
   2073   Label miss;
   2074 
   2075   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
   2076   if (!cell.is_null()) {
   2077     ASSERT(cell->value() == *function);
   2078     GenerateLoadFunctionFromCell(cell, function, &miss);
   2079   }
   2080 
   2081   // Load the char code argument.
   2082   Register code = r1;
   2083   __ ldr(code, MemOperand(sp, 0 * kPointerSize));
   2084 
   2085   // Check that the code is a smi.
   2086   Label slow;
   2087   __ JumpIfNotSmi(code, &slow);
   2088 
   2089   // Convert the smi code to uint16.
   2090   __ and_(code, code, Operand(Smi::FromInt(0xffff)));
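          // Masking with the tagged constant Smi::FromInt(0xffff) keeps the
          // low 16 payload bits and leaves the smi tag bit clear, so the
          // result is a valid smi in [0, 0xffff].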
   2091 
   2092   StringCharFromCodeGenerator generator(code, r0);
   2093   generator.GenerateFast(masm());
   2094   __ Drop(argc + 1);
   2095   __ Ret();
   2096 
   2097   StubRuntimeCallHelper call_helper;
   2098   generator.GenerateSlow(masm(), call_helper);
   2099 
   2100   __ bind(&slow);
   2101   // We do not have to patch the receiver because the function makes no use of
   2102   // it.
   2103   GenerateJumpFunctionIgnoreReceiver(function);
   2104 
   2105   HandlerFrontendFooter(&miss);
   2106 
   2107   // Return the generated code.
   2108   return GetCode(type, name);
   2109 }
   2110 
   2111 
   2112 Handle<Code> CallStubCompiler::CompileMathFloorCall(
   2113     Handle<Object> object,
   2114     Handle<JSObject> holder,
   2115     Handle<Cell> cell,
   2116     Handle<JSFunction> function,
   2117     Handle<String> name,
   2118     Code::StubType type) {
   2119   const int argc = arguments().immediate();
   2120   // If the object is not a JSObject or we got an unexpected number of
   2121   // arguments, bail out to the regular call.
   2122   if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
   2123 
   2124   Label miss, slow;
   2125 
   2126   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
   2127   if (!cell.is_null()) {
   2128     ASSERT(cell->value() == *function);
   2129     GenerateLoadFunctionFromCell(cell, function, &miss);
   2130   }
   2131 
   2132   // Load the (only) argument into r0.
   2133   __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
   2134 
   2135   // If the argument is a smi, just return.
   2136   __ SmiTst(r0);
   2137   __ Drop(argc + 1, eq);
   2138   __ Ret(eq);
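          // Drop and Ret above are predicated on eq (SmiTst sets eq for a
          // smi), so a smi argument is returned unchanged right here.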
   2139 
   2140   __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
   2141 
   2142   Label smi_check, just_return;
   2143 
   2144   // Load the HeapNumber value.
   2145   // We will need access to the value in the core registers, so we load it
   2146   // with ldrd and move it to the FPU. This also spares a sub instruction
   2147   // for updating the HeapNumber value address, as vldr expects an offset
   2148   // that is a multiple of 4.
   2149   __ Ldrd(r4, r5, FieldMemOperand(r0, HeapNumber::kValueOffset));
   2150   __ vmov(d1, r4, r5);
   2151 
   2152   // Check for NaN, infinities and -0.
   2153   // They are invariant under Math.floor, so just
   2154   // return the original argument.
   2155   __ Sbfx(r3, r5, HeapNumber::kExponentShift, HeapNumber::kExponentBits);
   2156   __ cmp(r3, Operand(-1));
   2157   __ b(eq, &just_return);
   2158   __ eor(r3, r5, Operand(0x80000000u));
   2159   __ orr(r3, r3, r4, SetCC);
   2160   __ b(eq, &just_return);
   2161   // Test for values that can be exactly represented as a
   2162   // signed 32-bit integer.
   2163   __ TryDoubleToInt32Exact(r0, d1, d2);
   2164   // If exact, go on to the smi check.
   2165   __ b(eq, &smi_check);
   2166   __ cmp(r5, Operand(0));
   2167 
   2168   // If the input is in ]+0, +inf[, the cmp has cleared the overflow and
   2169   // negative flags (V=0 and N=0), so the two instructions below have no
   2170   // effect and we fall through to smi_check to see if the result is a smi.
   2171 
   2172   // If the input is in ]-inf, -0[, subtract one and go to slow on
   2173   // overflow.  Otherwise we fall through to the smi check.
   2174   // Hint: if x is a negative, non-integer number,
   2175   // floor(x) == round_to_zero(x) - 1.
   2176   __ sub(r0, r0, Operand(1), SetCC, mi);
   2177   __ b(vs, &slow);
   2178 
   2179   __ bind(&smi_check);
   2180   // Check if the result fits into a smi. If we had an overflow,
   2181   // the result is either 0x80000000 or 0x7FFFFFFF and won't fit into a smi.
   2182   // If the result doesn't fit into a smi, branch to slow.
   2183   __ SmiTag(r0, SetCC);
   2184   __ b(vs, &slow);
   2185 
   2186   __ bind(&just_return);
   2187   __ Drop(argc + 1);
   2188   __ Ret();
   2189 
   2190   __ bind(&slow);
   2191   // We do not have to patch the receiver because the function makes no use of
   2192   // it.
   2193   GenerateJumpFunctionIgnoreReceiver(function);
   2194 
   2195   HandlerFrontendFooter(&miss);
   2196 
   2197   // Return the generated code.
   2198   return GetCode(type, name);
   2199 }
   2200 
   2201 
   2202 Handle<Code> CallStubCompiler::CompileMathAbsCall(
   2203     Handle<Object> object,
   2204     Handle<JSObject> holder,
   2205     Handle<Cell> cell,
   2206     Handle<JSFunction> function,
   2207     Handle<String> name,
   2208     Code::StubType type) {
   2209   const int argc = arguments().immediate();
   2210   // If the object is not a JSObject or we got an unexpected number of
   2211   // arguments, bail out to the regular call.
   2212   if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
   2213 
   2214   Label miss;
   2215 
   2216   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
   2217   if (!cell.is_null()) {
   2218     ASSERT(cell->value() == *function);
   2219     GenerateLoadFunctionFromCell(cell, function, &miss);
   2220   }
   2221 
   2222   // Load the (only) argument into r0.
   2223   __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
   2224 
   2225   // Check if the argument is a smi.
   2226   Label not_smi;
   2227   __ JumpIfNotSmi(r0, &not_smi);
   2228 
   2229   // Do bitwise not or do nothing depending on the sign of the
   2230   // argument.
   2231   __ eor(r1, r0, Operand(r0, ASR, kBitsPerInt - 1));
   2232 
   2233   // Add 1 or do nothing depending on the sign of the argument.
   2234   __ sub(r0, r1, Operand(r0, ASR, kBitsPerInt - 1), SetCC);
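          // Branch-free abs: with mask = x >> 31 (arithmetic shift), the
          // result is (x ^ mask) - mask.  For x < 0 the mask is -1, so this
          // computes ~x + 1 == -x; for x >= 0 the mask is 0 and x is
          // unchanged.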
   2235 
   2236   // If the result is still negative, go to the slow case.
   2237   // This only happens for the most negative smi.
   2238   Label slow;
   2239   __ b(mi, &slow);
   2240 
   2241   // Smi case done.
   2242   __ Drop(argc + 1);
   2243   __ Ret();
   2244 
   2245   // Check if the argument is a heap number and load its exponent and
   2246   // sign.
   2247   __ bind(&not_smi);
   2248   __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
   2249   __ ldr(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));
   2250 
   2251   // Check the sign of the argument. If the argument is positive,
   2252   // just return it.
   2253   Label negative_sign;
   2254   __ tst(r1, Operand(HeapNumber::kSignMask));
   2255   __ b(ne, &negative_sign);
   2256   __ Drop(argc + 1);
   2257   __ Ret();
   2258 
   2259   // If the argument is negative, clear the sign, and return a new
   2260   // number.
   2261   __ bind(&negative_sign);
   2262   __ eor(r1, r1, Operand(HeapNumber::kSignMask));
   2263   __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
   2264   __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
   2265   __ AllocateHeapNumber(r0, r4, r5, r6, &slow);
   2266   __ str(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));
   2267   __ str(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
   2268   __ Drop(argc + 1);
   2269   __ Ret();
   2270 
   2271   __ bind(&slow);
   2272   // We do not have to patch the receiver because the function makes no use of
   2273   // it.
   2274   GenerateJumpFunctionIgnoreReceiver(function);
   2275 
   2276   HandlerFrontendFooter(&miss);
   2277 
   2278   // Return the generated code.
   2279   return GetCode(type, name);
   2280 }
   2281 
   2282 
   2283 Handle<Code> CallStubCompiler::CompileFastApiCall(
   2284     const CallOptimization& optimization,
   2285     Handle<Object> object,
   2286     Handle<JSObject> holder,
   2287     Handle<Cell> cell,
   2288     Handle<JSFunction> function,
   2289     Handle<String> name) {
   2290   Counters* counters = isolate()->counters();
   2291 
   2292   ASSERT(optimization.is_simple_api_call());
   2293   // Bail out if the object is a global object, as we don't want to
   2294   // repatch it to the global receiver.
   2295   if (object->IsGlobalObject()) return Handle<Code>::null();
   2296   if (!cell.is_null()) return Handle<Code>::null();
   2297   if (!object->IsJSObject()) return Handle<Code>::null();
   2298   int depth = optimization.GetPrototypeDepthOfExpectedType(
   2299       Handle<JSObject>::cast(object), holder);
   2300   if (depth == kInvalidProtoDepth) return Handle<Code>::null();
   2301 
   2302   Label miss, miss_before_stack_reserved;
   2303   GenerateNameCheck(name, &miss_before_stack_reserved);
   2304 
   2305   // Get the receiver from the stack.
   2306   const int argc = arguments().immediate();
   2307   __ ldr(r1, MemOperand(sp, argc * kPointerSize));
   2308 
   2309   // Check that the receiver isn't a smi.
   2310   __ JumpIfSmi(r1, &miss_before_stack_reserved);
   2311 
   2312   __ IncrementCounter(counters->call_const(), 1, r0, r3);
   2313   __ IncrementCounter(counters->call_const_fast_api(), 1, r0, r3);
   2314 
   2315   ReserveSpaceForFastApiCall(masm(), r0);
   2316 
   2317   // Check that the maps haven't changed and find the holder as a side effect.
   2318   CheckPrototypes(
   2319       IC::CurrentTypeOf(object, isolate()),
   2320       r1, holder, r0, r3, r4, name, depth, &miss);
   2321 
   2322   GenerateFastApiDirectCall(masm(), optimization, argc, false);
   2323 
   2324   __ bind(&miss);
   2325   FreeSpaceForFastApiCall(masm());
   2326 
   2327   HandlerFrontendFooter(&miss_before_stack_reserved);
   2328 
   2329   // Return the generated code.
   2330   return GetCode(function);
   2331 }
   2332 
   2333 
   2334 void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
   2335   Label success;
   2336   // Check that the object is a boolean.
   2337   __ LoadRoot(ip, Heap::kTrueValueRootIndex);
   2338   __ cmp(object, ip);
   2339   __ b(eq, &success);
   2340   __ LoadRoot(ip, Heap::kFalseValueRootIndex);
   2341   __ cmp(object, ip);
   2342   __ b(ne, miss);
   2343   __ bind(&success);
   2344 }
   2345 
   2346 
   2347 void CallStubCompiler::PatchGlobalProxy(Handle<Object> object) {
   2348   if (object->IsGlobalObject()) {
   2349     const int argc = arguments().immediate();
   2350     const int receiver_offset = argc * kPointerSize;
   2351     __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
   2352     __ str(r3, MemOperand(sp, receiver_offset));
   2353   }
   2354 }
   2355 
   2356 
   2357 Register CallStubCompiler::HandlerFrontendHeader(Handle<Object> object,
   2358                                                  Handle<JSObject> holder,
   2359                                                  Handle<Name> name,
   2360                                                  CheckType check,
   2361                                                  Label* miss) {
   2362   // ----------- S t a t e -------------
   2363   //  -- r2    : name
   2364   //  -- lr    : return address
   2365   // -----------------------------------
   2366   GenerateNameCheck(name, miss);
   2367 
   2368   Register reg = r0;
   2369 
   2370   // Get the receiver from the stack.
   2371   const int argc = arguments().immediate();
   2372   const int receiver_offset = argc * kPointerSize;
   2373   __ ldr(r0, MemOperand(sp, receiver_offset));
   2374 
   2375   // Check that the receiver isn't a smi.
   2376   if (check != NUMBER_CHECK) {
   2377     __ JumpIfSmi(r0, miss);
   2378   }
   2379 
   2380   // Make sure that it's okay not to patch the on-stack receiver
   2381   // unless we're doing a receiver map check.
   2382   ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
   2383   switch (check) {
   2384     case RECEIVER_MAP_CHECK:
   2385       __ IncrementCounter(isolate()->counters()->call_const(), 1, r1, r3);
   2386 
   2387       // Check that the maps haven't changed.
   2388       reg = CheckPrototypes(
   2389           IC::CurrentTypeOf(object, isolate()),
   2390           reg, holder, r1, r3, r4, name, miss);
   2391       break;
   2392 
   2393     case STRING_CHECK: {
   2394       // Check that the object is a string.
   2395       __ CompareObjectType(reg, r3, r3, FIRST_NONSTRING_TYPE);
   2396       __ b(ge, miss);
   2397       // Check that the maps starting from the prototype haven't changed.
   2398       GenerateDirectLoadGlobalFunctionPrototype(
   2399           masm(), Context::STRING_FUNCTION_INDEX, r1, miss);
   2400       break;
   2401     }
   2402     case SYMBOL_CHECK: {
   2403       // Check that the object is a symbol.
   2404       __ CompareObjectType(reg, r3, r3, SYMBOL_TYPE);
   2405       __ b(ne, miss);
   2406       // Check that the maps starting from the prototype haven't changed.
   2407       GenerateDirectLoadGlobalFunctionPrototype(
   2408           masm(), Context::SYMBOL_FUNCTION_INDEX, r1, miss);
   2409       break;
   2410     }
   2411     case NUMBER_CHECK: {
   2412       Label fast;
   2413       // Check that the object is a smi or a heap number.
   2414       __ JumpIfSmi(reg, &fast);
   2415       __ CompareObjectType(reg, r3, r3, HEAP_NUMBER_TYPE);
   2416       __ b(ne, miss);
   2417       __ bind(&fast);
   2418       // Check that the maps starting from the prototype haven't changed.
   2419       GenerateDirectLoadGlobalFunctionPrototype(
   2420           masm(), Context::NUMBER_FUNCTION_INDEX, r1, miss);
   2421       break;
   2422     }
   2423     case BOOLEAN_CHECK: {
   2424       GenerateBooleanCheck(reg, miss);
   2425 
   2426       // Check that the maps starting from the prototype haven't changed.
   2427       GenerateDirectLoadGlobalFunctionPrototype(
   2428           masm(), Context::BOOLEAN_FUNCTION_INDEX, r1, miss);
   2429       break;
   2430     }
   2431   }
   2432 
   2433   if (check != RECEIVER_MAP_CHECK) {
   2434     Handle<Object> prototype(object->GetPrototype(isolate()), isolate());
   2435     reg = CheckPrototypes(
   2436         IC::CurrentTypeOf(prototype, isolate()),
   2437         r1, holder, r1, r3, r4, name, miss);
   2438   }
   2439 
   2440   return reg;
   2441 }
   2442 
   2443 
   2444 void CallStubCompiler::GenerateJumpFunction(Handle<Object> object,
   2445                                             Register function,
   2446                                             Label* miss) {
   2447   ASSERT(function.is(r1));
   2448   // Check that the function really is a function.
   2449   GenerateFunctionCheck(function, r3, miss);
   2450   PatchGlobalProxy(object);
   2451 
   2452   // Invoke the function.
   2453   __ InvokeFunction(r1, arguments(), JUMP_FUNCTION,
   2454                     NullCallWrapper(), call_kind());
   2455 }
   2456 
   2457 
   2458 Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
   2459                                                       Handle<JSObject> holder,
   2460                                                       Handle<Name> name) {
   2461   Label miss;
   2462   GenerateNameCheck(name, &miss);
   2463 
   2464   // Get the number of arguments.
   2465   const int argc = arguments().immediate();
   2466   LookupResult lookup(isolate());
   2467   LookupPostInterceptor(holder, name, &lookup);
   2468 
   2469   // Get the receiver from the stack.
   2470   __ ldr(r1, MemOperand(sp, argc * kPointerSize));
   2471 
   2472   CallInterceptorCompiler compiler(this, arguments(), r2, extra_state());
   2473   compiler.Compile(masm(), object, holder, name, &lookup, r1, r3, r4, r0,
   2474                    &miss);
   2475 
   2476   // Move returned value, the function to call, to r1.
   2477   __ mov(r1, r0);
   2478   // Restore receiver.
   2479   __ ldr(r0, MemOperand(sp, argc * kPointerSize));
   2480 
   2481   GenerateJumpFunction(object, r1, &miss);
   2482 
   2483   HandlerFrontendFooter(&miss);
   2484 
   2485   // Return the generated code.
   2486   return GetCode(Code::FAST, name);
   2487 }
   2488 
   2489 
   2490 Handle<Code> CallStubCompiler::CompileCallGlobal(
   2491     Handle<JSObject> object,
   2492     Handle<GlobalObject> holder,
   2493     Handle<PropertyCell> cell,
   2494     Handle<JSFunction> function,
   2495     Handle<Name> name) {
   2496   if (HasCustomCallGenerator(function)) {
   2497     Handle<Code> code = CompileCustomCall(
   2498         object, holder, cell, function, Handle<String>::cast(name),
   2499         Code::NORMAL);
   2500     // A null handle means bail out to the regular compiler code below.
   2501     if (!code.is_null()) return code;
   2502   }
   2503 
   2504   Label miss;
   2505   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
   2506   // Potentially loads a closure that matches the shared function info of the
   2507   // function, rather than the function itself.
   2508   GenerateLoadFunctionFromCell(cell, function, &miss);
   2509 
   2510   Counters* counters = isolate()->counters();
   2511   __ IncrementCounter(counters->call_global_inline(), 1, r3, r4);
   2512   GenerateJumpFunction(object, r1, function);
   2513   HandlerFrontendFooter(&miss);
   2514 
   2515   // Return the generated code.
   2516   return GetCode(Code::NORMAL, name);
   2517 }
   2518 
   2519 
   2520 Handle<Code> StoreStubCompiler::CompileStoreCallback(
   2521     Handle<JSObject> object,
   2522     Handle<JSObject> holder,
   2523     Handle<Name> name,
   2524     Handle<ExecutableAccessorInfo> callback) {
   2525   HandlerFrontend(IC::CurrentTypeOf(object, isolate()),
   2526                   receiver(), holder, name);
   2527 
   2528   // Stub is never generated for non-global objects that require access checks.
   2529   ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
   2530 
   2531   __ push(receiver());  // receiver
   2532   __ mov(ip, Operand(callback));  // callback info
   2533   __ push(ip);
   2534   __ mov(ip, Operand(name));
   2535   __ Push(ip, value());
   2536 
   2537   // Do tail-call to the runtime system.
   2538   ExternalReference store_callback_property =
   2539       ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
   2540   __ TailCallExternalReference(store_callback_property, 4, 1);
   2541 
   2542   // Return the generated code.
   2543   return GetCode(kind(), Code::FAST, name);
   2544 }
   2545 
   2546 
   2547 Handle<Code> StoreStubCompiler::CompileStoreCallback(
   2548     Handle<JSObject> object,
   2549     Handle<JSObject> holder,
   2550     Handle<Name> name,
   2551     const CallOptimization& call_optimization) {
   2552   HandlerFrontend(IC::CurrentTypeOf(object, isolate()),
   2553                   receiver(), holder, name);
   2554 
   2555   Register values[] = { value() };
   2556   GenerateFastApiCall(
   2557       masm(), call_optimization, receiver(), scratch3(), 1, values);
   2558 
   2559   // Return the generated code.
   2560   return GetCode(kind(), Code::FAST, name);
   2561 }
   2562 
   2563 
   2564 #undef __
   2565 #define __ ACCESS_MASM(masm)
   2566 
   2567 
void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save the value register so we can restore it later.
    __ push(r0);

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      __ Push(r1, r0);
      ParameterCount actual(1);
      ParameterCount expected(setter);
      __ InvokeFunction(setter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(r0);

    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


#undef __
#define __ ACCESS_MASM(masm())


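// Compiles a store stub for objects with a named-property interceptor.
// After the map check (and the security check for global proxies), the
// (receiver, name, value) triple is handed off to the
// IC::kStoreInterceptorProperty runtime entry, hence the argument count
// of 3 in the tail call.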
Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  Label miss;

  // Check that the map of the object hasn't changed.
  __ CheckMap(receiver(), scratch1(), Handle<Map>(object->map()), &miss,
              DO_SMI_CHECK);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver(), scratch1(), &miss);
  }

  // Stubs are never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  __ Push(receiver(), this->name(), value());

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);

  // Handle store cache miss.
  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


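// Compiles a negative-lookup stub: as long as the maps on the prototype
// chain are unchanged (validated by NonexistentHandlerFrontend), the named
// property is known to be absent, so the load can return undefined without
// entering the runtime.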
Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<Type> type,
                                                      Handle<JSObject> last,
                                                      Handle<Name> name) {
  NonexistentHandlerFrontend(type, last, name);

  // Return undefined if the maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ Ret();

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


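// Fixed register assignments for the IC stubs on ARM. Only the roles of
// the leading registers differ between the plain and keyed flavors; the
// trailing registers are scratch in every case.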
Register* LoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { r0, r2, r3, r1, r4, r5 };
  return registers;
}


Register* KeyedLoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { r1, r0, r2, r3, r4, r5 };
  return registers;
}


Register* StoreStubCompiler::registers() {
  // receiver, name, value, scratch1, scratch2, scratch3.
  static Register registers[] = { r1, r2, r0, r3, r4, r5 };
  return registers;
}


Register* KeyedStoreStubCompiler::registers() {
  // receiver, name, value, scratch1, scratch2, scratch3.
  static Register registers[] = { r2, r1, r0, r3, r4, r5 };
  return registers;
}


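// Keyed stubs are compiled for one specific property name but are entered
// with the name in a register, so they must first verify that the dynamic
// name matches and miss otherwise.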
void KeyedLoadStubCompiler::GenerateNameCheck(Handle<Name> name,
                                              Register name_reg,
                                              Label* miss) {
  __ cmp(name_reg, Operand(name));
  __ b(ne, miss);
}


void KeyedStoreStubCompiler::GenerateNameCheck(Handle<Name> name,
                                               Register name_reg,
                                               Label* miss) {
  __ cmp(name_reg, Operand(name));
  __ b(ne, miss);
}


#undef __
#define __ ACCESS_MASM(masm)


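// Illustrative JavaScript trigger (an assumption, not taken from this file):
//
//   var o = {};
//   Object.defineProperty(o, "x", { get: function() { return 1; } });
//   o.x;  // reaches a stub built from the snippet below.
//
// The getter is invoked with zero arguments; its return value is left in
// r0 and becomes the result of the load.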
void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      __ push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


#undef __
#define __ ACCESS_MASM(masm())


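// Compiles a load stub for a global property backed by a PropertyCell.
// The value is read straight out of the cell; if the property is
// configurable (!is_dont_delete), the hole value marks a deleted property
// and the load falls through to the miss handler instead.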
Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<Type> type,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label miss;

  HandlerFrontendHeader(type, receiver(), global, name, &miss);

  // Get the value from the cell.
  __ mov(r3, Operand(cell));
  __ ldr(r4, FieldMemOperand(r3, Cell::kValueOffset));

  // Check for a deleted property if the property can actually be deleted.
  if (!is_dont_delete) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(r4, ip);
    __ b(eq, &miss);
  }

  HandlerFrontendFooter(name, &miss);

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3);
  __ mov(r0, r4);
  __ Ret();

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}


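// Emits a linear map dispatch over the collected receiver types. The
// generated code is roughly (illustrative sketch, not literal output):
//
//   ldr map_reg, [receiver, #kMapOffset]
//   cmp map_reg, <map 0>   ; jump to <handler 0> if equal
//   cmp map_reg, <map 1>   ; jump to <handler 1> if equal
//   ...
//   miss: tail-call the miss builtin
//
// Deprecated maps are skipped, and smi receivers are routed to the number
// handler when one of the types is a number type.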
Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    TypeHandleList* types,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY) {
    GenerateNameCheck(name, this->name(), &miss);
  }

  Label number_case;
  Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
  __ JumpIfSmi(receiver(), smi_target);

  Register map_reg = scratch1();

  int receiver_count = types->length();
  int number_of_handled_maps = 0;
  __ ldr(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
  for (int current = 0; current < receiver_count; ++current) {
    Handle<Type> type = types->at(current);
    Handle<Map> map = IC::TypeToMap(*type, isolate());
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      __ mov(ip, Operand(map));
      __ cmp(map_reg, ip);
      if (type->Is(Type::Number())) {
        ASSERT(!number_case.is_unused());
        __ bind(&number_case);
      }
      __ Jump(handlers->at(current), RelocInfo::CODE_TARGET, eq);
    }
  }
  ASSERT(number_of_handled_maps != 0);

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  InlineCacheState state =
      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
  return GetICCode(kind(), type, name, state);
}


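// Keyed-store analogue of the polymorphic dispatch above: each receiver
// map either jumps directly to its handler or, when an elements-kind
// transition is required, first materializes the target map in
// transition_map() and then jumps to a handler that also performs the
// transition.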
Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;
  __ JumpIfSmi(receiver(), &miss);

  int receiver_count = receiver_maps->length();
  __ ldr(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset));
  for (int i = 0; i < receiver_count; ++i) {
    __ mov(ip, Operand(receiver_maps->at(i)));
    __ cmp(scratch1(), ip);
    if (transitioned_maps->at(i).is_null()) {
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq);
    } else {
      Label next_map;
      __ b(ne, &next_map);
      __ mov(transition_map(), Operand(transitioned_maps->at(i)));
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, al);
      __ bind(&next_map);
    }
  }

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetICCode(
      kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
}


#undef __
#define __ ACCESS_MASM(masm)


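// Loads an element from a receiver whose elements are in dictionary mode,
// i.e. stored in a number dictionary keyed by element index. The key must
// be a smi: the untagged index is probed in the dictionary, falling back
// to the slow builtin on a failed probe and to the miss builtin for
// non-smi keys.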
void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Label slow, miss;

  Register key = r0;
  Register receiver = r1;

  __ UntagAndJumpIfNotSmi(r2, key, &miss);
  __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ LoadFromNumberDictionary(&slow, r4, key, r0, r2, r3, r5);
  __ Ret();

  __ bind(&slow);
  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),
      1, r2, r3);

  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  // Miss case, call the runtime.
  __ bind(&miss);

  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM