// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register name,
                       Register offset,
                       Register scratch,
                       Register scratch2) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());

  // Check the relative positions of the address fields.
  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));

  Label miss;
  Register offsets_base_addr = scratch;

  // Check that the key in the entry matches the name.
  __ mov(offsets_base_addr, Operand(key_offset));
  __ ldr(ip, MemOperand(offsets_base_addr, offset, LSL, 1));
  __ cmp(name, ip);
  __ b(ne, &miss);

  // Get the code entry from the cache.
  __ add(offsets_base_addr, offsets_base_addr,
         Operand(value_off_addr - key_off_addr));
  __ ldr(scratch2, MemOperand(offsets_base_addr, offset, LSL, 1));

  // Check that the flags match what we're looking for.
  __ ldr(scratch2, FieldMemOperand(scratch2, Code::kFlagsOffset));
  __ bic(scratch2, scratch2, Operand(Code::kFlagsNotUsedInLookup));
  __ cmp(scratch2, Operand(flags));
  __ b(ne, &miss);

  // Re-load code entry from cache.
  __ ldr(offset, MemOperand(offsets_base_addr, offset, LSL, 1));

  // Jump to the first instruction in the code stub.
  __ add(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(offset);

  // Miss: fall through.
  __ bind(&miss);
}
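
// For orientation, a sketch of the address arithmetic above (hedged: it
// assumes the usual kHeapObjectTagSize == 2 and an 8-byte Entry holding a
// {key, value} pair of words):
//
//   index      = hash & (kTableSize - 1);       // what 'offset' encodes
//   offset     = index << kHeapObjectTagSize;   // index pre-scaled by 4
//   key_addr   = key_off_addr + (offset << 1);  // offset * 2 == index * 8
//   value_addr = key_addr + (value_off_addr - key_off_addr);
//
// which is why the loads above scale 'offset' with LSL 1.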


// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be a symbol and receiver must be a heap object.
static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                             Label* miss_label,
                                             Register receiver,
                                             String* name,
                                             Register scratch0,
                                             Register scratch1) {
  ASSERT(name->IsSymbol());
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ b(ne, miss_label);

  // Check that receiver is a JSObject.
  __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ cmp(scratch0, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ cmp(map, tmp);
  __ b(ne, miss_label);

  // Restore the temporarily used register.
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  // Compute the capacity mask.
  const int kCapacityOffset =
      StringDictionary::kHeaderSize +
      StringDictionary::kCapacityIndex * kPointerSize;

  // Generate an unrolled loop that performs a few probes before
  // giving up.
  static const int kProbes = 4;
  const int kElementsStartOffset =
      StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;

  // If the names in the slots probed for positions 1 to kProbes - 1 of the
  // hash value do not equal the name, and the kProbes-th slot is unused (its
  // name is the undefined value), the hash table is guaranteed not to contain
  // the property. This holds even if some slots represent deleted properties
  // (their names are the null value).
  for (int i = 0; i < kProbes; i++) {
    // scratch0 points to properties hash.
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = scratch1;
    // Capacity is smi 2^n.
    __ ldr(index, FieldMemOperand(properties, kCapacityOffset));
    __ sub(index, index, Operand(1));
    __ and_(index, index, Operand(
        Smi::FromInt(name->Hash() + StringDictionary::GetProbeOffset(i))));

    // Scale the index by multiplying by the entry size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ add(index, index, Operand(index, LSL, 1));  // index *= 3.

    Register entity_name = scratch1;
    // Having undefined at this place means the name is not contained.
    ASSERT_EQ(kSmiTagSize, 1);
    Register tmp = properties;
    __ add(tmp, properties, Operand(index, LSL, 1));
    __ ldr(entity_name, FieldMemOperand(tmp, kElementsStartOffset));

    ASSERT(!tmp.is(entity_name));
    __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
    __ cmp(entity_name, tmp);
    if (i != kProbes - 1) {
      __ b(eq, &done);

      // Stop if we have found the property.
      __ cmp(entity_name, Operand(Handle<String>(name)));
      __ b(eq, miss_label);

      // Check if the entry name is not a symbol.
      __ ldr(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
      __ ldrb(entity_name,
              FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
      __ tst(entity_name, Operand(kIsSymbolMask));
      __ b(eq, miss_label);

      // Restore the properties.
      __ ldr(properties,
             FieldMemOperand(receiver, JSObject::kPropertiesOffset));
    } else {
      // Give up probing if we still haven't found the undefined value.
      __ b(ne, miss_label);
    }
  }
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}
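
// Roughly, the unrolled loop above mirrors this lookup logic (a sketch, not
// the exact StringDictionary implementation; 'elements', 'capacity' and
// 'hash' are illustrative names):
//
//   for (int i = 0; i < kProbes; i++) {
//     int index = (hash + i + i * i) & (capacity - 1);  // quadratic probe
//     Object* key = elements[index * StringDictionary::kEntrySize];
//     if (key == undefined) goto done;   // name is definitely absent
//     if (key == name) goto miss;        // property may exist
//     if (!key->IsSymbol()) goto miss;   // cannot rule the name out
//   }
//   goto miss;  // probing exhausted without finding an empty slot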


void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2) {
  Isolate* isolate = masm->isolate();
  Label miss;

  // Make sure that code is valid. The shifting code relies on the
  // entry size being 8.
  ASSERT(sizeof(Entry) == 8);

  // Make sure the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(name));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  // Check scratch, extra and extra2 registers are valid.
  ASSERT(!scratch.is(no_reg));
  ASSERT(!extra.is(no_reg));
  ASSERT(!extra2.is(no_reg));

  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Get the map of the receiver and compute the hash.
  __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset));
  __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ add(scratch, scratch, Operand(ip));
  __ eor(scratch, scratch, Operand(flags));
  __ and_(scratch,
          scratch,
          Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the primary table.
  ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra, extra2);

  // Primary miss: Compute hash for secondary probe.
  __ sub(scratch, scratch, Operand(name));
  __ add(scratch, scratch, Operand(flags));
  __ and_(scratch,
          scratch,
          Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra, extra2);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
}
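
// Written out, the two probe hashes computed above are (a sketch; both
// results stay pre-scaled by kHeapObjectTagSize, which ProbeTable expects):
//
//   primary   = ((name->hash_field + receiver->map) ^ flags)
//               & ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
//   secondary = ((primary - name) + flags)
//               & ((kSecondaryTableSize - 1) << kHeapObjectTagSize);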


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  __ ldr(prototype,
         FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map.  The global functions all have initial maps.
  __ ldr(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register prototype, Label* miss) {
  Isolate* isolate = masm->isolate();
  // Check we're still in the same context.
  __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ Move(ip, isolate->global());
  __ cmp(prototype, ip);
  __ b(ne, miss);
  // Get the global function with the given index.
  JSFunction* function =
      JSFunction::cast(isolate->global_context()->get(index));
  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


// Load a fast property out of a holder object (src). In-object properties
// are loaded directly; otherwise the property is loaded from the properties
// fixed array.
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst, Register src,
                                            JSObject* holder, int index) {
  // Adjust for the number of properties stored in the holder.
  index -= holder->map()->inobject_properties();
  if (index < 0) {
    // Get the property straight out of the holder.
    int offset = holder->map()->instance_size() + (index * kPointerSize);
    __ ldr(dst, FieldMemOperand(src, offset));
  } else {
    // Calculate the offset into the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
    __ ldr(dst, FieldMemOperand(dst, offset));
  }
}
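
// Worked example (hypothetical numbers): with 4 in-object properties and an
// instance size of 0x30, descriptor index 2 yields 2 - 4 == -2, i.e. offset
// 0x30 - 2 * kPointerSize inside the object itself; index 5 yields 5 - 4 == 1,
// i.e. slot 1 of the out-of-object properties FixedArray.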


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss_label);

  // Check that the object is a JS array.
  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
  __ b(ne, miss_label);

  // Load length directly from the JS array.
  __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Ret();
}


// Generate code to check if an object is a string.  If the object is a
// heap object, its map's instance type is left in the scratch1 register.
// If this is not needed, scratch1 and scratch2 may be the same register.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, smi);

  // Check that the object is a string.
  __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ and_(scratch2, scratch1, Operand(kIsNotStringMask));
  // The cast is to resolve the overload for the argument of 0x0.
  __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag)));
  __ b(ne, non_string_object);
}
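
// The comparison above relies on string instance types having the kStringTag
// bits clear under kIsNotStringMask, so
// (type & kIsNotStringMask) == kStringTag holds exactly for strings.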


// Generate code to load the length from a string object and return the length.
// If the receiver object is not a string or a wrapped string object the
// execution continues at the miss label. The register containing the
// receiver is potentially clobbered.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss,
                                            bool support_wrappers) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch1 register.
  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss,
                      support_wrappers ? &check_wrapper : miss);

  // Load length directly from the string.
  __ ldr(r0, FieldMemOperand(receiver, String::kLengthOffset));
  __ Ret();

  if (support_wrappers) {
    // Check if the object is a JSValue wrapper.
    __ bind(&check_wrapper);
    __ cmp(scratch1, Operand(JS_VALUE_TYPE));
    __ b(ne, miss);

    // Unwrap the value and check if the wrapped value is a string.
    __ ldr(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
    GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
    __ ldr(r0, FieldMemOperand(scratch1, String::kLengthOffset));
    __ Ret();
  }
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(r0, scratch1);
  __ Ret();
}


// Generate StoreField code, value is passed in r0 register.
// When leaving generated code after success, the receiver_reg and name_reg
// may be clobbered.  Upon branch to miss_label, the receiver and name
// registers have their original values.
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      JSObject* object,
                                      int index,
                                      Map* transition,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register scratch,
                                      Label* miss_label) {
  // r0 : value
  Label exit;

  // Check that the receiver isn't a smi.
  __ tst(receiver_reg, Operand(kSmiTagMask));
  __ b(eq, miss_label);

  // Check that the map of the receiver hasn't changed.
  __ ldr(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(Handle<Map>(object->map())));
  __ b(ne, miss_label);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ push(receiver_reg);
    __ mov(r2, Operand(Handle<Map>(transition)));
    __ Push(r2, r0);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  if (transition != NULL) {
    // Update the map of the object; no write barrier updating is
    // needed because the map is never in new space.
    __ mov(ip, Operand(Handle<Map>(transition)));
    __ str(ip, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  }

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ str(r0, FieldMemOperand(receiver_reg, offset));

    // Skip updating write barrier if storing a smi.
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &exit);

    // Update the write barrier for the array address.
    // Pass the now unused name_reg as a scratch register.
    __ RecordWrite(receiver_reg, Operand(offset), name_reg, scratch);
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array
    __ ldr(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ str(r0, FieldMemOperand(scratch, offset));

    // Skip updating write barrier if storing a smi.
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &exit);

    // Update the write barrier for the array address.
    // Ok to clobber receiver_reg and name_reg, since we return.
    __ RecordWrite(scratch, Operand(offset), name_reg, receiver_reg);
  }

  // Return the value (register r0).
  __ bind(&exit);
  __ Ret();
}
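
// Note on the smi checks above: smis are immediate values rather than heap
// pointers, so storing one can never create a pointer the GC has to track;
// that is why both store paths skip RecordWrite for smi values.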


void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
  Code* code = NULL;
  if (kind == Code::LOAD_IC) {
    code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss);
  } else {
    code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss);
  }

  Handle<Code> ic(code);
  __ Jump(ic, RelocInfo::CODE_TARGET);
}


static void GenerateCallFunction(MacroAssembler* masm,
                                 Object* object,
                                 const ParameterCount& arguments,
                                 Label* miss) {
  // ----------- S t a t e -------------
  //  -- r0: receiver
  //  -- r1: function to call
  // -----------------------------------

  // Check that the function really is a function.
  __ JumpIfSmi(r1, miss);
  __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
  __ b(ne, miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, arguments.immediate() * kPointerSize));
  }

  // Invoke the function.
  __ InvokeFunction(r1, arguments, JUMP_FUNCTION);
}


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     JSObject* holder_obj) {
  __ push(name);
  InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
  ASSERT(!masm->isolate()->heap()->InNewSpace(interceptor));
  Register scratch = name;
  __ mov(scratch, Operand(Handle<Object>(interceptor)));
  __ push(scratch);
  __ push(receiver);
  __ push(holder);
  __ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
  __ push(scratch);
}


static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
                                                   Register receiver,
                                                   Register holder,
                                                   Register name,
                                                   JSObject* holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
                        masm->isolate());
  __ mov(r0, Operand(5));
  __ mov(r1, Operand(ref));

  CEntryStub stub(1);
  __ CallStub(&stub);
}


static const int kFastApiCallArguments = 3;

// Reserves space for the extra arguments to FastHandleApiCall in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
                                       Register scratch) {
  __ mov(scratch, Operand(Smi::FromInt(0)));
  for (int i = 0; i < kFastApiCallArguments; i++) {
    __ push(scratch);
  }
}


// Undoes the effects of ReserveSpaceForFastApiCall.
static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
  __ Drop(kFastApiCallArguments);
}


static MaybeObject* GenerateFastApiDirectCall(MacroAssembler* masm,
                                      const CallOptimization& optimization,
                                      int argc) {
  // ----------- S t a t e -------------
  //  -- sp[0]              : holder (set by CheckPrototypes)
  //  -- sp[4]              : callee js function
  //  -- sp[8]              : call data
  //  -- sp[12]             : last js argument
  //  -- ...
  //  -- sp[(argc + 3) * 4] : first js argument
  //  -- sp[(argc + 4) * 4] : receiver
  // -----------------------------------
    620   JSFunction* function = optimization.constant_function();
    621   __ mov(r5, Operand(Handle<JSFunction>(function)));
    622   __ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset));
    623 
    624   // Pass the additional arguments FastHandleApiCall expects.
    625   Object* call_data = optimization.api_call_info()->data();
    626   Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
    627   if (masm->isolate()->heap()->InNewSpace(call_data)) {
    628     __ Move(r0, api_call_info_handle);
    629     __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset));
    630   } else {
    631     __ Move(r6, Handle<Object>(call_data));
    632   }
    633   // Store js function and call data.
    634   __ stm(ib, sp, r5.bit() | r6.bit());
    635 
    636   // r2 points to call data as expected by Arguments
    637   // (refer to layout above).
    638   __ add(r2, sp, Operand(2 * kPointerSize));
    639 
    640   Object* callback = optimization.api_call_info()->callback();
    641   Address api_function_address = v8::ToCData<Address>(callback);
    642   ApiFunction fun(api_function_address);
    643 
    644   const int kApiStackSpace = 4;
    645   __ EnterExitFrame(false, kApiStackSpace);
    646 
    647   // r0 = v8::Arguments&
    648   // Arguments is after the return address.
    649   __ add(r0, sp, Operand(1 * kPointerSize));
    650   // v8::Arguments::implicit_args = data
    651   __ str(r2, MemOperand(r0, 0 * kPointerSize));
    652   // v8::Arguments::values = last argument
    653   __ add(ip, r2, Operand(argc * kPointerSize));
    654   __ str(ip, MemOperand(r0, 1 * kPointerSize));
    655   // v8::Arguments::length_ = argc
    656   __ mov(ip, Operand(argc));
    657   __ str(ip, MemOperand(r0, 2 * kPointerSize));
    658   // v8::Arguments::is_construct_call = 0
    659   __ mov(ip, Operand(0));
    660   __ str(ip, MemOperand(r0, 3 * kPointerSize));
    661 
    662   // Emitting a stub call may try to allocate (if the code is not
    663   // already generated). Do not allow the assembler to perform a
    664   // garbage collection but instead return the allocation failure
    665   // object.
    666   const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
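  // (That is: argc JS arguments, the kFastApiCallArguments extra slots, and
  // the receiver, matching the sp[(argc + 4) * 4] layout sketched above.)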
  ExternalReference ref = ExternalReference(&fun,
                                            ExternalReference::DIRECT_API_CALL,
                                            masm->isolate());
  return masm->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace);
}


class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name) {}

  MaybeObject* Compile(MacroAssembler* masm,
                       JSObject* object,
                       JSObject* holder,
                       String* name,
                       LookupResult* lookup,
                       Register receiver,
                       Register scratch1,
                       Register scratch2,
                       Register scratch3,
                       Label* miss) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, miss);

    CallOptimization optimization(lookup);

    if (optimization.is_constant_call()) {
      return CompileCacheable(masm,
                              object,
                              receiver,
                              scratch1,
                              scratch2,
                              scratch3,
                              holder,
                              lookup,
                              name,
                              optimization,
                              miss);
    } else {
      CompileRegular(masm,
                     object,
                     receiver,
                     scratch1,
                     scratch2,
                     scratch3,
                     name,
                     holder,
                     miss);
      return masm->isolate()->heap()->undefined_value();
    }
  }

 private:
  MaybeObject* CompileCacheable(MacroAssembler* masm,
                                JSObject* object,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Register scratch3,
                                JSObject* interceptor_holder,
                                LookupResult* lookup,
                                String* name,
                                const CallOptimization& optimization,
                                Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());

    Counters* counters = masm->isolate()->counters();

    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 =
          optimization.GetPrototypeDepthOfExpectedType(object,
                                                       interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 =
            optimization.GetPrototypeDepthOfExpectedType(interceptor_holder,
                                                         lookup->holder());
      }
      can_do_fast_api_call = (depth1 != kInvalidProtoDepth) ||
                             (depth2 != kInvalidProtoDepth);
    }

    __ IncrementCounter(counters->call_const_interceptor(), 1,
                        scratch1, scratch2);

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
                          scratch1, scratch2);
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from receiver to interceptor's holder
    // haven't changed and thus we can invoke interceptor.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver,
                                        interceptor_holder, scratch1,
                                        scratch2, scratch3, name, depth1, miss);

    // Invoke an interceptor and if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
                        &regular_invoke);

    // Interceptor returned nothing for this property.  Try to use cached
    // constant function.

    // Check that the maps from interceptor's holder to constant function's
    // holder haven't changed and thus we can use cached constant function.
    if (interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      lookup->holder(), scratch1,
                                      scratch2, scratch3, name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // for API (object which is instanceof for the signature).  It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke function.
    if (can_do_fast_api_call) {
      MaybeObject* result = GenerateFastApiDirectCall(masm,
                                                      optimization,
                                                      arguments_.immediate());
      if (result->IsFailure()) return result;
    } else {
      __ InvokeFunction(optimization.constant_function(), arguments_,
                        JUMP_FUNCTION);
    }

    // Deferred code for the fast API call case: clean up the preallocated
    // space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm);
      __ b(miss_label);
    }

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm);
    }

    return masm->isolate()->heap()->undefined_value();
  }

  void CompileRegular(MacroAssembler* masm,
                      JSObject* object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      String* name,
                      JSObject* interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3, name,
                                        miss_label);

    // Call a runtime function to load the interceptor property.
    __ EnterInternalFrame();
    // Save the name_ register across the call.
    __ push(name_);

    PushInterceptorArguments(masm,
                             receiver,
                             holder,
                             name_,
                             interceptor_holder);

    __ CallExternalReference(
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
                          masm->isolate()),
        5);

    // Restore the name_ register.
    __ pop(name_);
    __ LeaveInternalFrame();
  }

  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           JSObject* holder_obj,
                           Register scratch,
                           Label* interceptor_succeeded) {
    __ EnterInternalFrame();
    __ Push(holder, name_);

    CompileCallLoadPropertyWithInterceptor(masm,
                                           receiver,
                                           holder,
                                           name_,
                                           holder_obj);

    __ pop(name_);  // Restore the name.
    __ pop(receiver);  // Restore the holder.
    __ LeaveInternalFrame();

    // If interceptor returns no-result sentinel, call the constant function.
    __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r0, scratch);
    __ b(ne, interceptor_succeeded);
  }

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
  Register name_;
};
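
// In short: Compile() inlines the constant-function fast path via
// CompileCacheable (map checks, an interceptor probe, then either a direct
// API call or InvokeFunction), and otherwise CompileRegular falls back to
// the kLoadPropertyWithInterceptorForCall runtime call.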


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
    MacroAssembler* masm,
    GlobalObject* global,
    String* name,
    Register scratch,
    Label* miss) {
  Object* probe;
  { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
    if (!maybe_probe->ToObject(&probe)) return maybe_probe;
  }
  JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
  ASSERT(cell->value()->IsTheHole());
  __ mov(scratch, Operand(Handle<Object>(cell)));
  __ ldr(scratch,
         FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch, ip);
  __ b(ne, miss);
  return cell;
}


// Calls GenerateCheckPropertyCell for each global object in the prototype
// chain from object to (but not including) holder.
MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCells(
    MacroAssembler* masm,
    JSObject* object,
    JSObject* holder,
    String* name,
    Register scratch,
    Label* miss) {
  JSObject* current = object;
  while (current != holder) {
    if (current->IsGlobalObject()) {
      // Returns a cell or a failure.
      MaybeObject* result = GenerateCheckPropertyCell(
          masm,
          GlobalObject::cast(current),
          name,
          scratch,
          miss);
      if (result->IsFailure()) return result;
    }
    ASSERT(current->IsJSObject());
    current = JSObject::cast(current->GetPrototype());
  }
  return NULL;
}


// Convert and store the int passed in register ival to an IEEE 754 single
// precision floating point value at memory location (dst + 4 * wordoffset).
// If VFP3 is available, use it for the conversion.
static void StoreIntAsFloat(MacroAssembler* masm,
                            Register dst,
                            Register wordoffset,
                            Register ival,
                            Register fval,
                            Register scratch1,
                            Register scratch2) {
  if (CpuFeatures::IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);
    __ vmov(s0, ival);
    __ add(scratch1, dst, Operand(wordoffset, LSL, 2));
    __ vcvt_f32_s32(s0, s0);
    __ vstr(s0, scratch1, 0);
  } else {
    Label not_special, done;
    // Move sign bit from source to destination.  This works because the sign
    // bit in the exponent word of the double has the same position and polarity
    // as the 2's complement sign bit in a Smi.
    ASSERT(kBinary32SignMask == 0x80000000u);

    __ and_(fval, ival, Operand(kBinary32SignMask), SetCC);
    // Negate value if it is negative.
    __ rsb(ival, ival, Operand(0, RelocInfo::NONE), LeaveCC, ne);

    // We have -1, 0 or 1, which we treat specially. Register ival contains
    // absolute value: it is either equal to 1 (special case of -1 and 1),
    // greater than 1 (not a special case) or less than 1 (special case of 0).
    __ cmp(ival, Operand(1));
    __ b(gt, &not_special);

    // For 1 or -1 we need to or in the 0 exponent (biased).
    static const uint32_t exponent_word_for_1 =
        kBinary32ExponentBias << kBinary32ExponentShift;

    __ orr(fval, fval, Operand(exponent_word_for_1), LeaveCC, eq);
    __ b(&done);

    __ bind(&not_special);
    // Count leading zeros.
    // Gets the wrong answer for 0, but we already checked for that case above.
    Register zeros = scratch2;
    __ CountLeadingZeros(zeros, ival, scratch1);

    // Compute exponent and or it into the exponent register.
    __ rsb(scratch1,
           zeros,
           Operand((kBitsPerInt - 1) + kBinary32ExponentBias));

    __ orr(fval,
           fval,
           Operand(scratch1, LSL, kBinary32ExponentShift));

    // Shift up the source chopping the top bit off.
    __ add(zeros, zeros, Operand(1));
    // This wouldn't work for 1 and -1 as the shift would be 32 which means 0.
    __ mov(ival, Operand(ival, LSL, zeros));
    // Or in the mantissa (the top kBinary32MantissaBits bits of ival).
    __ orr(fval,
           fval,
           Operand(ival, LSR, kBitsPerInt - kBinary32MantissaBits));

    __ bind(&done);
    __ str(fval, MemOperand(dst, wordoffset, LSL, 2));
  }
}
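
// Worked example for the special case above: ival == 1 takes the 'eq' path,
// giving fval = 0 | (kBinary32ExponentBias << kBinary32ExponentShift), i.e.
// 127 << 23 == 0x3F800000, which is exactly 1.0f; for -1 the sign bit
// captured earlier makes it 0xBF800000 (assuming the usual bias of 127 and
// shift of 23 implied by the constants above).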


// Convert unsigned integer with specified number of leading zeroes in binary
// representation to IEEE 754 double.
// Integer to convert is passed in register hiword.
// Resulting double is returned in registers hiword:loword.
// This function does not work correctly for 0.
static void GenerateUInt2Double(MacroAssembler* masm,
                                Register hiword,
                                Register loword,
                                Register scratch,
                                int leading_zeroes) {
  const int meaningful_bits = kBitsPerInt - leading_zeroes - 1;
  const int biased_exponent = HeapNumber::kExponentBias + meaningful_bits;

  const int mantissa_shift_for_hi_word =
      meaningful_bits - HeapNumber::kMantissaBitsInTopWord;

  const int mantissa_shift_for_lo_word =
      kBitsPerInt - mantissa_shift_for_hi_word;

  __ mov(scratch, Operand(biased_exponent << HeapNumber::kExponentShift));
  if (mantissa_shift_for_hi_word > 0) {
    __ mov(loword, Operand(hiword, LSL, mantissa_shift_for_lo_word));
    __ orr(hiword, scratch, Operand(hiword, LSR, mantissa_shift_for_hi_word));
  } else {
    __ mov(loword, Operand(0, RelocInfo::NONE));
    __ orr(hiword, scratch, Operand(hiword, LSL, mantissa_shift_for_hi_word));
  }

  // If the least significant bit of the biased exponent was not 1, it has
  // been corrupted by the most significant bit of the mantissa, so fix it up.
  if (!(biased_exponent & 1)) {
    __ bic(hiword, hiword, Operand(1 << HeapNumber::kExponentShift));
  }
}
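
// Worked example (hedged; it assumes kExponentBias == 1023,
// kMantissaBitsInTopWord == 20 and kExponentShift == 20): for input
// 0x80000000 with leading_zeroes == 0, meaningful_bits == 31 and the biased
// exponent is 1023 + 31 == 1054 (even). hiword >> 11 leaks the integer's
// leading 1 into the exponent's least significant bit, and the final bic
// clears it again, yielding 0x41E00000:0x00000000 == 2147483648.0.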


#undef __
#define __ ACCESS_MASM(masm())


Register StubCompiler::CheckPrototypes(JSObject* object,
                                       Register object_reg,
                                       JSObject* holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       String* name,
                                       int save_at_depth,
                                       Label* miss) {
  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    __ str(reg, MemOperand(sp));
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  JSObject* current = object;
  while (current != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    ASSERT(current->GetPrototype()->IsJSObject());
    JSObject* prototype = JSObject::cast(current->GetPrototype());
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      if (!name->IsSymbol()) {
        MaybeObject* maybe_lookup_result = heap()->LookupSymbol(name);
        Object* lookup_result = NULL;  // Initialization to please compiler.
        if (!maybe_lookup_result->ToObject(&lookup_result)) {
          set_failure(Failure::cast(maybe_lookup_result));
          return reg;
        }
        name = String::cast(lookup_result);
      }
      ASSERT(current->property_dictionary()->FindEntry(name) ==
             StringDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(),
                                       miss,
                                       reg,
                                       name,
                                       scratch1,
                                       scratch2);
      __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // from now the object is in holder_reg
      __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else if (heap()->InNewSpace(prototype)) {
      // Get the map of the current object.
      __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      __ cmp(scratch1, Operand(Handle<Map>(current->map())));

      // Branch on the result of the map check.
      __ b(ne, miss);

      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);
        // Restore scratch register to be the map of the object.  In the
        // new space case below, we load the prototype from the map in
        // the scratch register.
        __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      }

      reg = holder_reg;  // from now the object is in holder_reg
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      __ cmp(scratch1, Operand(Handle<Map>(current->map())));
      // Branch on the result of the map check.
      __ b(ne, miss);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      __ mov(reg, Operand(Handle<JSObject>(prototype)));
    }

    if (save_at_depth == depth) {
      __ str(reg, MemOperand(sp));
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }

  // Check the holder map.
  __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(scratch1, Operand(Handle<Map>(current->map())));
  __ b(ne, miss);

  // Log the check depth.
  LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));

  // Perform security check for access to the global object.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
  if (holder->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // If we've skipped any global objects, it's not enough to verify
  // that their maps haven't changed.  We also need to check that the
  // property cell for the property is still empty.
  MaybeObject* result = GenerateCheckPropertyCells(masm(),
                                                   object,
                                                   holder,
                                                   name,
                                                   scratch1,
                                                   miss);
  if (result->IsFailure()) set_failure(Failure::cast(result));

  // Return the register containing the holder.
  return reg;
}
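
// To summarize, each prototype link above is handled in one of three ways:
// slow-mode non-global objects get an inlined dictionary negative lookup,
// new-space prototypes are reloaded from the map (their address cannot be
// embedded in the code), and old-space prototypes are embedded directly as
// handles.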
   1190 
   1191 
   1192 void StubCompiler::GenerateLoadField(JSObject* object,
   1193                                      JSObject* holder,
   1194                                      Register receiver,
   1195                                      Register scratch1,
   1196                                      Register scratch2,
   1197                                      Register scratch3,
   1198                                      int index,
   1199                                      String* name,
   1200                                      Label* miss) {
   1201   // Check that the receiver isn't a smi.
   1202   __ tst(receiver, Operand(kSmiTagMask));
   1203   __ b(eq, miss);
   1204 
   1205   // Check that the maps haven't changed.
   1206   Register reg =
   1207       CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
   1208                       name, miss);
   1209   GenerateFastPropertyLoad(masm(), r0, reg, holder, index);
   1210   __ Ret();
   1211 }
   1212 
   1213 
   1214 void StubCompiler::GenerateLoadConstant(JSObject* object,
   1215                                         JSObject* holder,
   1216                                         Register receiver,
   1217                                         Register scratch1,
   1218                                         Register scratch2,
   1219                                         Register scratch3,
   1220                                         Object* value,
   1221                                         String* name,
   1222                                         Label* miss) {
   1223   // Check that the receiver isn't a smi.
   1224   __ tst(receiver, Operand(kSmiTagMask));
   1225   __ b(eq, miss);
   1226 
   1227   // Check that the maps haven't changed.
   1228   Register reg =
   1229       CheckPrototypes(object, receiver, holder,
   1230                       scratch1, scratch2, scratch3, name, miss);
   1231 
   1232   // Return the constant value.
   1233   __ mov(r0, Operand(Handle<Object>(value)));
   1234   __ Ret();
   1235 }
   1236 
   1237 
   1238 MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
   1239                                                 JSObject* holder,
   1240                                                 Register receiver,
   1241                                                 Register name_reg,
   1242                                                 Register scratch1,
   1243                                                 Register scratch2,
   1244                                                 Register scratch3,
   1245                                                 AccessorInfo* callback,
   1246                                                 String* name,
   1247                                                 Label* miss) {
   1248   // Check that the receiver isn't a smi.
   1249   __ tst(receiver, Operand(kSmiTagMask));
   1250   __ b(eq, miss);
   1251 
   1252   // Check that the maps haven't changed.
   1253   Register reg =
   1254       CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
   1255                       name, miss);
   1256 
   1257   // Build the AccessorInfo::args_ list on the stack and push the property
   1258   // name below the exit frame so the GC sees them; we keep pointers to them.
   1259   __ push(receiver);
   1260   __ mov(scratch2, sp);  // scratch2 = AccessorInfo::args_
   1261   Handle<AccessorInfo> callback_handle(callback);
   1262   if (heap()->InNewSpace(callback_handle->data())) {
   1263     __ Move(scratch3, callback_handle);
   1264     __ ldr(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
   1265   } else {
   1266     __ Move(scratch3, Handle<Object>(callback_handle->data()));
   1267   }
   1268   __ Push(reg, scratch3, name_reg);
   1269   __ mov(r0, sp);  // r0 = Handle<String>
   1270 
   1271   Address getter_address = v8::ToCData<Address>(callback->getter());
   1272   ApiFunction fun(getter_address);
   1273 
   1274   const int kApiStackSpace = 1;
   1275   __ EnterExitFrame(false, kApiStackSpace);
   1276   // Create AccessorInfo instance on the stack above the exit frame with
   1277   // scratch2 (internal::Object **args_) as the data.
   1278   __ str(scratch2, MemOperand(sp, 1 * kPointerSize));
   1279   __ add(r1, sp, Operand(1 * kPointerSize));  // r1 = AccessorInfo&
   1280 
   1281   // Emitting a stub call may try to allocate (if the code is not
   1282   // already generated).  Do not allow the assembler to perform a
   1283   // garbage collection but instead return the allocation failure
   1284   // object.
   1285   const int kStackUnwindSpace = 4;
   1286   ExternalReference ref =
   1287       ExternalReference(&fun,
   1288                         ExternalReference::DIRECT_GETTER_CALL,
   1289                         masm()->isolate());
   1290   return masm()->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace);
   1291 }
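
// Aside: the stack picture set up above for the direct getter call, from sp
// upward. This is a sketch inferred from the push order; the slot names are
// descriptive, not V8's:
//
//   sp + 0  : name      <- r0, passed as a Handle<String> to the property name
//   sp + 4  : data      (callback->data(), reloaded if it lives in new space)
//   sp + 8  : holder    (reg: the object that actually holds the property)
//   sp + 12 : receiver  <- scratch2, passed as AccessorInfo::args_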
   1292 
   1293 
   1294 void StubCompiler::GenerateLoadInterceptor(JSObject* object,
   1295                                            JSObject* interceptor_holder,
   1296                                            LookupResult* lookup,
   1297                                            Register receiver,
   1298                                            Register name_reg,
   1299                                            Register scratch1,
   1300                                            Register scratch2,
   1301                                            Register scratch3,
   1302                                            String* name,
   1303                                            Label* miss) {
   1304   ASSERT(interceptor_holder->HasNamedInterceptor());
   1305   ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
   1306 
   1307   // Check that the receiver isn't a smi.
   1308   __ JumpIfSmi(receiver, miss);
   1309 
   1310   // So far the most popular follow-ups for interceptor loads are FIELD
   1311   // and CALLBACKS, so we inline only those; other cases may be added
   1312   // later.
   1313   bool compile_followup_inline = false;
   1314   if (lookup->IsProperty() && lookup->IsCacheable()) {
   1315     if (lookup->type() == FIELD) {
   1316       compile_followup_inline = true;
   1317     } else if (lookup->type() == CALLBACKS &&
   1318         lookup->GetCallbackObject()->IsAccessorInfo() &&
   1319         AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
   1320       compile_followup_inline = true;
   1321     }
   1322   }
   1323 
   1324   if (compile_followup_inline) {
   1325     // Compile the interceptor call, followed by inline code to load the
   1326     // property from further up the prototype chain if the call fails.
   1327     // Check that the maps haven't changed.
   1328     Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
   1329                                           scratch1, scratch2, scratch3,
   1330                                           name, miss);
   1331     ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
   1332 
   1333     // Save necessary data before invoking an interceptor.
   1334     // Requires a frame to make GC aware of pushed pointers.
   1335     __ EnterInternalFrame();
   1336 
   1337     if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
   1338       // CALLBACKS case needs a receiver to be passed into C++ callback.
   1339       __ Push(receiver, holder_reg, name_reg);
   1340     } else {
   1341       __ Push(holder_reg, name_reg);
   1342     }
   1343 
   1344     // Invoke the interceptor.  Note: the map checks from the receiver
   1345     // to the interceptor's holder have been compiled before (see the
   1346     // callers of this method).
   1347     CompileCallLoadPropertyWithInterceptor(masm(),
   1348                                            receiver,
   1349                                            holder_reg,
   1350                                            name_reg,
   1351                                            interceptor_holder);
   1352 
   1353     // Check if the interceptor provided a value for the property.
   1354     // If so, return immediately.
   1355     Label interceptor_failed;
   1356     __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
   1357     __ cmp(r0, scratch1);
   1358     __ b(eq, &interceptor_failed);
   1359     __ LeaveInternalFrame();
   1360     __ Ret();
   1361 
   1362     __ bind(&interceptor_failed);
   1363     __ pop(name_reg);
   1364     __ pop(holder_reg);
   1365     if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
   1366       __ pop(receiver);
   1367     }
   1368 
   1369     __ LeaveInternalFrame();
   1370 
   1371     // Check that the maps from the interceptor's holder to the lookup's
   1372     // holder haven't changed, and load the lookup's holder into holder_reg.
   1373     if (interceptor_holder != lookup->holder()) {
   1374       holder_reg = CheckPrototypes(interceptor_holder,
   1375                                    holder_reg,
   1376                                    lookup->holder(),
   1377                                    scratch1,
   1378                                    scratch2,
   1379                                    scratch3,
   1380                                    name,
   1381                                    miss);
   1382     }
   1383 
   1384     if (lookup->type() == FIELD) {
   1385       // We found a FIELD property in the prototype chain of the
   1386       // interceptor's holder.  Retrieve the field from its holder.
   1387       GenerateFastPropertyLoad(masm(), r0, holder_reg,
   1388                                lookup->holder(), lookup->GetFieldIndex());
   1389       __ Ret();
   1390     } else {
   1391       // We found a CALLBACKS property in the prototype chain of the
   1392       // interceptor's holder.
   1393       ASSERT(lookup->type() == CALLBACKS);
   1394       ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
   1395       AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
   1396       ASSERT(callback != NULL);
   1397       ASSERT(callback->getter() != NULL);
   1398 
   1399       // Tail call to runtime.
   1400       // Important invariant in CALLBACKS case: the code above must be
   1401       // structured to never clobber |receiver| register.
   1402       __ Move(scratch2, Handle<AccessorInfo>(callback));
   1403       // holder_reg is either receiver or scratch1.
   1404       if (!receiver.is(holder_reg)) {
   1405         ASSERT(scratch1.is(holder_reg));
   1406         __ Push(receiver, holder_reg);
   1407         __ ldr(scratch3,
   1408                FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
   1409         __ Push(scratch3, scratch2, name_reg);
   1410       } else {
   1411         __ push(receiver);
   1412         __ ldr(scratch3,
   1413                FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
   1414         __ Push(holder_reg, scratch3, scratch2, name_reg);
   1415       }
   1416 
   1417       ExternalReference ref =
   1418           ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
   1419                             masm()->isolate());
   1420       __ TailCallExternalReference(ref, 5, 1);
   1421     }
   1422   } else {  // !compile_followup_inline
   1423     // Call the runtime system to load the interceptor.
   1424     // Check that the maps haven't changed.
   1425     Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
   1426                                           scratch1, scratch2, scratch3,
   1427                                           name, miss);
   1428     PushInterceptorArguments(masm(), receiver, holder_reg,
   1429                              name_reg, interceptor_holder);
   1430 
   1431     ExternalReference ref =
   1432         ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
   1433                           masm()->isolate());
   1434     __ TailCallExternalReference(ref, 5, 1);
   1435   }
   1436 }
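
// Aside: a toy model (hypothetical names, not V8 API) of the inlined fast
// path above: ask the interceptor first, and only on the "no result"
// sentinel fall back to the FIELD/CALLBACKS property found by static lookup.
#include <optional>

inline std::optional<int> AskInterceptor(int key) {
  if (key == 42) return 7;  // interceptor provided a value
  return std::nullopt;      // stands in for the no-interceptor-result sentinel
}

inline int LoadFollowup(int key) { return key * 2; }  // field load / callback

inline int LoadWithInterceptor(int key) {
  if (std::optional<int> hit = AskInterceptor(key)) {
    return *hit;             // interceptor hit: return immediately
  }
  return LoadFollowup(key);  // interceptor failed: use the follow-up lookup
}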
   1437 
   1438 
   1439 void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
   1440   if (kind_ == Code::KEYED_CALL_IC) {
   1441     __ cmp(r2, Operand(Handle<String>(name)));
   1442     __ b(ne, miss);
   1443   }
   1444 }
   1445 
   1446 
   1447 void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
   1448                                                    JSObject* holder,
   1449                                                    String* name,
   1450                                                    Label* miss) {
   1451   ASSERT(holder->IsGlobalObject());
   1452 
   1453   // Get the number of arguments.
   1454   const int argc = arguments().immediate();
   1455 
   1456   // Get the receiver from the stack.
   1457   __ ldr(r0, MemOperand(sp, argc * kPointerSize));
   1458 
   1459   // If the object is the holder then we know that it's a global
   1460   // object, which can only happen for contextual calls.  In that
   1461   // case the receiver cannot be a smi.
   1462   if (object != holder) {
   1463     __ tst(r0, Operand(kSmiTagMask));
   1464     __ b(eq, miss);
   1465   }
   1466 
   1467   // Check that the maps haven't changed.
   1468   CheckPrototypes(object, r0, holder, r3, r1, r4, name, miss);
   1469 }
   1470 
   1471 
   1472 void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
   1473                                                     JSFunction* function,
   1474                                                     Label* miss) {
   1475   // Get the value from the cell.
   1476   __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
   1477   __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
   1478 
   1479   // Check that the cell contains the same function.
   1480   if (heap()->InNewSpace(function)) {
   1481     // We can't embed a pointer to a function in new space, so we have
   1482     // to verify that the shared function info is unchanged.  This has
   1483     // the nice side effect that multiple closures based on the same
   1484     // function can all use this call IC.  Before we load through the
   1485     // function, we have to verify that it is still a function.
   1486     __ tst(r1, Operand(kSmiTagMask));
   1487     __ b(eq, miss);
   1488     __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
   1489     __ b(ne, miss);
   1490 
   1491     // Check the shared function info. Make sure it hasn't changed.
   1492     __ Move(r3, Handle<SharedFunctionInfo>(function->shared()));
   1493     __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
   1494     __ cmp(r4, r3);
   1495     __ b(ne, miss);
   1496   } else {
   1497     __ cmp(r1, Operand(Handle<JSFunction>(function)));
   1498     __ b(ne, miss);
   1499   }
   1500 }
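
// Aside: a sketch of the invariant relied on above (hypothetical types). A
// closure allocated in new space may move, so the stub compares the stable
// SharedFunctionInfo rather than the closure's address; any closure over the
// same shared info can then reuse this call IC. The real code first checks
// that the cell still holds a function at all.
struct SharedInfoSketch;
struct FunctionSketch { SharedInfoSketch* shared; };

inline bool CellStillMatches(FunctionSketch* from_cell,
                             SharedInfoSketch* expected) {
  return from_cell != nullptr && from_cell->shared == expected;
}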
   1501 
   1502 
   1503 MaybeObject* CallStubCompiler::GenerateMissBranch() {
   1504   MaybeObject* maybe_obj = masm()->isolate()->stub_cache()->ComputeCallMiss(
   1505       arguments().immediate(), kind_);
   1506   Object* obj;
   1507   if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   1508   __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
   1509   return obj;
   1510 }
   1511 
   1512 
   1513 MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
   1514                                                 JSObject* holder,
   1515                                                 int index,
   1516                                                 String* name) {
   1517   // ----------- S t a t e -------------
   1518   //  -- r2    : name
   1519   //  -- lr    : return address
   1520   // -----------------------------------
   1521   Label miss;
   1522 
   1523   GenerateNameCheck(name, &miss);
   1524 
   1525   const int argc = arguments().immediate();
   1526 
   1527   // Get the receiver of the function from the stack into r0.
   1528   __ ldr(r0, MemOperand(sp, argc * kPointerSize));
   1529   // Check that the receiver isn't a smi.
   1530   __ tst(r0, Operand(kSmiTagMask));
   1531   __ b(eq, &miss);
   1532 
   1533   // Check the prototype chain and compute the holder register.
   1534   Register reg = CheckPrototypes(object, r0, holder, r1, r3, r4, name, &miss);
   1535   GenerateFastPropertyLoad(masm(), r1, reg, holder, index);
   1536 
   1537   GenerateCallFunction(masm(), object, arguments(), &miss);
   1538 
   1539   // Handle call cache miss.
   1540   __ bind(&miss);
   1541   MaybeObject* maybe_result = GenerateMissBranch();
   1542   if (maybe_result->IsFailure()) return maybe_result;
   1543 
   1544   // Return the generated code.
   1545   return GetCode(FIELD, name);
   1546 }
   1547 
   1548 
   1549 MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
   1550                                                     JSObject* holder,
   1551                                                     JSGlobalPropertyCell* cell,
   1552                                                     JSFunction* function,
   1553                                                     String* name) {
   1554   // ----------- S t a t e -------------
   1555   //  -- r2    : name
   1556   //  -- lr    : return address
   1557   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   1558   //  -- ...
   1559   //  -- sp[argc * 4]           : receiver
   1560   // -----------------------------------
   1561 
   1562   // If object is not an array, bail out to regular call.
   1563   if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();
   1564 
   1565   Label miss;
   1566 
   1567   GenerateNameCheck(name, &miss);
   1568 
   1569   Register receiver = r1;
   1570 
   1571   // Get the receiver from the stack.
   1572   const int argc = arguments().immediate();
   1573   __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
   1574 
   1575   // Check that the receiver isn't a smi.
   1576   __ JumpIfSmi(receiver, &miss);
   1577 
   1578   // Check that the maps haven't changed.
   1579   CheckPrototypes(JSObject::cast(object), receiver,
   1580                   holder, r3, r0, r4, name, &miss);
   1581 
   1582   if (argc == 0) {
   1583     // Nothing to do, just return the length.
   1584     __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1585     __ Drop(argc + 1);
   1586     __ Ret();
   1587   } else {
   1588     Label call_builtin;
   1589 
   1590     Register elements = r3;
   1591     Register end_elements = r5;
   1592 
   1593     // Get the elements array of the object.
   1594     __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
   1595 
   1596     // Check that the elements are in fast mode and writable.
   1597     __ CheckMap(elements, r0,
   1598                 Heap::kFixedArrayMapRootIndex, &call_builtin, true);
   1599 
   1600     if (argc == 1) {  // Otherwise fall through to call the builtin.
   1601       Label exit, with_write_barrier, attempt_to_grow_elements;
   1602 
   1603       // Get the array's length into r0 and calculate new length.
   1604       __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1605       STATIC_ASSERT(kSmiTagSize == 1);
   1606       STATIC_ASSERT(kSmiTag == 0);
   1607       __ add(r0, r0, Operand(Smi::FromInt(argc)));
   1608 
   1609       // Get the elements' length.
   1610       __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
   1611 
   1612       // Check whether the push fits without growing the backing store.
   1613       __ cmp(r0, r4);
   1614       __ b(gt, &attempt_to_grow_elements);
   1615 
   1616       // Save new length.
   1617       __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1618 
   1619       // Push the element.
   1620       __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize));
   1621       // We may need the address of the end of the array (end_elements)
   1622       // below, so compute it now and store the new element through it.
   1623       __ add(end_elements, elements,
   1624              Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
   1625       const int kEndElementsOffset =
   1626           FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
   1627       __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
   1628 
   1629       // Check for a smi.
   1630       __ JumpIfNotSmi(r4, &with_write_barrier);
   1631       __ bind(&exit);
   1632       __ Drop(argc + 1);
   1633       __ Ret();
   1634 
   1635       __ bind(&with_write_barrier);
   1636       __ InNewSpace(elements, r4, eq, &exit);
   1637       __ RecordWriteHelper(elements, end_elements, r4);
   1638       __ Drop(argc + 1);
   1639       __ Ret();
   1640 
   1641       __ bind(&attempt_to_grow_elements);
   1642       // r0: array's length + 1.
   1643       // r4: elements' length.
   1644 
   1645       if (!FLAG_inline_new) {
   1646         __ b(&call_builtin);
   1647       }
   1648 
   1649       Isolate* isolate = masm()->isolate();
   1650       ExternalReference new_space_allocation_top =
   1651           ExternalReference::new_space_allocation_top_address(isolate);
   1652       ExternalReference new_space_allocation_limit =
   1653           ExternalReference::new_space_allocation_limit_address(isolate);
   1654 
   1655       const int kAllocationDelta = 4;
   1656       // Load top and check if it is the end of elements.
   1657       __ add(end_elements, elements,
   1658              Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
   1659       __ add(end_elements, end_elements, Operand(kEndElementsOffset));
   1660       __ mov(r7, Operand(new_space_allocation_top));
   1661       __ ldr(r6, MemOperand(r7));
   1662       __ cmp(end_elements, r6);
   1663       __ b(ne, &call_builtin);
   1664 
   1665       __ mov(r9, Operand(new_space_allocation_limit));
   1666       __ ldr(r9, MemOperand(r9));
   1667       __ add(r6, r6, Operand(kAllocationDelta * kPointerSize));
   1668       __ cmp(r6, r9);
   1669       __ b(hi, &call_builtin);
   1670 
   1671       // We fit and could grow elements.
   1672       // Update new_space_allocation_top.
   1673       __ str(r6, MemOperand(r7));
   1674       // Push the argument.
   1675       __ ldr(r6, MemOperand(sp, (argc - 1) * kPointerSize));
   1676       __ str(r6, MemOperand(end_elements));
   1677       // Fill the rest with holes.
   1678       __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
   1679       for (int i = 1; i < kAllocationDelta; i++) {
   1680         __ str(r6, MemOperand(end_elements, i * kPointerSize));
   1681       }
   1682 
   1683       // Update elements' and array's sizes.
   1684       __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1685       __ add(r4, r4, Operand(Smi::FromInt(kAllocationDelta)));
   1686       __ str(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
   1687 
   1688       // Elements are in new space, so write barrier is not required.
   1689       __ Drop(argc + 1);
   1690       __ Ret();
   1691     }
   1692     __ bind(&call_builtin);
   1693     __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
   1694                                                    masm()->isolate()),
   1695                                  argc + 1,
   1696                                  1);
   1697   }
   1698 
   1699   // Handle call cache miss.
   1700   __ bind(&miss);
   1701   MaybeObject* maybe_result = GenerateMissBranch();
   1702   if (maybe_result->IsFailure()) return maybe_result;
   1703 
   1704   // Return the generated code.
   1705   return GetCode(function);
   1706 }
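
// Aside: a plain C++ model of the push fast path above (all names are
// stand-ins). Push in place while capacity allows; otherwise grow by
// kAllocationDelta slots, but only when the backing store ends exactly at
// the new-space allocation top and the limit is not exceeded.
#include <cstddef>

struct ToyArray {
  int* elements;
  size_t length;
  size_t capacity;
};

inline bool FastPush(ToyArray* a, int value,
                     int** alloc_top, int* alloc_limit) {
  const size_t kDelta = 4;  // mirrors kAllocationDelta
  if (a->length < a->capacity) {        // room left: store and bump the length
    a->elements[a->length++] = value;
    return true;
  }
  int* end = a->elements + a->capacity;
  if (end != *alloc_top || *alloc_top + kDelta > alloc_limit) {
    return false;                       // fall back to the ArrayPush builtin
  }
  *alloc_top += kDelta;                 // bump-pointer grow in place, no copy
  a->capacity += kDelta;                // V8 fills the new slots with holes
  a->elements[a->length++] = value;
  return true;
}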
   1707 
   1708 
   1709 MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
   1710                                                    JSObject* holder,
   1711                                                    JSGlobalPropertyCell* cell,
   1712                                                    JSFunction* function,
   1713                                                    String* name) {
   1714   // ----------- S t a t e -------------
   1715   //  -- r2    : name
   1716   //  -- lr    : return address
   1717   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   1718   //  -- ...
   1719   //  -- sp[argc * 4]           : receiver
   1720   // -----------------------------------
   1721 
   1722   // If object is not an array, bail out to regular call.
   1723   if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();
   1724 
   1725   Label miss, return_undefined, call_builtin;
   1726 
   1727   Register receiver = r1;
   1728   Register elements = r3;
   1729 
   1730   GenerateNameCheck(name, &miss);
   1731 
   1732   // Get the receiver from the stack.
   1733   const int argc = arguments().immediate();
   1734   __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
   1735 
   1736   // Check that the receiver isn't a smi.
   1737   __ JumpIfSmi(receiver, &miss);
   1738 
   1739   // Check that the maps haven't changed.
   1740   CheckPrototypes(JSObject::cast(object),
   1741                   receiver, holder, elements, r4, r0, name, &miss);
   1742 
   1743   // Get the elements array of the object.
   1744   __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
   1745 
   1746   // Check that the elements are in fast mode and writable.
   1747   __ CheckMap(elements, r0, Heap::kFixedArrayMapRootIndex, &call_builtin, true);
   1748 
   1749   // Get the array's length into r4 and calculate new length.
   1750   __ ldr(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1751   __ sub(r4, r4, Operand(Smi::FromInt(1)), SetCC);
   1752   __ b(lt, &return_undefined);
   1753 
   1754   // Get the last element.
   1755   __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
   1756   STATIC_ASSERT(kSmiTagSize == 1);
   1757   STATIC_ASSERT(kSmiTag == 0);
   1758   // We can't address the last element in one operation. Compute the more
   1759   // expensive shift first, and use an offset later on.
   1760   __ add(elements, elements, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize));
   1761   __ ldr(r0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
   1762   __ cmp(r0, r6);
   1763   __ b(eq, &call_builtin);
   1764 
   1765   // Set the array's length.
   1766   __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1767 
   1768   // Fill with the hole.
   1769   __ str(r6, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
   1770   __ Drop(argc + 1);
   1771   __ Ret();
   1772 
   1773   __ bind(&return_undefined);
   1774   __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
   1775   __ Drop(argc + 1);
   1776   __ Ret();
   1777 
   1778   __ bind(&call_builtin);
   1779   __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,
   1780                                                  masm()->isolate()),
   1781                                argc + 1,
   1782                                1);
   1783 
   1784   // Handle call cache miss.
   1785   __ bind(&miss);
   1786   MaybeObject* maybe_result = GenerateMissBranch();
   1787   if (maybe_result->IsFailure()) return maybe_result;
   1788 
   1789   // Return the generated code.
   1790   return GetCode(function);
   1791 }
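
// Aside: the pop fast path in toy form (stand-in names). The stub above
// distinguishes two bail-outs: an empty array returns undefined directly,
// while a hole in the last slot defers to the ArrayPop builtin.
#include <optional>

const int kToyHole = -1;  // stands in for the-hole sentinel

inline std::optional<int> FastPop(int* elements, int* length,
                                  bool* use_builtin) {
  *use_builtin = false;
  if (*length == 0) return std::nullopt;  // return undefined
  int last = elements[*length - 1];
  if (last == kToyHole) {                 // hole: let the builtin decide
    *use_builtin = true;
    return std::nullopt;
  }
  elements[--*length] = kToyHole;         // shrink and refill with a hole
  return last;
}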
   1792 
   1793 
   1794 MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
   1795     Object* object,
   1796     JSObject* holder,
   1797     JSGlobalPropertyCell* cell,
   1798     JSFunction* function,
   1799     String* name) {
   1800   // ----------- S t a t e -------------
   1801   //  -- r2                     : function name
   1802   //  -- lr                     : return address
   1803   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   1804   //  -- ...
   1805   //  -- sp[argc * 4]           : receiver
   1806   // -----------------------------------
   1807 
   1808   // If object is not a string, bail out to regular call.
   1809   if (!object->IsString() || cell != NULL) return heap()->undefined_value();
   1810 
   1811   const int argc = arguments().immediate();
   1812 
   1813   Label miss;
   1814   Label name_miss;
   1815   Label index_out_of_range;
   1816   Label* index_out_of_range_label = &index_out_of_range;
   1817 
   1818   if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
   1819     index_out_of_range_label = &miss;
   1820   }
   1821 
   1822   GenerateNameCheck(name, &name_miss);
   1823 
   1824   // Check that the maps starting from the prototype haven't changed.
   1825   GenerateDirectLoadGlobalFunctionPrototype(masm(),
   1826                                             Context::STRING_FUNCTION_INDEX,
   1827                                             r0,
   1828                                             &miss);
   1829   ASSERT(object != holder);
   1830   CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder,
   1831                   r1, r3, r4, name, &miss);
   1832 
   1833   Register receiver = r1;
   1834   Register index = r4;
   1835   Register scratch = r3;
   1836   Register result = r0;
   1837   __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
   1838   if (argc > 0) {
   1839     __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
   1840   } else {
   1841     __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
   1842   }
   1843 
   1844   StringCharCodeAtGenerator char_code_at_generator(receiver,
   1845                                                    index,
   1846                                                    scratch,
   1847                                                    result,
   1848                                                    &miss,  // When not a string.
   1849                                                    &miss,  // When not a number.
   1850                                                    index_out_of_range_label,
   1851                                                    STRING_INDEX_IS_NUMBER);
   1852   char_code_at_generator.GenerateFast(masm());
   1853   __ Drop(argc + 1);
   1854   __ Ret();
   1855 
   1856   StubRuntimeCallHelper call_helper;
   1857   char_code_at_generator.GenerateSlow(masm(), call_helper);
   1858 
   1859   if (index_out_of_range.is_linked()) {
   1860     __ bind(&index_out_of_range);
   1861     __ LoadRoot(r0, Heap::kNanValueRootIndex);
   1862     __ Drop(argc + 1);
   1863     __ Ret();
   1864   }
   1865 
   1866   __ bind(&miss);
   1867   // Restore function name in r2.
   1868   __ Move(r2, Handle<String>(name));
   1869   __ bind(&name_miss);
   1870   MaybeObject* maybe_result = GenerateMissBranch();
   1871   if (maybe_result->IsFailure()) return maybe_result;
   1872 
   1873   // Return the generated code.
   1874   return GetCode(function);
   1875 }
   1876 
   1877 
   1878 MaybeObject* CallStubCompiler::CompileStringCharAtCall(
   1879     Object* object,
   1880     JSObject* holder,
   1881     JSGlobalPropertyCell* cell,
   1882     JSFunction* function,
   1883     String* name) {
   1884   // ----------- S t a t e -------------
   1885   //  -- r2                     : function name
   1886   //  -- lr                     : return address
   1887   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   1888   //  -- ...
   1889   //  -- sp[argc * 4]           : receiver
   1890   // -----------------------------------
   1891 
   1892   // If object is not a string, bail out to regular call.
   1893   if (!object->IsString() || cell != NULL) return heap()->undefined_value();
   1894 
   1895   const int argc = arguments().immediate();
   1896 
   1897   Label miss;
   1898   Label name_miss;
   1899   Label index_out_of_range;
   1900   Label* index_out_of_range_label = &index_out_of_range;
   1901 
   1902   if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
   1903     index_out_of_range_label = &miss;
   1904   }
   1905 
   1906   GenerateNameCheck(name, &name_miss);
   1907 
   1908   // Check that the maps starting from the prototype haven't changed.
   1909   GenerateDirectLoadGlobalFunctionPrototype(masm(),
   1910                                             Context::STRING_FUNCTION_INDEX,
   1911                                             r0,
   1912                                             &miss);
   1913   ASSERT(object != holder);
   1914   CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder,
   1915                   r1, r3, r4, name, &miss);
   1916 
   1917   Register receiver = r0;
   1918   Register index = r4;
   1919   Register scratch1 = r1;
   1920   Register scratch2 = r3;
   1921   Register result = r0;
   1922   __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
   1923   if (argc > 0) {
   1924     __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
   1925   } else {
   1926     __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
   1927   }
   1928 
   1929   StringCharAtGenerator char_at_generator(receiver,
   1930                                           index,
   1931                                           scratch1,
   1932                                           scratch2,
   1933                                           result,
   1934                                           &miss,  // When not a string.
   1935                                           &miss,  // When not a number.
   1936                                           index_out_of_range_label,
   1937                                           STRING_INDEX_IS_NUMBER);
   1938   char_at_generator.GenerateFast(masm());
   1939   __ Drop(argc + 1);
   1940   __ Ret();
   1941 
   1942   StubRuntimeCallHelper call_helper;
   1943   char_at_generator.GenerateSlow(masm(), call_helper);
   1944 
   1945   if (index_out_of_range.is_linked()) {
   1946     __ bind(&index_out_of_range);
   1947     __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
   1948     __ Drop(argc + 1);
   1949     __ Ret();
   1950   }
   1951 
   1952   __ bind(&miss);
   1953   // Restore function name in r2.
   1954   __ Move(r2, Handle<String>(name));
   1955   __ bind(&name_miss);
   1956   MaybeObject* maybe_result = GenerateMissBranch();
   1957   if (maybe_result->IsFailure()) return maybe_result;
   1958 
   1959   // Return the generated code.
   1960   return GetCode(function);
   1961 }
   1962 
   1963 
   1964 MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
   1965     Object* object,
   1966     JSObject* holder,
   1967     JSGlobalPropertyCell* cell,
   1968     JSFunction* function,
   1969     String* name) {
   1970   // ----------- S t a t e -------------
   1971   //  -- r2                     : function name
   1972   //  -- lr                     : return address
   1973   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   1974   //  -- ...
   1975   //  -- sp[argc * 4]           : receiver
   1976   // -----------------------------------
   1977 
   1978   const int argc = arguments().immediate();
   1979 
   1980   // If the object is not a JSObject or we got an unexpected number of
   1981   // arguments, bail out to the regular call.
   1982   if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
   1983 
   1984   Label miss;
   1985   GenerateNameCheck(name, &miss);
   1986 
   1987   if (cell == NULL) {
   1988     __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
   1989 
   1990     STATIC_ASSERT(kSmiTag == 0);
   1991     __ tst(r1, Operand(kSmiTagMask));
   1992     __ b(eq, &miss);
   1993 
   1994     CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
   1995                     &miss);
   1996   } else {
   1997     ASSERT(cell->value() == function);
   1998     GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
   1999     GenerateLoadFunctionFromCell(cell, function, &miss);
   2000   }
   2001 
   2002   // Load the char code argument.
   2003   Register code = r1;
   2004   __ ldr(code, MemOperand(sp, 0 * kPointerSize));
   2005 
   2006   // Check that the code is a smi.
   2007   Label slow;
   2008   STATIC_ASSERT(kSmiTag == 0);
   2009   __ tst(code, Operand(kSmiTagMask));
   2010   __ b(ne, &slow);
   2011 
   2012   // Convert the smi code to uint16.
   2013   __ and_(code, code, Operand(Smi::FromInt(0xffff)));
   2014 
   2015   StringCharFromCodeGenerator char_from_code_generator(code, r0);
   2016   char_from_code_generator.GenerateFast(masm());
   2017   __ Drop(argc + 1);
   2018   __ Ret();
   2019 
   2020   StubRuntimeCallHelper call_helper;
   2021   char_from_code_generator.GenerateSlow(masm(), call_helper);
   2022 
   2023   // Tail call the full function. We do not have to patch the receiver
   2024   // because the function makes no use of it.
   2025   __ bind(&slow);
   2026   __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
   2027 
   2028   __ bind(&miss);
   2029   // r2: function name.
   2030   MaybeObject* maybe_result = GenerateMissBranch();
   2031   if (maybe_result->IsFailure()) return maybe_result;
   2032 
   2033   // Return the generated code.
   2034   return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
   2035 }
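
// Aside: why "and_(code, code, Smi::FromInt(0xffff))" is the uint16
// conversion. A smi is the value shifted left by one with tag bit 0, so the
// tagged constant is 0x1fffe and the AND reduces the value mod 2^16 while
// leaving a valid smi behind. Sketch with stand-in names:
#include <cstdint>

inline intptr_t SmiAndUint16(intptr_t tagged_code) {
  return tagged_code & (static_cast<intptr_t>(0xffff) << 1);  // still a smi
}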
   2036 
   2037 
   2038 MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
   2039                                                     JSObject* holder,
   2040                                                     JSGlobalPropertyCell* cell,
   2041                                                     JSFunction* function,
   2042                                                     String* name) {
   2043   // ----------- S t a t e -------------
   2044   //  -- r2                     : function name
   2045   //  -- lr                     : return address
   2046   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   2047   //  -- ...
   2048   //  -- sp[argc * 4]           : receiver
   2049   // -----------------------------------
   2050 
   2051   if (!CpuFeatures::IsSupported(VFP3)) {
   2052     return heap()->undefined_value();
   2053   }
   2054 
   2055   CpuFeatures::Scope scope_vfp3(VFP3);
   2056 
   2057   const int argc = arguments().immediate();
   2058 
   2059   // If the object is not a JSObject or we got an unexpected number of
   2060   // arguments, bail out to the regular call.
   2061   if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
   2062 
   2063   Label miss, slow;
   2064   GenerateNameCheck(name, &miss);
   2065 
   2066   if (cell == NULL) {
   2067     __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
   2068 
   2069     STATIC_ASSERT(kSmiTag == 0);
   2070     __ JumpIfSmi(r1, &miss);
   2071 
   2072     CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
   2073                     &miss);
   2074   } else {
   2075     ASSERT(cell->value() == function);
   2076     GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
   2077     GenerateLoadFunctionFromCell(cell, function, &miss);
   2078   }
   2079 
   2080   // Load the (only) argument into r0.
   2081   __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
   2082 
   2083   // If the argument is a smi, just return.
   2084   STATIC_ASSERT(kSmiTag == 0);
   2085   __ tst(r0, Operand(kSmiTagMask));
   2086   __ Drop(argc + 1, eq);
   2087   __ Ret(eq);
   2088 
   2089   __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, true);
   2090 
   2091   Label wont_fit_smi, no_vfp_exception, restore_fpscr_and_return;
   2092 
   2093   // VFP3 is available here, so we use FPU rounding in RM (round towards
   2094   // minus infinity) mode.
   2095 
   2096   // Load the HeapNumber value.
   2097   // We will need access to the value in the core registers, so we load it
   2098   // with ldrd and move it to the fpu. It also spares a sub instruction for
   2099   // updating the HeapNumber value address, as vldr expects a multiple
   2100   // of 4 offset.
   2101   __ Ldrd(r4, r5, FieldMemOperand(r0, HeapNumber::kValueOffset));
   2102   __ vmov(d1, r4, r5);
   2103 
   2104   // Backup FPSCR.
   2105   __ vmrs(r3);
   2106   // Set custom FPSCR:
   2107   //  - Set rounding mode to "Round towards Minus Infinity"
   2108   //    (ie bits [23:22] = 0b10).
   2109   //  - Clear vfp cumulative exception flags (bits [3:0]).
   2110   //  - Make sure the flush-to-zero mode control bit is unset (bit 24).
   2111   __ bic(r9, r3,
   2112       Operand(kVFPExceptionMask | kVFPRoundingModeMask | kVFPFlushToZeroMask));
   2113   __ orr(r9, r9, Operand(kRoundToMinusInf));
   2114   __ vmsr(r9);
   2115 
   2116   // Convert the argument to an integer.
   2117   __ vcvt_s32_f64(s0, d1, kFPSCRRounding);
   2118 
   2119   // Use vcvt latency to start checking for special cases.
   2120   // Get the argument exponent and clear the sign bit.
   2121   __ bic(r6, r5, Operand(HeapNumber::kSignMask));
   2122   __ mov(r6, Operand(r6, LSR, HeapNumber::kMantissaBitsInTopWord));
   2123 
   2124   // Retrieve FPSCR and check for vfp exceptions.
   2125   __ vmrs(r9);
   2126   __ tst(r9, Operand(kVFPExceptionMask));
   2127   __ b(&no_vfp_exception, eq);
   2128 
   2129   // Check for NaN, Infinity, and -Infinity.
   2130   // They are invariant through a Math.Floor call, so just
   2131   // return the original argument.
   2132   __ sub(r7, r6, Operand(HeapNumber::kExponentMask
   2133         >> HeapNumber::kMantissaBitsInTopWord), SetCC);
   2134   __ b(&restore_fpscr_and_return, eq);
   2135   // We had an overflow or underflow in the conversion. Check if we
   2136   // have a big exponent.
   2137   __ cmp(r7, Operand(HeapNumber::kMantissaBits));
   2138   // If greater or equal, the argument is already rounded and in r0.
   2139   __ b(&restore_fpscr_and_return, ge);
   2140   __ b(&wont_fit_smi);
   2141 
   2142   __ bind(&no_vfp_exception);
   2143   // Move the result back to general purpose register r0.
   2144   __ vmov(r0, s0);
   2145   // Check if the result fits into a smi.
   2146   __ add(r1, r0, Operand(0x40000000), SetCC);
   2147   __ b(&wont_fit_smi, mi);
   2148   // Tag the result.
   2149   STATIC_ASSERT(kSmiTag == 0);
   2150   __ mov(r0, Operand(r0, LSL, kSmiTagSize));
   2151 
   2152   // Check for -0.
   2153   __ cmp(r0, Operand(0, RelocInfo::NONE));
   2154   __ b(&restore_fpscr_and_return, ne);
   2155   // r5 already holds the HeapNumber exponent.
   2156   __ tst(r5, Operand(HeapNumber::kSignMask));
   2157   // If the HeapNumber is negative, the value was -0: reload the original
   2158   // HeapNumber from the stack and return it.  Else r0 holds 0; just return.
   2159   __ ldr(r0, MemOperand(sp, 0 * kPointerSize), ne);
   2160 
   2161   __ bind(&restore_fpscr_and_return);
   2162   // Restore FPSCR and return.
   2163   __ vmsr(r3);
   2164   __ Drop(argc + 1);
   2165   __ Ret();
   2166 
   2167   __ bind(&wont_fit_smi);
   2168   // Restore FPSCR and fall through to the slow case.
   2169   __ vmsr(r3);
   2170 
   2171   __ bind(&slow);
   2172   // Tail call the full function. We do not have to patch the receiver
   2173   // because the function makes no use of it.
   2174   __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
   2175 
   2176   __ bind(&miss);
   2177   // r2: function name.
   2178   MaybeObject* maybe_result = GenerateMissBranch();
   2179   if (maybe_result->IsFailure()) return maybe_result;
   2180 
   2181   // Return the generated code.
   2182   return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
   2183 }
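
// Aside: a portable sketch of what the FPSCR manipulation above achieves,
// using <cfenv> instead of vmrs/vmsr (exposition only, stand-in names).
// Rounding is forced toward minus infinity for the conversion, then the
// cumulative exception flags reveal whether the slow path must be taken.
#include <cfenv>
#include <cmath>

inline long FloorToLong(double x, bool* fits) {
  std::feclearexcept(FE_ALL_EXCEPT);       // like clearing the flag bits
  const int old_mode = std::fegetround();  // like backing up FPSCR in r3
  std::fesetround(FE_DOWNWARD);            // the RM rounding mode
  long result = std::lrint(x);             // like vcvt_s32_f64 under FPSCR
  std::fesetround(old_mode);               // like the final vmsr
  *fits = !std::fetestexcept(FE_INVALID);  // NaN/overflow: take the slow path
  return result;
}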
   2184 
   2185 
   2186 MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
   2187                                                   JSObject* holder,
   2188                                                   JSGlobalPropertyCell* cell,
   2189                                                   JSFunction* function,
   2190                                                   String* name) {
   2191   // ----------- S t a t e -------------
   2192   //  -- r2                     : function name
   2193   //  -- lr                     : return address
   2194   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   2195   //  -- ...
   2196   //  -- sp[argc * 4]           : receiver
   2197   // -----------------------------------
   2198 
   2199   const int argc = arguments().immediate();
   2200 
   2201   // If the object is not a JSObject or we got an unexpected number of
   2202   // arguments, bail out to the regular call.
   2203   if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
   2204 
   2205   Label miss;
   2206   GenerateNameCheck(name, &miss);
   2207 
   2208   if (cell == NULL) {
   2209     __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
   2210 
   2211     STATIC_ASSERT(kSmiTag == 0);
   2212     __ tst(r1, Operand(kSmiTagMask));
   2213     __ b(eq, &miss);
   2214 
   2215     CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
   2216                     &miss);
   2217   } else {
   2218     ASSERT(cell->value() == function);
   2219     GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
   2220     GenerateLoadFunctionFromCell(cell, function, &miss);
   2221   }
   2222 
   2223   // Load the (only) argument into r0.
   2224   __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
   2225 
   2226   // Check if the argument is a smi.
   2227   Label not_smi;
   2228   STATIC_ASSERT(kSmiTag == 0);
   2229   __ JumpIfNotSmi(r0, &not_smi);
   2230 
   2231   // Do bitwise not or do nothing depending on the sign of the
   2232   // argument.
   2233   __ eor(r1, r0, Operand(r0, ASR, kBitsPerInt - 1));
   2234 
   2235   // Add 1 or do nothing depending on the sign of the argument.
   2236   __ sub(r0, r1, Operand(r0, ASR, kBitsPerInt - 1), SetCC);
   2237 
   2238   // If the result is still negative, go to the slow case.
   2239   // This only happens for the most negative smi.
   2240   Label slow;
   2241   __ b(mi, &slow);
   2242 
   2243   // Smi case done.
   2244   __ Drop(argc + 1);
   2245   __ Ret();
   2246 
   2247   // Check if the argument is a heap number and load its exponent and
   2248   // sign.
   2249   __ bind(&not_smi);
   2250   __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, true);
   2251   __ ldr(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));
   2252 
   2253   // Check the sign of the argument. If the argument is positive,
   2254   // just return it.
   2255   Label negative_sign;
   2256   __ tst(r1, Operand(HeapNumber::kSignMask));
   2257   __ b(ne, &negative_sign);
   2258   __ Drop(argc + 1);
   2259   __ Ret();
   2260 
   2261   // If the argument is negative, clear the sign, and return a new
   2262   // number.
   2263   __ bind(&negative_sign);
   2264   __ eor(r1, r1, Operand(HeapNumber::kSignMask));
   2265   __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
   2266   __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
   2267   __ AllocateHeapNumber(r0, r4, r5, r6, &slow);
   2268   __ str(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));
   2269   __ str(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
   2270   __ Drop(argc + 1);
   2271   __ Ret();
   2272 
   2273   // Tail call the full function. We do not have to patch the receiver
   2274   // because the function makes no use of it.
   2275   __ bind(&slow);
   2276   __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
   2277 
   2278   __ bind(&miss);
   2279   // r2: function name.
   2280   MaybeObject* maybe_result = GenerateMissBranch();
   2281   if (maybe_result->IsFailure()) return maybe_result;
   2282 
   2283   // Return the generated code.
   2284   return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
   2285 }
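
// Aside: the smi fast path above is the classic branchless absolute value.
// eor with the sign mask (ASR by 31) flips the bits of a negative value, and
// subtracting the mask adds one; only the most negative value overflows,
// which the SetCC + mi branch catches. It works unchanged on tagged smis,
// since the tag bit is 0. Equivalent C++ sketch:
#include <cstdint>

inline int32_t BranchlessAbs(int32_t x) {
  int32_t mask = x >> 31;    // 0 for x >= 0, -1 for x < 0 (arithmetic shift)
  return (x ^ mask) - mask;  // eor r1, r0, mask; sub r0, r1, mask
}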
   2286 
   2287 
   2288 MaybeObject* CallStubCompiler::CompileFastApiCall(
   2289     const CallOptimization& optimization,
   2290     Object* object,
   2291     JSObject* holder,
   2292     JSGlobalPropertyCell* cell,
   2293     JSFunction* function,
   2294     String* name) {
   2295   Counters* counters = isolate()->counters();
   2296 
   2297   ASSERT(optimization.is_simple_api_call());
   2298   // Bail out if the object is a global object, as we don't want to
   2299   // repatch it to the global receiver.
   2300   if (object->IsGlobalObject()) return heap()->undefined_value();
   2301   if (cell != NULL) return heap()->undefined_value();
   2302   int depth = optimization.GetPrototypeDepthOfExpectedType(
   2303             JSObject::cast(object), holder);
   2304   if (depth == kInvalidProtoDepth) return heap()->undefined_value();
   2305 
   2306   Label miss, miss_before_stack_reserved;
   2307 
   2308   GenerateNameCheck(name, &miss_before_stack_reserved);
   2309 
   2310   // Get the receiver from the stack.
   2311   const int argc = arguments().immediate();
   2312   __ ldr(r1, MemOperand(sp, argc * kPointerSize));
   2313 
   2314   // Check that the receiver isn't a smi.
   2315   __ tst(r1, Operand(kSmiTagMask));
   2316   __ b(eq, &miss_before_stack_reserved);
   2317 
   2318   __ IncrementCounter(counters->call_const(), 1, r0, r3);
   2319   __ IncrementCounter(counters->call_const_fast_api(), 1, r0, r3);
   2320 
   2321   ReserveSpaceForFastApiCall(masm(), r0);
   2322 
   2323   // Check that the maps haven't changed, and find the holder as a side effect.
   2324   CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
   2325                   depth, &miss);
   2326 
   2327   MaybeObject* result = GenerateFastApiDirectCall(masm(), optimization, argc);
   2328   if (result->IsFailure()) return result;
   2329 
   2330   __ bind(&miss);
   2331   FreeSpaceForFastApiCall(masm());
   2332 
   2333   __ bind(&miss_before_stack_reserved);
   2334   MaybeObject* maybe_result = GenerateMissBranch();
   2335   if (maybe_result->IsFailure()) return maybe_result;
   2336 
   2337   // Return the generated code.
   2338   return GetCode(function);
   2339 }
   2340 
   2341 
   2342 MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
   2343                                                    JSObject* holder,
   2344                                                    JSFunction* function,
   2345                                                    String* name,
   2346                                                    CheckType check) {
   2347   // ----------- S t a t e -------------
   2348   //  -- r2    : name
   2349   //  -- lr    : return address
   2350   // -----------------------------------
   2351   if (HasCustomCallGenerator(function)) {
   2352     MaybeObject* maybe_result = CompileCustomCall(
   2353         object, holder, NULL, function, name);
   2354     Object* result;
   2355     if (!maybe_result->ToObject(&result)) return maybe_result;
   2356     // undefined means bail out to regular compiler.
   2357     if (!result->IsUndefined()) return result;
   2358   }
   2359 
   2360   Label miss;
   2361 
   2362   GenerateNameCheck(name, &miss);
   2363 
   2364   // Get the receiver from the stack.
   2365   const int argc = arguments().immediate();
   2366   __ ldr(r1, MemOperand(sp, argc * kPointerSize));
   2367 
   2368   // Check that the receiver isn't a smi.
   2369   if (check != NUMBER_CHECK) {
   2370     __ tst(r1, Operand(kSmiTagMask));
   2371     __ b(eq, &miss);
   2372   }
   2373 
   2374   // Make sure that it's okay not to patch the on-stack receiver
   2375   // unless we're doing a receiver map check.
   2376   ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
   2377 
   2378   SharedFunctionInfo* function_info = function->shared();
   2379   switch (check) {
   2380     case RECEIVER_MAP_CHECK:
   2381       __ IncrementCounter(masm()->isolate()->counters()->call_const(),
   2382                           1, r0, r3);
   2383 
   2384       // Check that the maps haven't changed.
   2385       CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
   2386                       &miss);
   2387 
   2388       // Patch the receiver on the stack with the global proxy if
   2389       // necessary.
   2390       if (object->IsGlobalObject()) {
   2391         __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
   2392         __ str(r3, MemOperand(sp, argc * kPointerSize));
   2393       }
   2394       break;
   2395 
   2396     case STRING_CHECK:
   2397       if (!function->IsBuiltin() && !function_info->strict_mode()) {
   2398         // Calling non-strict non-builtins with a value as the receiver
   2399         // requires boxing.
   2400         __ jmp(&miss);
   2401       } else {
   2402         // Check that the object is a two-byte string or a symbol.
   2403         __ CompareObjectType(r1, r3, r3, FIRST_NONSTRING_TYPE);
   2404         __ b(hs, &miss);
   2405         // Check that the maps starting from the prototype haven't changed.
   2406         GenerateDirectLoadGlobalFunctionPrototype(
   2407             masm(), Context::STRING_FUNCTION_INDEX, r0, &miss);
   2408         CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
   2409                         r1, r4, name, &miss);
   2410       }
   2411       break;
   2412 
   2413     case NUMBER_CHECK: {
   2414       if (!function->IsBuiltin() && !function_info->strict_mode()) {
   2415         // Calling non-strict non-builtins with a value as the receiver
   2416         // requires boxing.
   2417         __ jmp(&miss);
   2418       } else {
   2419         Label fast;
   2420         // Check that the object is a smi or a heap number.
   2421         __ tst(r1, Operand(kSmiTagMask));
   2422         __ b(eq, &fast);
   2423         __ CompareObjectType(r1, r0, r0, HEAP_NUMBER_TYPE);
   2424         __ b(ne, &miss);
   2425         __ bind(&fast);
   2426         // Check that the maps starting from the prototype haven't changed.
   2427         GenerateDirectLoadGlobalFunctionPrototype(
   2428             masm(), Context::NUMBER_FUNCTION_INDEX, r0, &miss);
   2429         CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
   2430                         r1, r4, name, &miss);
   2431       }
   2432       break;
   2433     }
   2434 
   2435     case BOOLEAN_CHECK: {
   2436       if (!function->IsBuiltin() && !function_info->strict_mode()) {
   2437         // Calling non-strict non-builtins with a value as the receiver
   2438         // requires boxing.
   2439         __ jmp(&miss);
   2440       } else {
   2441         Label fast;
   2442         // Check that the object is a boolean.
   2443         __ LoadRoot(ip, Heap::kTrueValueRootIndex);
   2444         __ cmp(r1, ip);
   2445         __ b(eq, &fast);
   2446         __ LoadRoot(ip, Heap::kFalseValueRootIndex);
   2447         __ cmp(r1, ip);
   2448         __ b(ne, &miss);
   2449         __ bind(&fast);
   2450         // Check that the maps starting from the prototype haven't changed.
   2451         GenerateDirectLoadGlobalFunctionPrototype(
   2452             masm(), Context::BOOLEAN_FUNCTION_INDEX, r0, &miss);
   2453         CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
   2454                         r1, r4, name, &miss);
   2455       }
   2456       break;
   2457     }
   2458 
   2459     default:
   2460       UNREACHABLE();
   2461   }
   2462 
   2463   __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
   2464 
   2465   // Handle call cache miss.
   2466   __ bind(&miss);
   2467   MaybeObject* maybe_result = GenerateMissBranch();
   2468   if (maybe_result->IsFailure()) return maybe_result;
   2469 
   2470   // Return the generated code.
   2471   return GetCode(function);
   2472 }
   2473 
   2474 
   2475 MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
   2476                                                       JSObject* holder,
   2477                                                       String* name) {
   2478   // ----------- S t a t e -------------
   2479   //  -- r2    : name
   2480   //  -- lr    : return address
   2481   // -----------------------------------
   2482 
   2483   Label miss;
   2484 
   2485   GenerateNameCheck(name, &miss);
   2486 
   2487   // Get the number of arguments.
   2488   const int argc = arguments().immediate();
   2489 
   2490   LookupResult lookup;
   2491   LookupPostInterceptor(holder, name, &lookup);
   2492 
   2493   // Get the receiver from the stack.
   2494   __ ldr(r1, MemOperand(sp, argc * kPointerSize));
   2495 
   2496   CallInterceptorCompiler compiler(this, arguments(), r2);
   2497   MaybeObject* result = compiler.Compile(masm(),
   2498                                          object,
   2499                                          holder,
   2500                                          name,
   2501                                          &lookup,
   2502                                          r1,
   2503                                          r3,
   2504                                          r4,
   2505                                          r0,
   2506                                          &miss);
   2507   if (result->IsFailure()) {
   2508     return result;
   2509   }
   2510 
   2511   // Move returned value, the function to call, to r1.
   2512   __ mov(r1, r0);
   2513   // Restore receiver.
   2514   __ ldr(r0, MemOperand(sp, argc * kPointerSize));
   2515 
   2516   GenerateCallFunction(masm(), object, arguments(), &miss);
   2517 
   2518   // Handle call cache miss.
   2519   __ bind(&miss);
   2520   MaybeObject* maybe_result = GenerateMissBranch();
   2521   if (maybe_result->IsFailure()) return maybe_result;
   2522 
   2523   // Return the generated code.
   2524   return GetCode(INTERCEPTOR, name);
   2525 }
   2526 
   2527 
   2528 MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
   2529                                                  GlobalObject* holder,
   2530                                                  JSGlobalPropertyCell* cell,
   2531                                                  JSFunction* function,
   2532                                                  String* name) {
   2533   // ----------- S t a t e -------------
   2534   //  -- r2    : name
   2535   //  -- lr    : return address
   2536   // -----------------------------------
   2537 
   2538   if (HasCustomCallGenerator(function)) {
   2539     MaybeObject* maybe_result = CompileCustomCall(
   2540         object, holder, cell, function, name);
   2541     Object* result;
   2542     if (!maybe_result->ToObject(&result)) return maybe_result;
   2543     // An undefined result means bail out to the regular compiler.
   2544     if (!result->IsUndefined()) return result;
   2545   }
   2546 
   2547   Label miss;
   2548 
   2549   GenerateNameCheck(name, &miss);
   2550 
   2551   // Get the number of arguments.
   2552   const int argc = arguments().immediate();
   2553 
   2554   GenerateGlobalReceiverCheck(object, holder, name, &miss);
   2555 
   2556   GenerateLoadFunctionFromCell(cell, function, &miss);
   2557 
   2558   // Patch the receiver on the stack with the global proxy if
   2559   // necessary.
   2560   if (object->IsGlobalObject()) {
   2561     __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
   2562     __ str(r3, MemOperand(sp, argc * kPointerSize));
   2563   }
   2564 
   2565   // Set up the context (the function is already in r1).
   2566   __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
   2567 
   2568   // Jump to the cached code (tail call).
   2569   Counters* counters = masm()->isolate()->counters();
   2570   __ IncrementCounter(counters->call_global_inline(), 1, r3, r4);
   2571   ASSERT(function->is_compiled());
   2572   Handle<Code> code(function->code());
   2573   ParameterCount expected(function->shared()->formal_parameter_count());
   2574   if (V8::UseCrankshaft()) {
   2575     // TODO(kasperl): For now, we always call indirectly through the
   2576     // code field in the function to allow recompilation to take effect
   2577     // without changing any of the call sites.
   2578     __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
   2579     __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION);
   2580   } else {
   2581     __ InvokeCode(code, expected, arguments(),
   2582                   RelocInfo::CODE_TARGET, JUMP_FUNCTION);
   2583   }
   2584 
   2585   // Handle call cache miss.
   2586   __ bind(&miss);
   2587   __ IncrementCounter(counters->call_global_inline_miss(), 1, r1, r3);
   2588   MaybeObject* maybe_result = GenerateMissBranch();
   2589   if (maybe_result->IsFailure()) return maybe_result;
   2590 
   2591   // Return the generated code.
   2592   return GetCode(NORMAL, name);
   2593 }
   2594 
   2595 
   2596 MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
   2597                                                   int index,
   2598                                                   Map* transition,
   2599                                                   String* name) {
   2600   // ----------- S t a t e -------------
   2601   //  -- r0    : value
   2602   //  -- r1    : receiver
   2603   //  -- r2    : name
   2604   //  -- lr    : return address
   2605   // -----------------------------------
   2606   Label miss;
   2607 
   2608   GenerateStoreField(masm(),
   2609                      object,
   2610                      index,
   2611                      transition,
   2612                      r1, r2, r3,
   2613                      &miss);
   2614   __ bind(&miss);
   2615   Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
   2616   __ Jump(ic, RelocInfo::CODE_TARGET);
   2617 
   2618   // Return the generated code.
   2619   return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
   2620 }
   2621 
   2622 
   2623 MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
   2624                                                      AccessorInfo* callback,
   2625                                                      String* name) {
   2626   // ----------- S t a t e -------------
   2627   //  -- r0    : value
   2628   //  -- r1    : receiver
   2629   //  -- r2    : name
   2630   //  -- lr    : return address
   2631   // -----------------------------------
   2632   Label miss;
   2633 
   2634   // Check that the object isn't a smi.
   2635   __ tst(r1, Operand(kSmiTagMask));
   2636   __ b(eq, &miss);
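          // A smi has its low-order tag bit clear, so tst against kSmiTagMask
          // (which is 1) sets the Z flag exactly when r1 holds a smi.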
   2637 
   2638   // Check that the map of the object hasn't changed.
   2639   __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
   2640   __ cmp(r3, Operand(Handle<Map>(object->map())));
   2641   __ b(ne, &miss);
   2642 
   2643   // Perform global security token check if needed.
   2644   if (object->IsJSGlobalProxy()) {
   2645     __ CheckAccessGlobalProxy(r1, r3, &miss);
   2646   }
   2647 
   2648   // The stub is never generated for non-global objects that require
   2649   // access checks.
   2650   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
   2651 
   2652   __ push(r1);  // receiver
   2653   __ mov(ip, Operand(Handle<AccessorInfo>(callback)));  // callback info
   2654   __ Push(ip, r2, r0);
   2655 
   2656   // Do tail-call to the runtime system.
   2657   ExternalReference store_callback_property =
   2658       ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
   2659                         masm()->isolate());
   2660   __ TailCallExternalReference(store_callback_property, 4, 1);
   2661 
   2662   // Handle store cache miss.
   2663   __ bind(&miss);
   2664   Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
   2665   __ Jump(ic, RelocInfo::CODE_TARGET);
   2666 
   2667   // Return the generated code.
   2668   return GetCode(CALLBACKS, name);
   2669 }
   2670 
   2671 
   2672 MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
   2673                                                         String* name) {
   2674   // ----------- S t a t e -------------
   2675   //  -- r0    : value
   2676   //  -- r1    : receiver
   2677   //  -- r2    : name
   2678   //  -- lr    : return address
   2679   // -----------------------------------
   2680   Label miss;
   2681 
   2682   // Check that the object isn't a smi.
   2683   __ tst(r1, Operand(kSmiTagMask));
   2684   __ b(eq, &miss);
   2685 
   2686   // Check that the map of the object hasn't changed.
   2687   __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
   2688   __ cmp(r3, Operand(Handle<Map>(receiver->map())));
   2689   __ b(ne, &miss);
   2690 
   2691   // Perform global security token check if needed.
   2692   if (receiver->IsJSGlobalProxy()) {
   2693     __ CheckAccessGlobalProxy(r1, r3, &miss);
   2694   }
   2695 
   2696   // Stub is never generated for non-global objects that require access
   2697   // checks.
   2698   ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
   2699 
   2700   __ Push(r1, r2, r0);  // Receiver, name, value.
   2701 
   2702   __ mov(r0, Operand(Smi::FromInt(strict_mode_)));
   2703   __ push(r0);  // strict mode
   2704 
   2705   // Do tail-call to the runtime system.
   2706   ExternalReference store_ic_property =
   2707       ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
   2708                         masm()->isolate());
   2709   __ TailCallExternalReference(store_ic_property, 4, 1);
   2710 
   2711   // Handle store cache miss.
   2712   __ bind(&miss);
   2713   Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
   2714   __ Jump(ic, RelocInfo::CODE_TARGET);
   2715 
   2716   // Return the generated code.
   2717   return GetCode(INTERCEPTOR, name);
   2718 }
   2719 
   2720 
   2721 MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
   2722                                                    JSGlobalPropertyCell* cell,
   2723                                                    String* name) {
   2724   // ----------- S t a t e -------------
   2725   //  -- r0    : value
   2726   //  -- r1    : receiver
   2727   //  -- r2    : name
   2728   //  -- lr    : return address
   2729   // -----------------------------------
   2730   Label miss;
   2731 
   2732   // Check that the map of the global has not changed.
   2733   __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
   2734   __ cmp(r3, Operand(Handle<Map>(object->map())));
   2735   __ b(ne, &miss);
   2736 
   2737   // Check that the value in the cell is not the hole. If it is, the
   2738   // cell could have been deleted, and reintroducing the global would
   2739   // require updating the property details in the global object's
   2740   // property dictionary. We bail out to the runtime system to do that.
   2741   __ mov(r4, Operand(Handle<JSGlobalPropertyCell>(cell)));
   2742   __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
   2743   __ ldr(r6, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));
   2744   __ cmp(r5, r6);
   2745   __ b(eq, &miss);
   2746 
   2747   // Store the value in the cell.
   2748   __ str(r0, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));
   2749 
   2750   Counters* counters = masm()->isolate()->counters();
   2751   __ IncrementCounter(counters->named_store_global_inline(), 1, r4, r3);
   2752   __ Ret();
   2753 
   2754   // Handle store cache miss.
   2755   __ bind(&miss);
   2756   __ IncrementCounter(counters->named_store_global_inline_miss(), 1, r4, r3);
   2757   Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
   2758   __ Jump(ic, RelocInfo::CODE_TARGET);
   2759 
   2760   // Return the generated code.
   2761   return GetCode(NORMAL, name);
   2762 }
   2763 
   2764 
   2765 MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
   2766                                                       JSObject* object,
   2767                                                       JSObject* last) {
   2768   // ----------- S t a t e -------------
   2769   //  -- r0    : receiver
   2770   //  -- lr    : return address
   2771   // -----------------------------------
   2772   Label miss;
   2773 
   2774   // Check that receiver is not a smi.
   2775   __ tst(r0, Operand(kSmiTagMask));
   2776   __ b(eq, &miss);
   2777 
   2778   // Check the maps of the full prototype chain.
   2779   CheckPrototypes(object, r0, last, r3, r1, r4, name, &miss);
   2780 
   2781   // If the last object in the prototype chain is a global object,
   2782   // check that the global property cell is empty.
   2783   if (last->IsGlobalObject()) {
   2784     MaybeObject* cell = GenerateCheckPropertyCell(masm(),
   2785                                                   GlobalObject::cast(last),
   2786                                                   name,
   2787                                                   r1,
   2788                                                   &miss);
   2789     if (cell->IsFailure()) {
   2790       miss.Unuse();
   2791       return cell;
   2792     }
   2793   }
   2794 
   2795   // Return undefined if maps of the full prototype chain are still the
   2796   // same and no global property with this name contains a value.
   2797   __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
   2798   __ Ret();
   2799 
   2800   __ bind(&miss);
   2801   GenerateLoadMiss(masm(), Code::LOAD_IC);
   2802 
   2803   // Return the generated code.
   2804   return GetCode(NONEXISTENT, heap()->empty_string());
   2805 }
   2806 
   2807 
   2808 MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
   2809                                                 JSObject* holder,
   2810                                                 int index,
   2811                                                 String* name) {
   2812   // ----------- S t a t e -------------
   2813   //  -- r0    : receiver
   2814   //  -- r2    : name
   2815   //  -- lr    : return address
   2816   // -----------------------------------
   2817   Label miss;
   2818 
   2819   GenerateLoadField(object, holder, r0, r3, r1, r4, index, name, &miss);
   2820   __ bind(&miss);
   2821   GenerateLoadMiss(masm(), Code::LOAD_IC);
   2822 
   2823   // Return the generated code.
   2824   return GetCode(FIELD, name);
   2825 }
   2826 
   2827 
   2828 MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
   2829                                                    JSObject* object,
   2830                                                    JSObject* holder,
   2831                                                    AccessorInfo* callback) {
   2832   // ----------- S t a t e -------------
   2833   //  -- r0    : receiver
   2834   //  -- r2    : name
   2835   //  -- lr    : return address
   2836   // -----------------------------------
   2837   Label miss;
   2838 
   2839   MaybeObject* result = GenerateLoadCallback(object, holder, r0, r2, r3, r1, r4,
   2840                                              callback, name, &miss);
   2841   if (result->IsFailure()) {
   2842     miss.Unuse();
   2843     return result;
   2844   }
   2845 
   2846   __ bind(&miss);
   2847   GenerateLoadMiss(masm(), Code::LOAD_IC);
   2848 
   2849   // Return the generated code.
   2850   return GetCode(CALLBACKS, name);
   2851 }
   2852 
   2853 
   2854 MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
   2855                                                    JSObject* holder,
   2856                                                    Object* value,
   2857                                                    String* name) {
   2858   // ----------- S t a t e -------------
   2859   //  -- r0    : receiver
   2860   //  -- r2    : name
   2861   //  -- lr    : return address
   2862   // -----------------------------------
   2863   Label miss;
   2864 
   2865   GenerateLoadConstant(object, holder, r0, r3, r1, r4, value, name, &miss);
   2866   __ bind(&miss);
   2867   GenerateLoadMiss(masm(), Code::LOAD_IC);
   2868 
   2869   // Return the generated code.
   2870   return GetCode(CONSTANT_FUNCTION, name);
   2871 }
   2872 
   2873 
   2874 MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* object,
   2875                                                       JSObject* holder,
   2876                                                       String* name) {
   2877   // ----------- S t a t e -------------
   2878   //  -- r0    : receiver
   2879   //  -- r2    : name
   2880   //  -- lr    : return address
   2881   // -----------------------------------
   2882   Label miss;
   2883 
   2884   LookupResult lookup;
   2885   LookupPostInterceptor(holder, name, &lookup);
   2886   GenerateLoadInterceptor(object,
   2887                           holder,
   2888                           &lookup,
   2889                           r0,
   2890                           r2,
   2891                           r3,
   2892                           r1,
   2893                           r4,
   2894                           name,
   2895                           &miss);
   2896   __ bind(&miss);
   2897   GenerateLoadMiss(masm(), Code::LOAD_IC);
   2898 
   2899   // Return the generated code.
   2900   return GetCode(INTERCEPTOR, name);
   2901 }
   2902 
   2903 
   2904 MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
   2905                                                  GlobalObject* holder,
   2906                                                  JSGlobalPropertyCell* cell,
   2907                                                  String* name,
   2908                                                  bool is_dont_delete) {
   2909   // ----------- S t a t e -------------
   2910   //  -- r0    : receiver
   2911   //  -- r2    : name
   2912   //  -- lr    : return address
   2913   // -----------------------------------
   2914   Label miss;
   2915 
   2916   // If the object is the holder, then we know it is a global object,
   2917   // which can only happen for contextual loads. In this case, the
   2918   // receiver cannot be a smi.
   2919   if (object != holder) {
   2920     __ tst(r0, Operand(kSmiTagMask));
   2921     __ b(eq, &miss);
   2922   }
   2923 
   2924   // Check that the map of the global has not changed.
   2925   CheckPrototypes(object, r0, holder, r3, r4, r1, name, &miss);
   2926 
   2927   // Get the value from the cell.
   2928   __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
   2929   __ ldr(r4, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
   2930 
   2931   // Check for deleted property if property can actually be deleted.
   2932   if (!is_dont_delete) {
   2933     __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
   2934     __ cmp(r4, ip);
   2935     __ b(eq, &miss);
   2936   }
   2937 
   2938   __ mov(r0, r4);
   2939   Counters* counters = masm()->isolate()->counters();
   2940   __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3);
   2941   __ Ret();
   2942 
   2943   __ bind(&miss);
   2944   __ IncrementCounter(counters->named_load_global_stub_miss(), 1, r1, r3);
   2945   GenerateLoadMiss(masm(), Code::LOAD_IC);
   2946 
   2947   // Return the generated code.
   2948   return GetCode(NORMAL, name);
   2949 }
   2950 
   2951 
   2952 MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
   2953                                                      JSObject* receiver,
   2954                                                      JSObject* holder,
   2955                                                      int index) {
   2956   // ----------- S t a t e -------------
   2957   //  -- lr    : return address
   2958   //  -- r0    : key
   2959   //  -- r1    : receiver
   2960   // -----------------------------------
   2961   Label miss;
   2962 
   2963   // Check the key is the cached one.
   2964   __ cmp(r0, Operand(Handle<String>(name)));
   2965   __ b(ne, &miss);
   2966 
   2967   GenerateLoadField(receiver, holder, r1, r2, r3, r4, index, name, &miss);
   2968   __ bind(&miss);
   2969   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
   2970 
   2971   return GetCode(FIELD, name);
   2972 }
   2973 
   2974 
   2975 MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
   2976     String* name,
   2977     JSObject* receiver,
   2978     JSObject* holder,
   2979     AccessorInfo* callback) {
   2980   // ----------- S t a t e -------------
   2981   //  -- lr    : return address
   2982   //  -- r0    : key
   2983   //  -- r1    : receiver
   2984   // -----------------------------------
   2985   Label miss;
   2986 
   2987   // Check the key is the cached one.
   2988   __ cmp(r0, Operand(Handle<String>(name)));
   2989   __ b(ne, &miss);
   2990 
   2991   MaybeObject* result = GenerateLoadCallback(receiver, holder, r1, r0, r2, r3,
   2992                                              r4, callback, name, &miss);
   2993   if (result->IsFailure()) {
   2994     miss.Unuse();
   2995     return result;
   2996   }
   2997 
   2998   __ bind(&miss);
   2999   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
   3000 
   3001   return GetCode(CALLBACKS, name);
   3002 }
   3003 
   3004 
   3005 MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
   3006                                                         JSObject* receiver,
   3007                                                         JSObject* holder,
   3008                                                         Object* value) {
   3009   // ----------- S t a t e -------------
   3010   //  -- lr    : return address
   3011   //  -- r0    : key
   3012   //  -- r1    : receiver
   3013   // -----------------------------------
   3014   Label miss;
   3015 
   3016   // Check the key is the cached one.
   3017   __ cmp(r0, Operand(Handle<String>(name)));
   3018   __ b(ne, &miss);
   3019 
   3020   GenerateLoadConstant(receiver, holder, r1, r2, r3, r4, value, name, &miss);
   3021   __ bind(&miss);
   3022   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
   3023 
   3024   // Return the generated code.
   3025   return GetCode(CONSTANT_FUNCTION, name);
   3026 }
   3027 
   3028 
   3029 MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
   3030                                                            JSObject* holder,
   3031                                                            String* name) {
   3032   // ----------- S t a t e -------------
   3033   //  -- lr    : return address
   3034   //  -- r0    : key
   3035   //  -- r1    : receiver
   3036   // -----------------------------------
   3037   Label miss;
   3038 
   3039   // Check the key is the cached one.
   3040   __ cmp(r0, Operand(Handle<String>(name)));
   3041   __ b(ne, &miss);
   3042 
   3043   LookupResult lookup;
   3044   LookupPostInterceptor(holder, name, &lookup);
   3045   GenerateLoadInterceptor(receiver,
   3046                           holder,
   3047                           &lookup,
   3048                           r1,
   3049                           r0,
   3050                           r2,
   3051                           r3,
   3052                           r4,
   3053                           name,
   3054                           &miss);
   3055   __ bind(&miss);
   3056   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
   3057 
   3058   return GetCode(INTERCEPTOR, name);
   3059 }
   3060 
   3061 
   3062 MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
   3063   // ----------- S t a t e -------------
   3064   //  -- lr    : return address
   3065   //  -- r0    : key
   3066   //  -- r1    : receiver
   3067   // -----------------------------------
   3068   Label miss;
   3069 
   3070   // Check the key is the cached one.
   3071   __ cmp(r0, Operand(Handle<String>(name)));
   3072   __ b(ne, &miss);
   3073 
   3074   GenerateLoadArrayLength(masm(), r1, r2, &miss);
   3075   __ bind(&miss);
   3076   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
   3077 
   3078   return GetCode(CALLBACKS, name);
   3079 }
   3080 
   3081 
   3082 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
   3083   // ----------- S t a t e -------------
   3084   //  -- lr    : return address
   3085   //  -- r0    : key
   3086   //  -- r1    : receiver
   3087   // -----------------------------------
   3088   Label miss;
   3089 
   3090   Counters* counters = masm()->isolate()->counters();
   3091   __ IncrementCounter(counters->keyed_load_string_length(), 1, r2, r3);
   3092 
   3093   // Check the key is the cached one.
   3094   __ cmp(r0, Operand(Handle<String>(name)));
   3095   __ b(ne, &miss);
   3096 
   3097   GenerateLoadStringLength(masm(), r1, r2, r3, &miss, true);
   3098   __ bind(&miss);
   3099   __ DecrementCounter(counters->keyed_load_string_length(), 1, r2, r3);
   3100 
   3101   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
   3102 
   3103   return GetCode(CALLBACKS, name);
   3104 }
   3105 
   3106 
   3107 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
   3108   // ----------- S t a t e -------------
   3109   //  -- lr    : return address
   3110   //  -- r0    : key
   3111   //  -- r1    : receiver
   3112   // -----------------------------------
   3113   Label miss;
   3114 
   3115   Counters* counters = masm()->isolate()->counters();
   3116   __ IncrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);
   3117 
   3118   // Check the name hasn't changed.
   3119   __ cmp(r0, Operand(Handle<String>(name)));
   3120   __ b(ne, &miss);
   3121 
   3122   GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss);
   3123   __ bind(&miss);
   3124   __ DecrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);
   3125   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
   3126 
   3127   return GetCode(CALLBACKS, name);
   3128 }
   3129 
   3130 
   3131 MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
   3132   // ----------- S t a t e -------------
   3133   //  -- lr    : return address
   3134   //  -- r0    : key
   3135   //  -- r1    : receiver
   3136   // -----------------------------------
   3137   Label miss;
   3138 
   3139   // Check that the receiver isn't a smi.
   3140   __ tst(r1, Operand(kSmiTagMask));
   3141   __ b(eq, &miss);
   3142 
   3143   // Check that the map matches.
   3144   __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
   3145   __ cmp(r2, Operand(Handle<Map>(receiver->map())));
   3146   __ b(ne, &miss);
   3147 
   3148   // Check that the key is a smi.
   3149   __ tst(r0, Operand(kSmiTagMask));
   3150   __ b(ne, &miss);
   3151 
   3152   // Get the elements array.
   3153   __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
   3154   __ AssertFastElements(r2);
   3155 
   3156   // Check that the key is within bounds.
   3157   __ ldr(r3, FieldMemOperand(r2, FixedArray::kLengthOffset));
   3158   __ cmp(r0, Operand(r3));
   3159   __ b(hs, &miss);
   3160 
   3161   // Load the result and make sure it's not the hole.
   3162   __ add(r3, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   3163   ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
   3164   __ ldr(r4,
   3165          MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
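          // r0 is a smi, i.e. index * 2; shifting it left by
          // kPointerSizeLog2 - kSmiTagSize (= 1 on 32-bit ARM) scales it to
          // the byte offset index * kPointerSize.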
   3166   __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
   3167   __ cmp(r4, ip);
   3168   __ b(eq, &miss);
   3169   __ mov(r0, r4);
   3170   __ Ret();
   3171 
   3172   __ bind(&miss);
   3173   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
   3174 
   3175   // Return the generated code.
   3176   return GetCode(NORMAL, NULL);
   3177 }
   3178 
   3179 
   3180 MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
   3181                                                        int index,
   3182                                                        Map* transition,
   3183                                                        String* name) {
   3184   // ----------- S t a t e -------------
   3185   //  -- r0    : value
   3186   //  -- r1    : name
   3187   //  -- r2    : receiver
   3188   //  -- lr    : return address
   3189   // -----------------------------------
   3190   Label miss;
   3191 
   3192   Counters* counters = masm()->isolate()->counters();
   3193   __ IncrementCounter(counters->keyed_store_field(), 1, r3, r4);
   3194 
   3195   // Check that the name has not changed.
   3196   __ cmp(r1, Operand(Handle<String>(name)));
   3197   __ b(ne, &miss);
   3198 
   3199   // r3 is used as a scratch register. r1 and r2 keep their values if a
   3200   // jump to the miss label is generated.
   3201   GenerateStoreField(masm(),
   3202                      object,
   3203                      index,
   3204                      transition,
   3205                      r2, r1, r3,
   3206                      &miss);
   3207   __ bind(&miss);
   3208 
   3209   __ DecrementCounter(counters->keyed_store_field(), 1, r3, r4);
   3210   Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
   3211   __ Jump(ic, RelocInfo::CODE_TARGET);
   3212 
   3213   // Return the generated code.
   3214   return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
   3215 }
   3216 
   3217 
   3218 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
   3219     JSObject* receiver) {
   3220   // ----------- S t a t e -------------
   3221   //  -- r0    : value
   3222   //  -- r1    : key
   3223   //  -- r2    : receiver
   3224   //  -- lr    : return address
   3225   //  -- r3    : scratch
   3226   //  -- r4    : scratch (elements)
   3227   // -----------------------------------
   3228   Label miss;
   3229 
   3230   Register value_reg = r0;
   3231   Register key_reg = r1;
   3232   Register receiver_reg = r2;
   3233   Register scratch = r3;
   3234   Register elements_reg = r4;
   3235 
   3236   // Check that the receiver isn't a smi.
   3237   __ tst(receiver_reg, Operand(kSmiTagMask));
   3238   __ b(eq, &miss);
   3239 
   3240   // Check that the map matches.
   3241   __ ldr(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
   3242   __ cmp(scratch, Operand(Handle<Map>(receiver->map())));
   3243   __ b(ne, &miss);
   3244 
   3245   // Check that the key is a smi.
   3246   __ tst(key_reg, Operand(kSmiTagMask));
   3247   __ b(ne, &miss);
   3248 
   3249   // Get the elements array and make sure it is a fast element array,
          // not a copy-on-write ('cow') array.
   3250   __ ldr(elements_reg,
   3251          FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
   3252   __ ldr(scratch, FieldMemOperand(elements_reg, HeapObject::kMapOffset));
   3253   __ cmp(scratch, Operand(Handle<Map>(factory()->fixed_array_map())));
   3254   __ b(ne, &miss);
   3255 
   3256   // Check that the key is within bounds.
   3257   if (receiver->IsJSArray()) {
   3258     __ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
   3259   } else {
   3260     __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
   3261   }
   3262   // Compare smis.
   3263   __ cmp(key_reg, scratch);
   3264   __ b(hs, &miss);
   3265 
   3266   __ add(scratch,
   3267          elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   3268   ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
   3269   __ str(value_reg,
   3270          MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
   3271   __ RecordWrite(scratch,
   3272                  Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize),
   3273                  receiver_reg, elements_reg);
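          // Write barrier: mark the region covering the stored element so the
          // GC sees the possible new-space pointer. RecordWrite clobbers the
          // two scratch registers passed to it (receiver_reg and elements_reg).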
   3274 
   3275   // value_reg (r0) is preserved.
   3276   // Done.
   3277   __ Ret();
   3278 
   3279   __ bind(&miss);
   3280   Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
   3281   __ Jump(ic, RelocInfo::CODE_TARGET);
   3282 
   3283   // Return the generated code.
   3284   return GetCode(NORMAL, NULL);
   3285 }
   3286 
   3287 
   3288 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
   3289   // ----------- S t a t e -------------
   3290   //  -- r0    : argc
   3291   //  -- r1    : constructor
   3292   //  -- lr    : return address
   3293   //  -- [sp]  : last argument
   3294   // -----------------------------------
   3295   Label generic_stub_call;
   3296 
   3297   // Use r7 for holding undefined which is used in several places below.
   3298   __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
   3299 
   3300 #ifdef ENABLE_DEBUGGER_SUPPORT
   3301   // Check to see whether there are any break points in the function code.
   3302   // If there are, jump to the generic constructor stub, which calls the
   3303   // actual code for the function, thereby hitting the break points.
   3304   __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
   3305   __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset));
   3306   __ cmp(r2, r7);
   3307   __ b(ne, &generic_stub_call);
   3308 #endif
   3309 
   3310   // Load the initial map and verify that it is in fact a map.
   3311   // r1: constructor function
   3312   // r7: undefined
   3313   __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
   3314   __ tst(r2, Operand(kSmiTagMask));
   3315   __ b(eq, &generic_stub_call);
   3316   __ CompareObjectType(r2, r3, r4, MAP_TYPE);
   3317   __ b(ne, &generic_stub_call);
   3318 
   3319 #ifdef DEBUG
   3320   // Cannot construct functions this way.
   3321   // r0: argc
   3322   // r1: constructor function
   3323   // r2: initial map
   3324   // r7: undefined
   3325   __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
   3326   __ Check(ne, "Function constructed by construct stub.");
   3327 #endif
   3328 
   3329   // Now allocate the JSObject in new space.
   3330   // r0: argc
   3331   // r1: constructor function
   3332   // r2: initial map
   3333   // r7: undefined
   3334   __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
   3335   __ AllocateInNewSpace(r3,
   3336                         r4,
   3337                         r5,
   3338                         r6,
   3339                         &generic_stub_call,
   3340                         SIZE_IN_WORDS);
   3341 
   3342   // The JSObject is allocated; now initialize the fields. The map is set
   3343   // to the initial map, and properties and elements are set to the empty
          // fixed array.
   3344   // r0: argc
   3345   // r1: constructor function
   3346   // r2: initial map
   3347   // r3: object size (in words)
   3348   // r4: JSObject (not tagged)
   3349   // r7: undefined
   3350   __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
   3351   __ mov(r5, r4);
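          // Each str below uses post-indexed addressing: it stores to [r5] and
          // then advances r5 by kPointerSize, walking r5 through the header
          // fields of the new object.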
   3352   ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
   3353   __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
   3354   ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
   3355   __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
   3356   ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
   3357   __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
   3358 
   3359   // Calculate the address just past the first argument. The stack
   3360   // contains only the argc arguments.
   3361   __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2));
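          // Argument n (0-based, the first argument being deepest on the
          // stack) is therefore at r1 - (n + 1) * kPointerSize, which is how
          // the loop below addresses it.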
   3362 
   3363   // Fill all the in-object properties with undefined.
   3364   // r0: argc
   3365   // r1: first argument
   3366   // r3: object size (in words)
   3367   // r4: JSObject (not tagged)
   3368   // r5: First in-object property of JSObject (not tagged)
   3369   // r7: undefined
   3370   // Fill the initialized properties with a constant value or a passed argument
   3371   // depending on the this.x = ...; assignment in the function.
   3372   SharedFunctionInfo* shared = function->shared();
   3373   for (int i = 0; i < shared->this_property_assignments_count(); i++) {
   3374     if (shared->IsThisPropertyAssignmentArgument(i)) {
   3375       Label not_passed, next;
   3376       // Check if the argument assigned to the property is actually passed.
   3377       int arg_number = shared->GetThisPropertyAssignmentArgument(i);
   3378       __ cmp(r0, Operand(arg_number));
   3379       __ b(le, &not_passed);
   3380       // Argument passed - find it on the stack.
   3381       __ ldr(r2, MemOperand(r1, (arg_number + 1) * -kPointerSize));
   3382       __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
   3383       __ b(&next);
   3384       __ bind(&not_passed);
   3385       // Set the property to undefined.
   3386       __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
   3387       __ bind(&next);
   3388     } else {
   3389       // Set the property to the constant value.
   3390       Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
   3391       __ mov(r2, Operand(constant));
   3392       __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
   3393     }
   3394   }
   3395 
   3396   // Fill the unused in-object property fields with undefined.
   3397   ASSERT(function->has_initial_map());
   3398   for (int i = shared->this_property_assignments_count();
   3399        i < function->initial_map()->inobject_properties();
   3400        i++) {
   3401     __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
   3402   }
   3403 
   3404   // r0: argc
   3405   // r4: JSObject (not tagged)
   3406   // Move argc to r1 and the JSObject to return to r0 and tag it.
   3407   __ mov(r1, r0);
   3408   __ mov(r0, r4);
   3409   __ orr(r0, r0, Operand(kHeapObjectTag));
   3410 
   3411   // r0: JSObject
   3412   // r1: argc
   3413   // Remove caller arguments and receiver from the stack and return.
   3414   __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2));
   3415   __ add(sp, sp, Operand(kPointerSize));
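          // The two adds above popped the argc arguments and the receiver
          // slot, so the caller's portion of the stack is fully unwound.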
   3416   Counters* counters = masm()->isolate()->counters();
   3417   __ IncrementCounter(counters->constructed_objects(), 1, r1, r2);
   3418   __ IncrementCounter(counters->constructed_objects_stub(), 1, r1, r2);
   3419   __ Jump(lr);
   3420 
   3421   // Jump to the generic stub in case the specialized code cannot handle the
   3422   // construction.
   3423   __ bind(&generic_stub_call);
   3424   Handle<Code> code = masm()->isolate()->builtins()->JSConstructStubGeneric();
   3425   __ Jump(code, RelocInfo::CODE_TARGET);
   3426 
   3427   // Return the generated code.
   3428   return GetCode();
   3429 }
   3430 
   3431 
   3432 static bool IsElementTypeSigned(ExternalArrayType array_type) {
   3433   switch (array_type) {
   3434     case kExternalByteArray:
   3435     case kExternalShortArray:
   3436     case kExternalIntArray:
   3437       return true;
   3438 
   3439     case kExternalUnsignedByteArray:
   3440     case kExternalUnsignedShortArray:
   3441     case kExternalUnsignedIntArray:
   3442       return false;
   3443 
   3444     default:
   3445       UNREACHABLE();
   3446       return false;
   3447   }
   3448 }
   3449 
   3450 
   3451 MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
   3452     JSObject* receiver_object,
   3453     ExternalArrayType array_type,
   3454     Code::Flags flags) {
   3455   // ---------- S t a t e --------------
   3456   //  -- lr     : return address
   3457   //  -- r0     : key
   3458   //  -- r1     : receiver
   3459   // -----------------------------------
   3460   Label slow, failed_allocation;
   3461 
   3462   Register key = r0;
   3463   Register receiver = r1;
   3464 
   3465   // Check that the object isn't a smi.
   3466   __ JumpIfSmi(receiver, &slow);
   3467 
   3468   // Check that the key is a smi.
   3469   __ JumpIfNotSmi(key, &slow);
   3470 
   3471   // Make sure that we've got the right map.
   3472   __ ldr(r2, FieldMemOperand(receiver, HeapObject::kMapOffset));
   3473   __ cmp(r2, Operand(Handle<Map>(receiver_object->map())));
   3474   __ b(ne, &slow);
   3475 
   3476   __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
   3477   // r3: elements array
   3478 
   3479   // Check that the index is in range.
   3480   __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
   3481   __ cmp(ip, Operand(key, ASR, kSmiTagSize));
   3482   // Unsigned comparison catches negative, too-large and equal-to-length
          // indices; an index equal to the length is already out of range,
          // hence 'ls' rather than 'lo'.
   3483   __ b(ls, &slow);
   3484 
   3485   __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
   3486   // r3: base pointer of external storage
   3487 
   3488   // We do not untag the smi key; instead we work with it
   3489   // as if it were premultiplied by 2.
   3490   ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
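          // Since the key is index * 2, the addressing modes below fold the
          // untagging into the scale: LSR 1 for 1-byte elements, LSL 0 for
          // 2-byte elements and LSL 1 for 4-byte elements.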
   3491 
   3492   Register value = r2;
   3493   switch (array_type) {
   3494     case kExternalByteArray:
   3495       __ ldrsb(value, MemOperand(r3, key, LSR, 1));
   3496       break;
   3497     case kExternalPixelArray:
   3498     case kExternalUnsignedByteArray:
   3499       __ ldrb(value, MemOperand(r3, key, LSR, 1));
   3500       break;
   3501     case kExternalShortArray:
   3502       __ ldrsh(value, MemOperand(r3, key, LSL, 0));
   3503       break;
   3504     case kExternalUnsignedShortArray:
   3505       __ ldrh(value, MemOperand(r3, key, LSL, 0));
   3506       break;
   3507     case kExternalIntArray:
   3508     case kExternalUnsignedIntArray:
   3509       __ ldr(value, MemOperand(r3, key, LSL, 1));
   3510       break;
   3511     case kExternalFloatArray:
   3512       if (CpuFeatures::IsSupported(VFP3)) {
   3513         CpuFeatures::Scope scope(VFP3);
   3514         __ add(r2, r3, Operand(key, LSL, 1));
   3515         __ vldr(s0, r2, 0);
   3516       } else {
   3517         __ ldr(value, MemOperand(r3, key, LSL, 1));
   3518       }
   3519       break;
   3520     default:
   3521       UNREACHABLE();
   3522       break;
   3523   }
   3524 
   3525   // For integer array types:
   3526   // r2: value
   3527   // For the floating-point array type:
   3528   // s0: value (if VFP3 is supported)
   3529   // r2: value (if VFP3 is not supported)
   3530 
   3531   if (array_type == kExternalIntArray) {
   3532     // For the Int array type, we need to see whether the value can be
   3533     // represented in a Smi. If not, we need to convert it to a
   3534     // HeapNumber. (The UnsignedInt case is handled separately below.)
   3535     Label box_int;
   3536     __ cmp(value, Operand(0xC0000000));
   3537     __ b(mi, &box_int);
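            // The cmp computes value - 0xC0000000 = value + 0x40000000 in
            // 32-bit arithmetic; the N flag (mi) is set exactly when value
            // lies outside the smi range [-2^30, 2^30 - 1].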
   3538     // Tag integer as smi and return it.
   3539     __ mov(r0, Operand(value, LSL, kSmiTagSize));
   3540     __ Ret();
   3541 
   3542     __ bind(&box_int);
   3543     // Allocate a HeapNumber for the result and perform int-to-double
   3544     // conversion.  Don't touch r0 or r1 as they are needed if allocation
   3545     // fails.
   3546     __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
   3547     __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
   3548     // Now we can use r0 for the result as key is not needed any more.
   3549     __ mov(r0, r5);
   3550 
   3551     if (CpuFeatures::IsSupported(VFP3)) {
   3552       CpuFeatures::Scope scope(VFP3);
   3553       __ vmov(s0, value);
   3554       __ vcvt_f64_s32(d0, s0);
   3555       __ sub(r3, r0, Operand(kHeapObjectTag));
   3556       __ vstr(d0, r3, HeapNumber::kValueOffset);
   3557       __ Ret();
   3558     } else {
   3559       WriteInt32ToHeapNumberStub stub(value, r0, r3);
   3560       __ TailCallStub(&stub);
   3561     }
   3562   } else if (array_type == kExternalUnsignedIntArray) {
   3563     // The test is different for unsigned int values. Since we need
   3564     // the value to be in the range of a positive smi, we can't
   3565     // handle either of the top two bits being set in the value.
   3566     if (CpuFeatures::IsSupported(VFP3)) {
   3567       CpuFeatures::Scope scope(VFP3);
   3568       Label box_int, done;
   3569       __ tst(value, Operand(0xC0000000));
   3570       __ b(ne, &box_int);
   3571       // Tag integer as smi and return it.
   3572       __ mov(r0, Operand(value, LSL, kSmiTagSize));
   3573       __ Ret();
   3574 
   3575       __ bind(&box_int);
   3576       __ vmov(s0, value);
   3577       // Allocate a HeapNumber for the result and perform int-to-double
   3578       // conversion. Don't use r0 and r1 as AllocateHeapNumber clobbers all
   3579       // registers - also when jumping due to exhausted young space.
   3580       __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
   3581       __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
   3582 
   3583       __ vcvt_f64_u32(d0, s0);
   3584       __ sub(r1, r2, Operand(kHeapObjectTag));
   3585       __ vstr(d0, r1, HeapNumber::kValueOffset);
   3586 
   3587       __ mov(r0, r2);
   3588       __ Ret();
   3589     } else {
   3590       // Check whether the unsigned integer fits into a smi.
   3591       Label box_int_0, box_int_1, done;
   3592       __ tst(value, Operand(0x80000000));
   3593       __ b(ne, &box_int_0);
   3594       __ tst(value, Operand(0x40000000));
   3595       __ b(ne, &box_int_1);
   3596       // Tag integer as smi and return it.
   3597       __ mov(r0, Operand(value, LSL, kSmiTagSize));
   3598       __ Ret();
   3599 
   3600       Register hiword = value;  // r2.
   3601       Register loword = r3;
   3602 
   3603       __ bind(&box_int_0);
   3604       // Integer does not have leading zeros.
   3605       GenerateUInt2Double(masm(), hiword, loword, r4, 0);
   3606       __ b(&done);
   3607 
   3608       __ bind(&box_int_1);
   3609       // Integer has one leading zero.
   3610       GenerateUInt2Double(masm(), hiword, loword, r4, 1);
   3611 
   3612 
   3613       __ bind(&done);
   3614       // Integer was converted to double in registers hiword:loword.
   3615       // Wrap it into a HeapNumber. Don't use r0 and r1 as AllocateHeapNumber
   3616       // clobbers all registers - also when jumping due to exhausted young
   3617       // space.
   3618       __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
   3619       __ AllocateHeapNumber(r4, r5, r7, r6, &slow);
   3620 
   3621       __ str(hiword, FieldMemOperand(r4, HeapNumber::kExponentOffset));
   3622       __ str(loword, FieldMemOperand(r4, HeapNumber::kMantissaOffset));
   3623 
   3624       __ mov(r0, r4);
   3625       __ Ret();
   3626     }
   3627   } else if (array_type == kExternalFloatArray) {
   3628     // For the floating-point array type, we need to always allocate a
   3629     // HeapNumber.
   3630     if (CpuFeatures::IsSupported(VFP3)) {
   3631       CpuFeatures::Scope scope(VFP3);
   3632       // Allocate a HeapNumber for the result. Don't use r0 and r1 as
   3633       // AllocateHeapNumber clobbers all registers - also when jumping due to
   3634       // exhausted young space.
   3635       __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
   3636       __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
   3637       __ vcvt_f64_f32(d0, s0);
   3638       __ sub(r1, r2, Operand(kHeapObjectTag));
   3639       __ vstr(d0, r1, HeapNumber::kValueOffset);
   3640 
   3641       __ mov(r0, r2);
   3642       __ Ret();
   3643     } else {
   3644       // Allocate a HeapNumber for the result. Don't use r0 and r1 as
   3645       // AllocateHeapNumber clobbers all registers - also when jumping due to
   3646       // exhausted young space.
   3647       __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
   3648       __ AllocateHeapNumber(r3, r4, r5, r6, &slow);
   3649       // VFP is not available, do manual single to double conversion.
   3650 
   3651       // r2: floating point value (binary32)
   3652       // r3: heap number for result
   3653 
   3654       // Extract mantissa to r0. OK to clobber r0 now as there are no jumps to
   3655       // the slow case from here.
   3656       __ and_(r0, value, Operand(kBinary32MantissaMask));
   3657 
   3658       // Extract exponent to r1. OK to clobber r1 now as there are no jumps to
   3659       // the slow case from here.
   3660       __ mov(r1, Operand(value, LSR, kBinary32MantissaBits));
   3661       __ and_(r1, r1, Operand(kBinary32ExponentMask >> kBinary32MantissaBits));
   3662 
   3663       Label exponent_rebiased;
   3664       __ teq(r1, Operand(0x00));
   3665       __ b(eq, &exponent_rebiased);
   3666 
   3667       __ teq(r1, Operand(0xff));
   3668       __ mov(r1, Operand(0x7ff), LeaveCC, eq);
   3669       __ b(eq, &exponent_rebiased);
   3670 
   3671       // Rebias exponent.
   3672       __ add(r1,
   3673              r1,
   3674              Operand(-kBinary32ExponentBias + HeapNumber::kExponentBias));
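              // The binary32 exponent bias is 127 and the binary64 bias is
              // 1023, so rebiasing adds 1023 - 127 = 896 to the exponent.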
   3675 
   3676       __ bind(&exponent_rebiased);
   3677       __ and_(r2, value, Operand(kBinary32SignMask));
   3678       value = no_reg;
   3679       __ orr(r2, r2, Operand(r1, LSL, HeapNumber::kMantissaBitsInTopWord));
   3680 
   3681       // Shift mantissa.
   3682       static const int kMantissaShiftForHiWord =
   3683           kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
   3684 
   3685       static const int kMantissaShiftForLoWord =
   3686           kBitsPerInt - kMantissaShiftForHiWord;
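              // binary32 has 23 mantissa bits and the high word of a binary64
              // holds 20 of its 52, so the hi-word shift is 23 - 20 = 3 and
              // the lo-word shift is 32 - 3 = 29.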
   3687 
   3688       __ orr(r2, r2, Operand(r0, LSR, kMantissaShiftForHiWord));
   3689       __ mov(r0, Operand(r0, LSL, kMantissaShiftForLoWord));
   3690 
   3691       __ str(r2, FieldMemOperand(r3, HeapNumber::kExponentOffset));
   3692       __ str(r0, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
   3693 
   3694       __ mov(r0, r3);
   3695       __ Ret();
   3696     }
   3697 
   3698   } else {
   3699     // Tag integer as smi and return it.
   3700     __ mov(r0, Operand(value, LSL, kSmiTagSize));
   3701     __ Ret();
   3702   }
   3703 
   3704   // Slow case, key and receiver still in r0 and r1.
   3705   __ bind(&slow);
   3706   __ IncrementCounter(
   3707       masm()->isolate()->counters()->keyed_load_external_array_slow(),
   3708       1, r2, r3);
   3709 
   3710   // ---------- S t a t e --------------
   3711   //  -- lr     : return address
   3712   //  -- r0     : key
   3713   //  -- r1     : receiver
   3714   // -----------------------------------
   3715 
   3716   __ Push(r1, r0);
   3717 
   3718   __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
   3719 
   3720   return GetCode(flags);
   3721 }
   3722 
   3723 
   3724 MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
   3725     JSObject* receiver_object,
   3726     ExternalArrayType array_type,
   3727     Code::Flags flags) {
   3728   // ---------- S t a t e --------------
   3729   //  -- r0     : value
   3730   //  -- r1     : key
   3731   //  -- r2     : receiver
   3732   //  -- lr     : return address
   3733   // -----------------------------------
   3734   Label slow, check_heap_number;
   3735 
   3736   // Register usage.
   3737   Register value = r0;
   3738   Register key = r1;
   3739   Register receiver = r2;
   3740   // r3 mostly holds the elements array or the destination external array.
   3741 
   3742   // Check that the object isn't a smi.
   3743   __ JumpIfSmi(receiver, &slow);
   3744 
   3745   // Make sure that we've got the right map.
   3746   __ ldr(r3, FieldMemOperand(receiver, HeapObject::kMapOffset));
   3747   __ cmp(r3, Operand(Handle<Map>(receiver_object->map())));
   3748   __ b(ne, &slow);
   3749 
   3750   __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
   3751 
   3752   // Check that the key is a smi.
   3753   __ JumpIfNotSmi(key, &slow);
   3754 
   3755   // Check that the index is in range.
   3756   __ SmiUntag(r4, key);
   3757   __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
   3758   __ cmp(r4, ip);
   3759   // Unsigned comparison catches both negative and too-large values.
   3760   __ b(hs, &slow);
   3761 
   3762   // Handle both smis and HeapNumbers in the fast path. Go to the
   3763   // runtime for all other kinds of values.
   3764   // r3: external array.
   3765   // r4: key (integer).
   3766   if (array_type == kExternalPixelArray) {
   3767     // Double to pixel conversion is only implemented in the runtime for now.
   3768     __ JumpIfNotSmi(value, &slow);
   3769   } else {
   3770     __ JumpIfNotSmi(value, &check_heap_number);
   3771   }
   3772   __ SmiUntag(r5, value);
   3773   __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
   3774 
   3775   // r3: base pointer of external storage.
   3776   // r4: key (integer).
   3777   // r5: value (integer).
   3778   switch (array_type) {
   3779     case kExternalPixelArray:
   3780       // Clamp the value to [0..255].
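              // Usat saturates: negative values clamp to 0 and values above
              // 255 clamp to 255.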
   3781       __ Usat(r5, 8, Operand(r5));
   3782       __ strb(r5, MemOperand(r3, r4, LSL, 0));
   3783       break;
   3784     case kExternalByteArray:
   3785     case kExternalUnsignedByteArray:
   3786       __ strb(r5, MemOperand(r3, r4, LSL, 0));
   3787       break;
   3788     case kExternalShortArray:
   3789     case kExternalUnsignedShortArray:
   3790       __ strh(r5, MemOperand(r3, r4, LSL, 1));
   3791       break;
   3792     case kExternalIntArray:
   3793     case kExternalUnsignedIntArray:
   3794       __ str(r5, MemOperand(r3, r4, LSL, 2));
   3795       break;
   3796     case kExternalFloatArray:
   3797       // Perform int-to-float conversion and store to memory.
   3798       StoreIntAsFloat(masm(), r3, r4, r5, r6, r7, r9);
   3799       break;
   3800     default:
   3801       UNREACHABLE();
   3802       break;
   3803   }
   3804 
   3805   // Entry registers are intact; r0 holds the value, which is the return value.
   3806   __ Ret();
   3807 
   3808   if (array_type != kExternalPixelArray) {
   3809     // r3: external array.
   3810     // r4: index (integer).
   3811     __ bind(&check_heap_number);
   3812     __ CompareObjectType(value, r5, r6, HEAP_NUMBER_TYPE);
   3813     __ b(ne, &slow);
   3814 
   3815     __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
   3816 
   3817     // r3: base pointer of external storage.
   3818     // r4: key (integer).
   3819 
   3820     // The WebGL specification leaves the behavior of storing NaN and
   3821     // +/-Infinity into integer arrays basically undefined. For more
   3822     // reproducible behavior, convert these to zero.
   3823     if (CpuFeatures::IsSupported(VFP3)) {
   3824       CpuFeatures::Scope scope(VFP3);
   3825 
   3826       if (array_type == kExternalFloatArray) {
   3827         // vldr requires the offset to be a multiple of 4, so we cannot
   3828         // fold -kHeapObjectTag into it.
   3829         __ sub(r5, r0, Operand(kHeapObjectTag));
   3830         __ vldr(d0, r5, HeapNumber::kValueOffset);
   3831         __ add(r5, r3, Operand(r4, LSL, 2));
   3832         __ vcvt_f32_f64(s0, d0);
   3833         __ vstr(s0, r5, 0);
   3834       } else {
   3835         // Need to perform float-to-int conversion.
   3836         // Test for NaN or infinity (both give zero).
   3837         __ ldr(r6, FieldMemOperand(value, HeapNumber::kExponentOffset));
   3838 
   3839         // Hoisted load. vldr requires the offset to be a multiple of 4,
   3840         // so we cannot fold -kHeapObjectTag into it.
   3841         __ sub(r5, value, Operand(kHeapObjectTag));
   3842         __ vldr(d0, r5, HeapNumber::kValueOffset);
   3843 
   3844         __ Sbfx(r6, r6, HeapNumber::kExponentShift, HeapNumber::kExponentBits);
   3845         // NaNs and Infinities have all-one exponents so they sign extend to -1.
   3846         __ cmp(r6, Operand(-1));
   3847         __ mov(r5, Operand(0), LeaveCC, eq);
   3848 
   3849         // Not infinity or NaN: simply convert to int.
   3850         if (IsElementTypeSigned(array_type)) {
   3851           __ vcvt_s32_f64(s0, d0, kDefaultRoundToZero, ne);
   3852         } else {
   3853           __ vcvt_u32_f64(s0, d0, kDefaultRoundToZero, ne);
   3854         }
   3855         __ vmov(r5, s0, ne);
   3856 
   3857         switch (array_type) {
   3858           case kExternalByteArray:
   3859           case kExternalUnsignedByteArray:
   3860             __ strb(r5, MemOperand(r3, r4, LSL, 0));
   3861             break;
   3862           case kExternalShortArray:
   3863           case kExternalUnsignedShortArray:
   3864             __ strh(r5, MemOperand(r3, r4, LSL, 1));
   3865             break;
   3866           case kExternalIntArray:
   3867           case kExternalUnsignedIntArray:
   3868             __ str(r5, MemOperand(r3, r4, LSL, 2));
   3869             break;
   3870           default:
   3871             UNREACHABLE();
   3872             break;
   3873         }
   3874       }
   3875 
   3876       // Entry registers are intact; r0 holds the value, which is the
   3877       // return value.
   3878       __ Ret();
   3879     } else {
   3880       // VFP3 is not available; do manual conversions.
   3881       __ ldr(r5, FieldMemOperand(value, HeapNumber::kExponentOffset));
   3882       __ ldr(r6, FieldMemOperand(value, HeapNumber::kMantissaOffset));
   3883 
   3884       if (array_type == kExternalFloatArray) {
   3885         Label done, nan_or_infinity_or_zero;
   3886         static const int kMantissaInHiWordShift =
   3887             kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
   3888 
   3889         static const int kMantissaInLoWordShift =
   3890             kBitsPerInt - kMantissaInHiWordShift;
   3891 
   3892         // Test for all special exponent values: zeros, subnormal numbers, NaNs
   3893         // and infinities. All these should be converted to 0.
   3894         __ mov(r7, Operand(HeapNumber::kExponentMask));
   3895         __ and_(r9, r5, Operand(r7), SetCC);
   3896         __ b(eq, &nan_or_infinity_or_zero);
   3897 
   3898         __ teq(r9, Operand(r7));
   3899         __ mov(r9, Operand(kBinary32ExponentMask), LeaveCC, eq);
   3900         __ b(eq, &nan_or_infinity_or_zero);
   3901 
   3902         // Rebias exponent.
   3903         __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
   3904         __ add(r9,
   3905                r9,
   3906                Operand(kBinary32ExponentBias - HeapNumber::kExponentBias));
   3907 
   3908         __ cmp(r9, Operand(kBinary32MaxExponent));
   3909         __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, gt);
   3910         __ orr(r5, r5, Operand(kBinary32ExponentMask), LeaveCC, gt);
   3911         __ b(gt, &done);
   3912 
        __ cmp(r9, Operand(kBinary32MinExponent));
        __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, lt);
        __ b(lt, &done);

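        // In-range exponent: assemble sign, rebiased exponent and mantissa
        // bits from both words into the binary32 result.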
        __ and_(r7, r5, Operand(HeapNumber::kSignMask));
        __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
        __ orr(r7, r7, Operand(r5, LSL, kMantissaInHiWordShift));
        __ orr(r7, r7, Operand(r6, LSR, kMantissaInLoWordShift));
        __ orr(r5, r7, Operand(r9, LSL, kBinary32ExponentShift));

        __ bind(&done);
        __ str(r5, MemOperand(r3, r4, LSL, 2));
        // Entry registers are intact; r0 holds the value, which is the
        // return value.
        __ Ret();

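        // Here r9 is 0 for zeros and subnormals, or kBinary32ExponentMask
        // for NaNs and infinities; merging in the sign and mantissa bits
        // produces the corresponding binary32 bit pattern.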
        __ bind(&nan_or_infinity_or_zero);
        __ and_(r7, r5, Operand(HeapNumber::kSignMask));
        __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
        __ orr(r9, r9, r7);
        __ orr(r9, r9, Operand(r5, LSL, kMantissaInHiWordShift));
        __ orr(r5, r9, Operand(r6, LSR, kMantissaInLoWordShift));
        __ b(&done);
      } else {
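        // Manual truncating double -> int conversion: unbias the exponent,
        // restore the mantissa's implicit leading 1, shift the mantissa so
        // its integer part fits in one word, then apply the sign. Special
        // exponents become 0 and overflow becomes min_value below.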
        bool is_signed_type = IsElementTypeSigned(array_type);
        int meaningful_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt;
        int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;
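        // meaningful_bits is the number of magnitude bits that fit in the
        // destination word; overflow stores min_value: INT32_MIN for signed
        // element types, 0 for unsigned ones.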

        Label done, sign;

        // Test for all special exponent values: zeros, subnormal numbers,
        // NaNs and infinities. All these should be converted to 0.
        __ mov(r7, Operand(HeapNumber::kExponentMask));
        __ and_(r9, r5, Operand(r7), SetCC);
        __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
        __ b(eq, &done);

        __ teq(r9, Operand(r7));
        __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
        __ b(eq, &done);

        // Unbias exponent.
        __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
        __ sub(r9, r9, Operand(HeapNumber::kExponentBias), SetCC);
        // If the exponent is negative, the result is 0.
        __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, mi);
        __ b(mi, &done);

        // If the exponent is too big, the result is min_value.
        __ cmp(r9, Operand(meaningful_bits - 1));
        __ mov(r5, Operand(min_value), LeaveCC, ge);
        __ b(ge, &done);

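        // Remember the sign, isolate the top mantissa bits and restore the
        // implicit leading 1 of a normalized double.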
        __ and_(r7, r5, Operand(HeapNumber::kSignMask), SetCC);
        __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
        __ orr(r5, r5, Operand(1u << HeapNumber::kMantissaBitsInTopWord));

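        // r9 = kMantissaBitsInTopWord - exponent. If that is non-negative,
        // the integer part fits in the top word: shift right. Otherwise
        // shift left and fill the freed low bits from the mantissa word
        // in r6.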
        __ rsb(r9, r9, Operand(HeapNumber::kMantissaBitsInTopWord), SetCC);
        __ mov(r5, Operand(r5, LSR, r9), LeaveCC, pl);
        __ b(pl, &sign);

        __ rsb(r9, r9, Operand(0, RelocInfo::NONE));
        __ mov(r5, Operand(r5, LSL, r9));
        __ rsb(r9, r9, Operand(meaningful_bits));
        __ orr(r5, r5, Operand(r6, LSR, r9));

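        // Negate the magnitude if the original double was negative.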
        __ bind(&sign);
        __ teq(r7, Operand(0, RelocInfo::NONE));
        __ rsb(r5, r5, Operand(0, RelocInfo::NONE), LeaveCC, ne);

        __ bind(&done);
        switch (array_type) {
          case kExternalByteArray:
          case kExternalUnsignedByteArray:
            __ strb(r5, MemOperand(r3, r4, LSL, 0));
            break;
          case kExternalShortArray:
          case kExternalUnsignedShortArray:
            __ strh(r5, MemOperand(r3, r4, LSL, 1));
            break;
          case kExternalIntArray:
          case kExternalUnsignedIntArray:
            __ str(r5, MemOperand(r3, r4, LSL, 2));
            break;
          default:
            UNREACHABLE();
            break;
        }
      }
    }
  }

  // Slow case: call runtime.
  __ bind(&slow);

  // Entry registers are intact.
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  __ mov(r1, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
  __ mov(r0, Operand(Smi::FromInt(
      Code::ExtractExtraICStateFromFlags(flags) & kStrictMode)));
  __ Push(r1, r0);
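  // Together with the receiver, key and value pushed above, these make the
  // five arguments consumed by Runtime::kSetProperty: receiver, key, value,
  // PropertyAttributes and the strict-mode flag (assumed order).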

  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);

  return GetCode(flags);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM