// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "codegen-inl.h"
#include "ic-inl.h"
#include "runtime.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


// ----------------------------------------------------------------------------
// Static IC stub generators.
//

#define __ ACCESS_MASM(masm)


// Helper function used from LoadIC/CallIC GenerateNormal.
static void GenerateDictionaryLoad(MacroAssembler* masm,
                                   Label* miss,
                                   Register t0,
                                   Register t1) {
  // Register use:
  //
  // t0 - used to hold the property dictionary.
  //
  // t1 - initially the receiver
  //    - used for the index into the property dictionary
  //    - holds the result on exit.
  //
  // r3 - used as temporary and to hold the capacity of the property
  //      dictionary.
  //
  // r2 - holds the name of the property and is unchanged.

  Label done;

  // Check for the absence of an interceptor.
  // Load the map into t0.
  __ ldr(t0, FieldMemOperand(t1, JSObject::kMapOffset));
  // Test the has_named_interceptor bit in the map.
  __ ldr(r3, FieldMemOperand(t0, Map::kInstanceAttributesOffset));
  __ tst(r3, Operand(1 << (Map::kHasNamedInterceptor + (3 * 8))));
  // Jump to miss if the interceptor bit is set.
  __ b(ne, miss);

  // Bail out if we have a JS global proxy object.
  __ ldrb(r3, FieldMemOperand(t0, Map::kInstanceTypeOffset));
  __ cmp(r3, Operand(JS_GLOBAL_PROXY_TYPE));
  __ b(eq, miss);

  // Possible work-around for http://crbug.com/16276.
  // See also: http://codereview.chromium.org/155418.
  __ cmp(r3, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ b(eq, miss);
  __ cmp(r3, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ b(eq, miss);

  // Check that the properties array is a dictionary.
  __ ldr(t0, FieldMemOperand(t1, JSObject::kPropertiesOffset));
  __ ldr(r3, FieldMemOperand(t0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, miss);

  // Compute the capacity mask.
  const int kCapacityOffset = StringDictionary::kHeaderSize +
      StringDictionary::kCapacityIndex * kPointerSize;
  __ ldr(r3, FieldMemOperand(t0, kCapacityOffset));
  __ mov(r3, Operand(r3, ASR, kSmiTagSize));  // convert smi to int
  __ sub(r3, r3, Operand(1));

  const int kElementsStartOffset = StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;

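  // Each dictionary entry spans StringDictionary::kEntrySize (== 3) pointers:
  // the key, the value and the property details, in that order. The probe
  // loop below compares the key slot; on a hit the details and value slots
  // are read at fixed offsets from it.
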
  // Generate an unrolled loop that performs a few probes before
  // giving up. Measurements done on Gmail indicate that 2 probes
  // cover ~93% of loads from dictionaries.
  static const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ ldr(t1, FieldMemOperand(r2, String::kHashFieldOffset));
    if (i > 0) {
      // Add the probe offset (i + i * i) left shifted to avoid right shifting
      // the hash in a separate instruction. The value hash + i + i * i is right
      // shifted in the following and instruction.
      ASSERT(StringDictionary::GetProbeOffset(i) <
             1 << (32 - String::kHashFieldOffset));
      __ add(t1, t1, Operand(
          StringDictionary::GetProbeOffset(i) << String::kHashShift));
    }
    __ and_(t1, r3, Operand(t1, LSR, String::kHashShift));

    // Scale the index by multiplying by the element size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ add(t1, t1, Operand(t1, LSL, 1));  // t1 = t1 * 3

    // Check if the key is identical to the name.
    __ add(t1, t0, Operand(t1, LSL, 2));
    __ ldr(ip, FieldMemOperand(t1, kElementsStartOffset));
    __ cmp(r2, Operand(ip));
    if (i != kProbes - 1) {
      __ b(eq, &done);
    } else {
      __ b(ne, miss);
    }
  }

  // Check that the value is a normal property.
  __ bind(&done);  // t1 == t0 + 4*index
  __ ldr(r3, FieldMemOperand(t1, kElementsStartOffset + 2 * kPointerSize));
  __ tst(r3, Operand(PropertyDetails::TypeField::mask() << kSmiTagSize));
  __ b(ne, miss);

  // Get the value at the masked, scaled index and return.
  __ ldr(t1, FieldMemOperand(t1, kElementsStartOffset + 1 * kPointerSize));
}


void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  __ ldr(r0, MemOperand(sp, 0));

  StubCompiler::GenerateLoadArrayLength(masm, r0, r3, &miss);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}


void LoadIC::GenerateStringLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  __ ldr(r0, MemOperand(sp, 0));

  StubCompiler::GenerateLoadStringLength(masm, r0, r1, r3, &miss);
  // Cache miss: Jump to runtime.
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}


void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  // Load receiver.
  __ ldr(r0, MemOperand(sp, 0));

  StubCompiler::GenerateLoadFunctionPrototype(masm, r0, r1, r3, &miss);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}


// Defined in ic.cc.
Object* CallIC_Miss(Arguments args);

void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label number, non_number, non_string, boolean, probe, miss;

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  // Probe the stub cache.
  Code::Flags flags =
      Code::ComputeFlags(Code::CALL_IC, NOT_IN_LOOP, MONOMORPHIC, NORMAL, argc);
  StubCache::GenerateProbe(masm, flags, r1, r2, r3, no_reg);

  // If the stub cache probing failed, the receiver might be a value.
  // For value objects, we use the map of the prototype objects for
  // the corresponding JSValue for the cache and that is what we need
  // to probe.
  //
  // Check for number.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &number);
  __ CompareObjectType(r1, r3, r3, HEAP_NUMBER_TYPE);
  __ b(ne, &non_number);
  __ bind(&number);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::NUMBER_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for string.
  __ bind(&non_number);
  __ cmp(r3, Operand(FIRST_NONSTRING_TYPE));
  __ b(hs, &non_string);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::STRING_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for boolean.
  __ bind(&non_string);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ cmp(r1, ip);
  __ b(eq, &boolean);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &miss);
  __ bind(&boolean);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::BOOLEAN_FUNCTION_INDEX, r1);

  // Probe the stub cache for the value object.
  __ bind(&probe);
  StubCache::GenerateProbe(masm, flags, r1, r2, r3, no_reg);

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm, argc);
}


static void GenerateNormalHelper(MacroAssembler* masm,
                                 int argc,
                                 bool is_global_object,
                                 Label* miss) {
  // Search dictionary - put result in register r1.
  GenerateDictionaryLoad(masm, miss, r0, r1);

  // Check that the value isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the value is a JSFunction.
  __ CompareObjectType(r1, r0, r0, JS_FUNCTION_TYPE);
  __ b(ne, miss);

  // Patch the receiver with the global proxy if necessary.
  if (is_global_object) {
    __ ldr(r0, MemOperand(sp, argc * kPointerSize));
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r0, MemOperand(sp, argc * kPointerSize));
  }

  // Invoke the function.
  ParameterCount actual(argc);
  __ InvokeFunction(r1, actual, JUMP_FUNCTION);
}


void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss, global_object, non_global_object;

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the receiver is a valid JS object.  Put the map in r3.
  __ CompareObjectType(r1, r3, r0, FIRST_JS_OBJECT_TYPE);
  __ b(lt, &miss);

  // If this assert fails, we have to check upper bound too.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);

  // Check for access to global object.
  __ cmp(r0, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ b(eq, &global_object);
  __ cmp(r0, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ b(ne, &non_global_object);

  // Accessing global object: Load and invoke.
  __ bind(&global_object);
  // Check that the global object does not require access checks.
  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &miss);
  GenerateNormalHelper(masm, argc, true, &miss);

  // Accessing non-global object: Check for access to global proxy.
  Label global_proxy, invoke;
  __ bind(&non_global_object);
  __ cmp(r0, Operand(JS_GLOBAL_PROXY_TYPE));
  __ b(eq, &global_proxy);
  // Check that the non-global, non-global-proxy object does not
  // require access checks.
  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &miss);
  __ bind(&invoke);
  GenerateNormalHelper(masm, argc, false, &miss);

  // Global object access: Check access rights.
  __ bind(&global_proxy);
  __ CheckAccessGlobalProxy(r1, r0, &miss);
  __ b(&invoke);

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm, argc);
}


void CallIC::GenerateMiss(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Get the receiver of the function from the stack.
  __ ldr(r3, MemOperand(sp, argc * kPointerSize));

  __ EnterInternalFrame();

  // Push the receiver and the name of the function.
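  // stm with db_w stores the lowest-numbered register at the lowest address,
  // so the name (r2) ends up at sp[0] and the receiver (r3) at sp[4].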
  __ stm(db_w, sp, r2.bit() | r3.bit());

  // Call the entry.
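  // The two values pushed above (name and receiver) are the arguments for the
  // miss handler; pass their count in r0 and the handler's entry point in r1
  // for the C entry stub.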
  __ mov(r0, Operand(2));
  __ mov(r1, Operand(ExternalReference(IC_Utility(kCallIC_Miss))));

  CEntryStub stub(1);
  __ CallStub(&stub);

  // Move result to r1 and leave the internal frame.
  __ mov(r1, Operand(r0));
  __ LeaveInternalFrame();

  // Check if the receiver is a global object of some sort.
  Label invoke, global;
  __ ldr(r2, MemOperand(sp, argc * kPointerSize));  // receiver
  __ tst(r2, Operand(kSmiTagMask));
  __ b(eq, &invoke);
  __ CompareObjectType(r2, r3, r3, JS_GLOBAL_OBJECT_TYPE);
  __ b(eq, &global);
  __ cmp(r3, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ b(ne, &invoke);

  // Patch the receiver on the stack.
  __ bind(&global);
  __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
  __ str(r2, MemOperand(sp, argc * kPointerSize));

  // Invoke the function.
  ParameterCount actual(argc);
  __ bind(&invoke);
  __ InvokeFunction(r1, actual, JUMP_FUNCTION);
}


// Defined in ic.cc.
Object* LoadIC_Miss(Arguments args);

void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  __ ldr(r0, MemOperand(sp, 0));
  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC,
                                         NOT_IN_LOOP,
                                         MONOMORPHIC);
  StubCache::GenerateProbe(masm, flags, r0, r2, r3, no_reg);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}


void LoadIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss, probe, global;

  __ ldr(r0, MemOperand(sp, 0));
  // Check that the receiver isn't a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the receiver is a valid JS object.  Put the map in r3.
  __ CompareObjectType(r0, r3, r1, FIRST_JS_OBJECT_TYPE);
  __ b(lt, &miss);
  // If this assert fails, we have to check upper bound too.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);

  // Check for access to global object (unlikely).
  __ cmp(r1, Operand(JS_GLOBAL_PROXY_TYPE));
  __ b(eq, &global);

  // Check for non-global object that requires access check.
  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &miss);

  __ bind(&probe);
  GenerateDictionaryLoad(masm, &miss, r1, r0);
  __ Ret();

  // Global object access: Check access rights.
  __ bind(&global);
  __ CheckAccessGlobalProxy(r0, r1, &miss);
  __ b(&probe);

  // Cache miss: Restore receiver from stack and jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm);
}


void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  __ ldr(r3, MemOperand(sp, 0));
  __ stm(db_w, sp, r2.bit() | r3.bit());

  // Perform tail call to the entry.
  __ TailCallRuntime(ExternalReference(IC_Utility(kLoadIC_Miss)), 2, 1);
}


// TODO(181): Implement map patching once loop nesting is tracked on the
// ARM platform so we can generate inlined fast-case code loads in
// loops.
void LoadIC::ClearInlinedVersion(Address address) {}
bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) {
  return false;
}

void KeyedLoadIC::ClearInlinedVersion(Address address) {}
bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) {
  return false;
}

void KeyedStoreIC::ClearInlinedVersion(Address address) {}
void KeyedStoreIC::RestoreInlinedVersion(Address address) {}
bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) {
  return false;
}


Object* KeyedLoadIC_Miss(Arguments args);


void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[4]  : receiver
  // -----------------------------------

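  // Reload the key and the receiver from the stack and push copies of them
  // as the two arguments for the miss handler.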
  __ ldm(ia, sp, r2.bit() | r3.bit());
  __ stm(db_w, sp, r2.bit() | r3.bit());

  __ TailCallRuntime(ExternalReference(IC_Utility(kKeyedLoadIC_Miss)), 2, 1);
}


void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[4]  : receiver
  // -----------------------------------

  __ ldm(ia, sp, r2.bit() | r3.bit());
  __ stm(db_w, sp, r2.bit() | r3.bit());

  __ TailCallRuntime(ExternalReference(Runtime::kGetProperty), 2, 1);
}


void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[4]  : receiver
  // -----------------------------------
  Label slow, fast;

  // Get the key and receiver object from the stack.
  __ ldm(ia, sp, r0.bit() | r1.bit());

  // Check that the object isn't a smi.
  __ BranchOnSmi(r1, &slow);
  // Get the map of the receiver.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  // Check bit field.
  __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ tst(r3, Operand(kSlowCaseBitFieldMask));
  __ b(ne, &slow);
  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing into string
  // objects works as intended.
  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  __ cmp(r2, Operand(JS_OBJECT_TYPE));
  __ b(lt, &slow);

  // Check that the key is a smi.
  __ BranchOnNotSmi(r0, &slow);
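  // Untag the key (shift out the smi tag) so it can be used as an element
  // index.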
  __ mov(r0, Operand(r0, ASR, kSmiTagSize));

  // Get the elements array of the object.
  __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
  // Check that the object is in fast mode (not dictionary).
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &slow);
  // Check that the key (index) is within bounds.
  __ ldr(r3, FieldMemOperand(r1, Array::kLengthOffset));
  __ cmp(r0, Operand(r3));
  __ b(lo, &fast);

  // Slow case: Push extra copies of the arguments (2).
  __ bind(&slow);
  __ IncrementCounter(&Counters::keyed_load_generic_slow, 1, r0, r1);
  GenerateRuntimeGetProperty(masm);

  // Fast case: Do the load.
  __ bind(&fast);
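  // r1 is a tagged pointer to the elements array; skip the FixedArray header
  // and remove the heap-object tag to get the address of the first element.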
  __ add(r3, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(r0, ip);
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ b(eq, &slow);

  __ Ret();
}


void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[4]  : receiver
  // -----------------------------------

  GenerateGeneric(masm);
}


void KeyedLoadIC::GenerateExternalArray(MacroAssembler* masm,
                                        ExternalArrayType array_type) {
  // TODO(476): port specialized code.
  GenerateGeneric(masm);
}


void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[4]  : receiver
  // -----------------------------------
  Label slow;

  // Get the key and receiver object from the stack.
  __ ldm(ia, sp, r0.bit() | r1.bit());

  // Check that the receiver isn't a smi.
  __ BranchOnSmi(r1, &slow);

  // Check that the key is a smi.
  __ BranchOnNotSmi(r0, &slow);

  // Get the map of the receiver.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));

  // Check that it has indexed interceptor and access checks
  // are not enabled for this object.
  __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ and_(r3, r3, Operand(kSlowCaseBitFieldMask));
  __ cmp(r3, Operand(1 << Map::kHasIndexedInterceptor));
  __ b(ne, &slow);

  // Everything is fine, call runtime.
  __ push(r1);  // receiver
  __ push(r0);  // key

  // Perform tail call to the entry.
  __ TailCallRuntime(ExternalReference(
        IC_Utility(kKeyedLoadPropertyWithInterceptor)), 2, 1);

  __ bind(&slow);
  GenerateMiss(masm);
}


void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[1]  : receiver
  // -----------------------------------

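  // Reload the key and the receiver and re-push them together with the value:
  // the miss handler takes three arguments.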
  __ ldm(ia, sp, r2.bit() | r3.bit());
  __ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());

  __ TailCallRuntime(ExternalReference(IC_Utility(kKeyedStoreIC_Miss)), 3, 1);
}


void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[1]  : receiver
  // -----------------------------------
  __ ldm(ia, sp, r1.bit() | r3.bit());  // r0 == value, r1 == key, r3 == object
  __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit());

  __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3, 1);
}


void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[1]  : receiver
  // -----------------------------------
  Label slow, fast, array, extra, exit;

  // Get the key and the object from the stack.
  __ ldm(ia, sp, r1.bit() | r3.bit());  // r1 = key, r3 = receiver
  // Check that the key is a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(ne, &slow);
  // Check that the object isn't a smi.
  __ tst(r3, Operand(kSmiTagMask));
  __ b(eq, &slow);
  // Get the map of the object.
  __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks.  We need
  // to do this because this generic stub does not perform map checks.
  __ ldrb(ip, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &slow);
  // Check if the object is a JS array or not.
  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  __ cmp(r2, Operand(JS_ARRAY_TYPE));
  // r1 == key.
  __ b(eq, &array);
  // Check that the object is some kind of JS object.
  __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, &slow);


  // Object case: Check key against length in the elements array.
  __ ldr(r3, FieldMemOperand(r3, JSObject::kElementsOffset));
  // Check that the object is in fast mode (not dictionary).
  __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &slow);
  // Untag the key (for checking against untagged length in the fixed array).
  __ mov(r1, Operand(r1, ASR, kSmiTagSize));
  // Compute address to store into and check array bounds.
  __ add(r2, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
  __ ldr(ip, FieldMemOperand(r3, FixedArray::kLengthOffset));
  __ cmp(r1, Operand(ip));
  __ b(lo, &fast);


  // Slow case:
  __ bind(&slow);
  GenerateRuntimeSetProperty(masm);

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  // r0 == value, r1 == key, r2 == elements, r3 == object
  __ bind(&extra);
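  // The condition flags are still set by the key/length comparison in the
  // array case below; 'ne' here means the key is strictly greater than the
  // length, so storing would leave a hole in the array.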
  __ b(ne, &slow);  // do not leave holes in the array
  __ mov(r1, Operand(r1, ASR, kSmiTagSize));  // untag
  __ ldr(ip, FieldMemOperand(r2, Array::kLengthOffset));
  __ cmp(r1, Operand(ip));
  __ b(hs, &slow);
  __ mov(r1, Operand(r1, LSL, kSmiTagSize));  // restore tag
  __ add(r1, r1, Operand(1 << kSmiTagSize));  // and increment
  __ str(r1, FieldMemOperand(r3, JSArray::kLengthOffset));
  __ mov(r3, Operand(r2));
  // NOTE: Computing the address to store into must take the fact
  // that the key has been incremented into account.
  int displacement = FixedArray::kHeaderSize - kHeapObjectTag -
      ((1 << kSmiTagSize) * 2);
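  // With kSmiTagSize == 1, (1 << kSmiTagSize) * 2 is one pointer-sized
  // element on ARM; it is subtracted because the scaled key used below is
  // based on the incremented key and is therefore one element too large.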
  __ add(r2, r2, Operand(displacement));
  __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ b(&fast);


  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode; if it is, the
  // length is always a smi.
  // r0 == value, r3 == object
  __ bind(&array);
  __ ldr(r2, FieldMemOperand(r3, JSObject::kElementsOffset));
  __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &slow);

  // Check the key against the length in the array, compute the
  // address to store into and fall through to fast case.
  __ ldr(r1, MemOperand(sp));  // restore key
  // r0 == value, r1 == key, r2 == elements, r3 == object.
  __ ldr(ip, FieldMemOperand(r3, JSArray::kLengthOffset));
  __ cmp(r1, Operand(ip));
  __ b(hs, &extra);
  __ mov(r3, Operand(r2));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));


  // Fast case: Do the store.
  // r0 == value, r2 == address to store into, r3 == elements
  __ bind(&fast);
  __ str(r0, MemOperand(r2));
  // Skip write barrier if the written value is a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &exit);
  // Update write barrier for the elements array address.
  __ sub(r1, r2, Operand(r3));
  __ RecordWrite(r3, r1, r2);

  __ bind(&exit);
  __ Ret();
}


void KeyedStoreIC::GenerateExternalArray(MacroAssembler* masm,
                                         ExternalArrayType array_type) {
  // TODO(476): port specialized code.
  GenerateGeneric(masm);
}


void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // The receiver is already in r1; probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(Code::STORE_IC,
                                         NOT_IN_LOOP,
                                         MONOMORPHIC);
  StubCache::GenerateProbe(masm, flags, r1, r2, r3, no_reg);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}


void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

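  // Push the receiver, the name and the value as the three arguments for the
  // miss handler.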
  __ push(r1);
  __ stm(db_w, sp, r2.bit() | r0.bit());

  // Perform tail call to the entry.
  __ TailCallRuntime(ExternalReference(IC_Utility(kStoreIC_Miss)), 3, 1);
}


#undef __


} }  // namespace v8::internal