// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_MIPS

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register receiver,
                       Register name,
                       // Number of the cache entry, not scaled.
                       Register offset,
                       Register scratch,
                       Register scratch2,
                       Register offset_scratch) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
  uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());

  // Check the relative positions of the address fields.
  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));
  ASSERT(map_off_addr > key_off_addr);
  ASSERT((map_off_addr - key_off_addr) % 4 == 0);
  ASSERT((map_off_addr - key_off_addr) < (256 * 4));

  Label miss;
  Register base_addr = scratch;
  scratch = no_reg;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
  __ sll(offset_scratch, offset, 1);
  __ Addu(offset_scratch, offset_scratch, offset);

  // Calculate the base address of the entry.
  __ li(base_addr, Operand(key_offset));
  __ sll(at, offset_scratch, kPointerSizeLog2);
  __ Addu(base_addr, base_addr, at);
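  // base_addr now points at the entry's key slot:
  //   key_offset + ((offset * 3) << kPointerSizeLog2), i.e. 12 bytes per
  // entry. The map and value slots are reached below via their fixed
  // deltas from the key array.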

  // Check that the key in the entry matches the name.
  __ lw(at, MemOperand(base_addr, 0));
  __ Branch(&miss, ne, name, Operand(at));

  // Check the map matches.
  __ lw(at, MemOperand(base_addr, map_off_addr - key_off_addr));
  __ lw(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Branch(&miss, ne, at, Operand(scratch2));

  // Get the code entry from the cache.
  Register code = scratch2;
  scratch2 = no_reg;
  __ lw(code, MemOperand(base_addr, value_off_addr - key_off_addr));

  // Check that the flags match what we're looking for.
  Register flags_reg = base_addr;
  base_addr = no_reg;
  __ lw(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
  __ And(flags_reg, flags_reg, Operand(~Code::kFlagsNotUsedInLookup));
  __ Branch(&miss, ne, flags_reg, Operand(flags));

#ifdef DEBUG
    if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
      __ jmp(&miss);
    } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
      __ jmp(&miss);
    }
#endif

  // Jump to the first instruction in the code stub.
  __ Addu(at, code, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);

  // Miss: fall through.
  __ bind(&miss);
}


// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so the code at
// miss_label must always perform a complete, backup property check.
// This function is safe to call if the receiver has fast properties.
// Name must be unique and receiver must be a heap object.
static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                             Label* miss_label,
                                             Register receiver,
                                             Handle<Name> name,
                                             Register scratch0,
                                             Register scratch1) {
  ASSERT(name->IsUniqueName());
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ Branch(miss_label, ne, scratch0, Operand(zero_reg));

  // Check that receiver is a JSObject.
  __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(miss_label, lt, scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));

  // Load properties array.
  Register properties = scratch0;
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ Branch(miss_label, ne, map, Operand(tmp));

  // Restore the temporarily used register.
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  NameDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   receiver,
                                                   properties,
                                                   name,
                                                   scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}


void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;

  // Make sure that the code is valid. The multiplying code relies on the
  // entry size being 12 (three 32-bit words: key, value, map).
  ASSERT(sizeof(Entry) == 12);

  // Make sure that the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(name));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  // Check register validity.
  ASSERT(!scratch.is(no_reg));
  ASSERT(!extra.is(no_reg));
  ASSERT(!extra2.is(no_reg));
  ASSERT(!extra3.is(no_reg));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
                      extra2, extra3);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ lw(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
  __ lw(at, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Addu(scratch, scratch, at);
  uint32_t mask = kPrimaryTableSize - 1;
  // We shift out the last two bits because they are not part of the hash and
  // they are always 01 for maps.
  __ srl(scratch, scratch, kHeapObjectTagSize);
  __ Xor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
  __ And(scratch, scratch, Operand(mask));
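  // scratch now holds the primary table index:
  //   (((name_hash + receiver_map) >> kHeapObjectTagSize) ^
  //    (flags >> kHeapObjectTagSize)) & (kPrimaryTableSize - 1).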

  // Probe the primary table.
  ProbeTable(isolate,
             masm,
             flags,
             kPrimary,
             receiver,
             name,
             scratch,
             extra,
             extra2,
             extra3);

  // Primary miss: Compute hash for secondary probe.
  __ srl(at, name, kHeapObjectTagSize);
  __ Subu(scratch, scratch, at);
  uint32_t mask2 = kSecondaryTableSize - 1;
  __ Addu(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
  __ And(scratch, scratch, Operand(mask2));
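  // scratch now holds the secondary table index:
  //   (primary_index - (name >> kHeapObjectTagSize) +
  //    (flags >> kHeapObjectTagSize)) & (kSecondaryTableSize - 1).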

  // Probe the secondary table.
  ProbeTable(isolate,
             masm,
             flags,
             kSecondary,
             receiver,
             name,
             scratch,
             extra,
             extra2,
             extra3);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
                      extra2, extra3);
}


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ lw(prototype,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  __ lw(prototype,
        FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  __ lw(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map.  The global functions all have initial maps.
  __ lw(prototype,
        FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  Isolate* isolate = masm->isolate();
  // Check we're still in the same context.
  __ lw(prototype,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  ASSERT(!prototype.is(at));
  __ li(at, isolate->global_object());
  __ Branch(miss, ne, prototype, Operand(at));
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));
  // Load its initial map. The global functions all have initial maps.
  __ li(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            bool inobject,
                                            int index,
                                            Representation representation) {
  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
  int offset = index * kPointerSize;
  if (!inobject) {
    // Calculate the offset into the properties array.
    offset = offset + FixedArray::kHeaderSize;
    __ lw(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
    src = dst;
  }
  __ lw(dst, FieldMemOperand(src, offset));
}


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ GetObjectType(receiver, scratch, scratch);
  __ Branch(miss_label, ne, scratch, Operand(JS_ARRAY_TYPE));

  // Load length directly from the JS array.
  __ Ret(USE_DELAY_SLOT);
  __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
}


// Generate code to check if an object is a string.  If the object is a
// heap object, its map's instance type is left in the scratch1 register.
// If the instance type is not needed, scratch1 and scratch2 may be the
// same register.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, smi, t0);

  // Check that the object is a string.
  __ lw(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ And(scratch2, scratch1, Operand(kIsNotStringMask));
  // The cast resolves the Operand constructor overload for the 0x0 argument.
  __ Branch(non_string_object,
            ne,
            scratch2,
            Operand(static_cast<int32_t>(kStringTag)));
}


// Generate code to load the length from a string object and return the length.
// If the receiver object is not a string or a wrapped string object, the
// execution continues at the miss label. The register containing the
// receiver is potentially clobbered.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss,
                                            bool support_wrappers) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch1 register.
  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss,
                      support_wrappers ? &check_wrapper : miss);

  // Load length directly from the string.
  __ Ret(USE_DELAY_SLOT);
  __ lw(v0, FieldMemOperand(receiver, String::kLengthOffset));

  if (support_wrappers) {
    // Check if the object is a JSValue wrapper.
    __ bind(&check_wrapper);
    __ Branch(miss, ne, scratch1, Operand(JS_VALUE_TYPE));

    // Unwrap the value and check if the wrapped value is a string.
    __ lw(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
    GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
    __ Ret(USE_DELAY_SLOT);
    __ lw(v0, FieldMemOperand(scratch1, String::kLengthOffset));
  }
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, scratch1);
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
static void GenerateCheckPropertyCell(MacroAssembler* masm,
                                      Handle<GlobalObject> global,
                                      Handle<Name> name,
                                      Register scratch,
                                      Label* miss) {
  Handle<Cell> cell = GlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  __ li(scratch, Operand(cell));
  __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(miss, ne, scratch, Operand(at));
}


void BaseStoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<GlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}


// Generate StoreTransition code; the value is passed in the a0 register.
// After executing the generated code, the receiver_reg and name_reg
// may be clobbered.
void BaseStoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                    Handle<JSObject> object,
                                                    LookupResult* lookup,
                                                    Handle<Map> transition,
                                                    Handle<Name> name,
                                                    Register receiver_reg,
                                                    Register storage_reg,
                                                    Register value_reg,
                                                    Register scratch1,
                                                    Register scratch2,
                                                    Register scratch3,
                                                    Label* miss_label,
                                                    Label* slow) {
  // a0 : value.
  Label exit;

  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ LoadObject(scratch1, constant);
    __ Branch(miss_label, ne, value_reg, Operand(scratch1));
  } else if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    Label do_store, heap_number;
    __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow);

    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(scratch1, value_reg);
    __ mtc1(scratch1, f6);  // Move the untagged int32 into the FPU.
    __ cvt_d_w(f4, f6);     // Convert the 32-bit word to a double.
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ sdc1(f4, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ push(receiver_reg);
    __ li(a2, Operand(transition));
    __ Push(a2, a0);
    __ TailCallExternalReference(
           ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                             masm->isolate()),
           3, 1);
    return;
  }

  // Update the map of the object.
  __ li(scratch1, Operand(transition));
  __ sw(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kRAHasNotBeenSaved,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  if (details.type() == CONSTANT) {
    ASSERT(value_reg.is(a0));
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a0);
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
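  // For tagged fields the stored value may still be a smi, so the write
  // barrier has to check at runtime; for the other representations the
  // smi-ness of the value is already known at this point.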
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ sw(storage_reg, FieldMemOperand(receiver_reg, offset));
    } else {
      __ sw(value_reg, FieldMemOperand(receiver_reg, offset));
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(receiver_reg,
                          offset,
                          storage_reg,
                          scratch1,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ lw(scratch1,
          FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ sw(storage_reg, FieldMemOperand(scratch1, offset));
    } else {
      __ sw(value_reg, FieldMemOperand(scratch1, offset));
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(scratch1,
                          offset,
                          storage_reg,
                          receiver_reg,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register v0).
  ASSERT(value_reg.is(a0));
  __ bind(&exit);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);
}


// Generate StoreField code; the value is passed in the a0 register.
// When leaving the generated code after success, the receiver_reg and
// name_reg may be clobbered.  Upon branch to miss_label, the receiver
// and name registers have their original values.
void BaseStoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                               Handle<JSObject> object,
                                               LookupResult* lookup,
                                               Register receiver_reg,
                                               Register name_reg,
                                               Register value_reg,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* miss_label) {
  // a0 : value
  Label exit;

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    // Load the double storage.
    if (index < 0) {
      int offset = object->map()->instance_size() + (index * kPointerSize);
      __ lw(scratch1, FieldMemOperand(receiver_reg, offset));
    } else {
      __ lw(scratch1,
            FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
      int offset = index * kPointerSize + FixedArray::kHeaderSize;
      __ lw(scratch1, FieldMemOperand(scratch1, offset));
    }

    // Store the value into the storage.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(scratch2, value_reg);
    __ mtc1(scratch2, f6);  // Move the untagged int32 into the FPU.
    __ cvt_d_w(f4, f6);     // Convert the 32-bit word to a double.
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ sdc1(f4, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
    // Return the value (register v0).
    ASSERT(value_reg.is(a0));
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a0);
    return;
  }

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ sw(value_reg, FieldMemOperand(receiver_reg, offset));

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Skip updating write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Pass the now unused name_reg as a scratch register.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(receiver_reg,
                          offset,
                          name_reg,
                          scratch1,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ lw(scratch1,
          FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ sw(value_reg, FieldMemOperand(scratch1, offset));

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Skip updating write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Ok to clobber receiver_reg and name_reg, since we return.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(scratch1,
                          offset,
                          name_reg,
                          receiver_reg,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register v0).
  ASSERT(value_reg.is(a0));
  __ bind(&exit);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);
}


void BaseStoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                                Label* label,
                                                Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ li(this->name(), Operand(name));
  }
}


static void GenerateCallFunction(MacroAssembler* masm,
                                 Handle<Object> object,
                                 const ParameterCount& arguments,
                                 Label* miss,
                                 Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  -- a0: receiver
  //  -- a1: function to call
  // -----------------------------------
  // Check that the function really is a function.
  __ JumpIfSmi(a1, miss);
  __ GetObjectType(a1, a3, a3);
  __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
    __ sw(a3, MemOperand(sp, arguments.immediate() * kPointerSize));
  }

  // Invoke the function.
  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  __ InvokeFunction(a1, arguments, JUMP_FUNCTION, NullCallWrapper(), call_kind);
}


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ li(scratch, Operand(interceptor));
  __ Push(scratch, receiver, holder);
  __ lw(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
  __ push(scratch);
  __ li(scratch, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ push(scratch);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
          masm->isolate());
  __ PrepareCEntryArgs(6);
  __ PrepareCEntryFunction(ref);

  CEntryStub stub(1);
  __ CallStub(&stub);
}


static const int kFastApiCallArguments = FunctionCallbackArguments::kArgsLength;
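// (kFastApiCallArguments covers the six extra slots used by the fast API
// call machinery below: holder, callee JS function, call data, isolate,
// ReturnValue default value and ReturnValue.)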

// Reserves space for the extra arguments to the API function in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
                                       Register scratch) {
  ASSERT(Smi::FromInt(0) == 0);
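  // Smi zero is encoded as the machine word 0 (checked above), so the
  // reserved slots can be initialized by simply pushing zero_reg.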
  for (int i = 0; i < kFastApiCallArguments; i++) {
    __ push(zero_reg);
  }
}


// Undoes the effects of ReserveSpaceForFastApiCall.
static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
  __ Drop(kFastApiCallArguments);
}


static void GenerateFastApiDirectCall(MacroAssembler* masm,
                                      const CallOptimization& optimization,
                                      int argc) {
  // ----------- S t a t e -------------
  //  -- sp[0]              : holder (set by CheckPrototypes)
  //  -- sp[4]              : callee JS function
  //  -- sp[8]              : call data
  //  -- sp[12]             : isolate
  //  -- sp[16]             : ReturnValue default value
  //  -- sp[20]             : ReturnValue
  //  -- sp[24]             : last JS argument
  //  -- ...
  //  -- sp[(argc + 5) * 4] : first JS argument
  //  -- sp[(argc + 6) * 4] : receiver
  // -----------------------------------
  // Get the function and setup the context.
  Handle<JSFunction> function = optimization.constant_function();
  __ LoadHeapObject(t1, function);
  __ lw(cp, FieldMemOperand(t1, JSFunction::kContextOffset));

  // Pass the additional arguments.
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data(api_call_info->data(), masm->isolate());
  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
    __ li(a0, api_call_info);
    __ lw(t2, FieldMemOperand(a0, CallHandlerInfo::kDataOffset));
  } else {
    __ li(t2, call_data);
  }

  __ li(t3, Operand(ExternalReference::isolate_address(masm->isolate())));
  // Store JS function, call data, isolate, ReturnValue default and
  // ReturnValue.
  __ sw(t1, MemOperand(sp, 1 * kPointerSize));
  __ sw(t2, MemOperand(sp, 2 * kPointerSize));
  __ sw(t3, MemOperand(sp, 3 * kPointerSize));
  __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
  __ sw(t1, MemOperand(sp, 4 * kPointerSize));
  __ sw(t1, MemOperand(sp, 5 * kPointerSize));

  // Prepare arguments.
  __ Addu(a2, sp, Operand(5 * kPointerSize));

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;

  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  // NOTE: the O32 ABI requires a0 to hold a special pointer when returning a
  // struct from the function (which is currently the case). This means we pass
  // the first argument in a1 instead of a0, if returns_handle is true.
  // CallApiFunctionAndReturn will set up a0.

  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  bool returns_handle =
      !CallbackTable::ReturnsVoid(masm->isolate(), function_address);

  Register first_arg = returns_handle ? a1 : a0;
  Register second_arg = returns_handle ? a2 : a1;

  // first_arg = v8::Arguments&
  // Arguments is built at sp + 1 (sp is a reserved spot for ra).
  __ Addu(first_arg, sp, kPointerSize);

  // v8::Arguments::implicit_args_
  __ sw(a2, MemOperand(first_arg, 0 * kPointerSize));
  // v8::Arguments::values_
  __ Addu(t0, a2, Operand(argc * kPointerSize));
  __ sw(t0, MemOperand(first_arg, 1 * kPointerSize));
  // v8::Arguments::length_ = argc
  __ li(t0, Operand(argc));
  __ sw(t0, MemOperand(first_arg, 2 * kPointerSize));
  // v8::Arguments::is_construct_call = 0
  __ sw(zero_reg, MemOperand(first_arg, 3 * kPointerSize));

  // Unwind the argc JS arguments, the extra API call slots and the receiver.
  const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
  ApiFunction fun(function_address);
  ExternalReference::Type type =
      returns_handle ?
          ExternalReference::DIRECT_API_CALL :
          ExternalReference::DIRECT_API_CALL_NEW;
  ExternalReference ref =
      ExternalReference(&fun,
                        type,
                        masm->isolate());

  Address thunk_address = returns_handle
      ? FUNCTION_ADDR(&InvokeInvocationCallback)
      : FUNCTION_ADDR(&InvokeFunctionCallback);
  ExternalReference::Type thunk_type =
      returns_handle ?
          ExternalReference::PROFILING_API_CALL :
          ExternalReference::PROFILING_API_CALL_NEW;
  ApiFunction thunk_fun(thunk_address);
  ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
      masm->isolate());

  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallApiFunctionAndReturn(ref,
                              function_address,
                              thunk_ref,
                              second_arg,
                              kStackUnwindSpace,
                              returns_handle,
                              kFastApiCallArguments + 1);
}

class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name,
                          Code::ExtraICState extra_ic_state)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name),
        extra_ic_state_(extra_ic_state) {}

  void Compile(MacroAssembler* masm,
               Handle<JSObject> object,
               Handle<JSObject> holder,
               Handle<Name> name,
               LookupResult* lookup,
               Register receiver,
               Register scratch1,
               Register scratch2,
               Register scratch3,
               Label* miss) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, miss);
    CallOptimization optimization(lookup);
    if (optimization.is_constant_call()) {
      CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
                       holder, lookup, name, optimization, miss);
    } else {
      CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
                     name, holder, miss);
    }
  }

 private:
  void CompileCacheable(MacroAssembler* masm,
                        Handle<JSObject> object,
                        Register receiver,
                        Register scratch1,
                        Register scratch2,
                        Register scratch3,
                        Handle<JSObject> interceptor_holder,
                        LookupResult* lookup,
                        Handle<Name> name,
                        const CallOptimization& optimization,
                        Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());
    Counters* counters = masm->isolate()->counters();
    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
          !lookup->holder()->IsGlobalObject()) {
      depth1 = optimization.GetPrototypeDepthOfExpectedType(
          object, interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 = optimization.GetPrototypeDepthOfExpectedType(
            interceptor_holder, Handle<JSObject>(lookup->holder()));
      }
      can_do_fast_api_call =
          depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
    }

    __ IncrementCounter(counters->call_const_interceptor(), 1,
                        scratch1, scratch2);

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
                          scratch1, scratch2);
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from receiver to interceptor's holder
    // haven't changed and thus we can invoke interceptor.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3,
                                        name, depth1, miss);

    // Invoke an interceptor and if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
                        &regular_invoke);

    // Interceptor returned nothing for this property.  Try to use cached
    // constant function.

    // Check that the maps from interceptor's holder to constant function's
    // holder haven't changed and thus we can use cached constant function.
    if (*interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      Handle<JSObject>(lookup->holder()),
                                      scratch1, scratch2, scratch3,
                                      name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // for API (object which is instanceof for the signature).  It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke function.
    if (can_do_fast_api_call) {
      GenerateFastApiDirectCall(masm, optimization, arguments_.immediate());
    } else {
      CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
          ? CALL_AS_FUNCTION
          : CALL_AS_METHOD;
      Handle<JSFunction> function = optimization.constant_function();
      ParameterCount expected(function);
      __ InvokeFunction(function, expected, arguments_,
                        JUMP_FUNCTION, NullCallWrapper(), call_kind);
    }

    // Deferred code for the fast API call case: clean up the preallocated
    // space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm);
      __ Branch(miss_label);
    }

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm);
    }
  }

  void CompileRegular(MacroAssembler* masm,
                      Handle<JSObject> object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      Handle<Name> name,
                      Handle<JSObject> interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3,
                                        name, miss_label);

    // Call a runtime function to load the interceptor property.
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Save the name_ register across the call.
    __ push(name_);

    PushInterceptorArguments(masm, receiver, holder, name_, interceptor_holder);

    __ CallExternalReference(
          ExternalReference(
              IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
              masm->isolate()),
          6);
    // Restore the name_ register.
    __ pop(name_);
    // Leave the internal frame.
  }

  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           Handle<JSObject> holder_obj,
                           Register scratch,
                           Label* interceptor_succeeded) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);

      __ Push(holder, name_);
      CompileCallLoadPropertyWithInterceptor(masm,
                                             receiver,
                                             holder,
                                             name_,
                                             holder_obj);
      __ pop(name_);  // Restore the name.
      __ pop(receiver);  // Restore the holder.
    }
    // If interceptor returns no-result sentinel, call the constant function.
    __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
    __ Branch(interceptor_succeeded, ne, v0, Operand(scratch));
  }

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
  Register name_;
  Code::ExtraICState extra_ic_state_;
};


// Calls GenerateCheckPropertyCell for each global object in the prototype chain
// from object to (but not including) holder.
static void GenerateCheckPropertyCells(MacroAssembler* masm,
                                       Handle<JSObject> object,
                                       Handle<JSObject> holder,
                                       Handle<Name> name,
                                       Register scratch,
                                       Label* miss) {
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    if (current->IsGlobalObject()) {
      GenerateCheckPropertyCell(masm,
                                Handle<GlobalObject>::cast(current),
                                name,
                                scratch,
                                miss);
    }
    current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
  }
}


// Convert the int passed in register ival to an IEEE 754 single-precision
// floating point value and store it at memory location
// (dst + 4 * wordoffset). The FPU is used for the conversion.
static void StoreIntAsFloat(MacroAssembler* masm,
                            Register dst,
                            Register wordoffset,
                            Register ival,
                            Register scratch1) {
  __ mtc1(ival, f0);
  __ cvt_s_w(f0, f0);  // Convert the 32-bit int to single precision.
  __ sll(scratch1, wordoffset, 2);  // scratch1 = wordoffset * 4.
  __ addu(scratch1, dst, scratch1);
  __ swc1(f0, MemOperand(scratch1, 0));
}
   1173 
   1174 
   1175 void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
   1176   __ Jump(code, RelocInfo::CODE_TARGET);
   1177 }
   1178 
   1179 
   1180 #undef __
   1181 #define __ ACCESS_MASM(masm())
   1182 
   1183 
   1184 Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
   1185                                        Register object_reg,
   1186                                        Handle<JSObject> holder,
   1187                                        Register holder_reg,
   1188                                        Register scratch1,
   1189                                        Register scratch2,
   1190                                        Handle<Name> name,
   1191                                        int save_at_depth,
   1192                                        Label* miss,
   1193                                        PrototypeCheckType check) {
   1194   // Make sure that the type feedback oracle harvests the receiver map.
   1195   // TODO(svenpanne) Remove this hack when all ICs are reworked.
   1196   __ li(scratch1, Operand(Handle<Map>(object->map())));
   1197 
   1198   Handle<JSObject> first = object;
   1199   // Make sure there's no overlap between holder and object registers.
   1200   ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
   1201   ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
   1202          && !scratch2.is(scratch1));
   1203 
   1204   // Keep track of the current object in register reg.
   1205   Register reg = object_reg;
   1206   int depth = 0;
   1207 
   1208   if (save_at_depth == depth) {
   1209     __ sw(reg, MemOperand(sp));
   1210   }
   1211 
  // Traverse the prototype chain from the object and check the map at each
  // step.
   1214   Handle<JSObject> current = object;
   1215   while (!current.is_identical_to(holder)) {
   1216     ++depth;
   1217 
   1218     // Only global objects and objects that do not require access
   1219     // checks are allowed in stubs.
   1220     ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
   1221 
   1222     Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
   1223     if (!current->HasFastProperties() &&
   1224         !current->IsJSGlobalObject() &&
   1225         !current->IsJSGlobalProxy()) {
   1226       if (!name->IsUniqueName()) {
   1227         ASSERT(name->IsString());
   1228         name = factory()->InternalizeString(Handle<String>::cast(name));
   1229       }
   1230       ASSERT(current->property_dictionary()->FindEntry(*name) ==
   1231              NameDictionary::kNotFound);
   1232 
   1233       GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
   1234                                        scratch1, scratch2);
   1235 
   1236       __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
   1237       reg = holder_reg;  // From now on the object will be in holder_reg.
   1238       __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
   1239     } else {
   1240       Register map_reg = scratch1;
   1241       if (!current.is_identical_to(first) || check == CHECK_ALL_MAPS) {
   1242         Handle<Map> current_map(current->map());
   1243         // CheckMap implicitly loads the map of |reg| into |map_reg|.
   1244         __ CheckMap(reg, map_reg, current_map, miss, DONT_DO_SMI_CHECK);
   1245       } else {
   1246         __ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
   1247       }
   1248       // Check access rights to the global object.  This has to happen after
   1249       // the map check so that we know that the object is actually a global
   1250       // object.
   1251       if (current->IsJSGlobalProxy()) {
   1252         __ CheckAccessGlobalProxy(reg, scratch2, miss);
   1253       }
   1254       reg = holder_reg;  // From now on the object will be in holder_reg.
   1255 
   1256       if (heap()->InNewSpace(*prototype)) {
   1257         // The prototype is in new space; we cannot store a reference to it
   1258         // in the code.  Load it from the map.
   1259         __ lw(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
   1260       } else {
   1261         // The prototype is in old space; load it directly.
   1262         __ li(reg, Operand(prototype));
   1263       }
   1264     }
   1265 
   1266     if (save_at_depth == depth) {
   1267       __ sw(reg, MemOperand(sp));
   1268     }
   1269 
   1270     // Go to the next object in the prototype chain.
   1271     current = prototype;
   1272   }
   1273 
   1274   // Log the check depth.
   1275   LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
   1276 
   1277   if (!holder.is_identical_to(first) || check == CHECK_ALL_MAPS) {
   1278     // Check the holder map.
   1279     __ CheckMap(reg, scratch1, Handle<Map>(holder->map()), miss,
   1280                 DONT_DO_SMI_CHECK);
   1281   }
   1282 
   1283   // Perform security check for access to the global object.
   1284   ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
   1285   if (holder->IsJSGlobalProxy()) {
   1286     __ CheckAccessGlobalProxy(reg, scratch1, miss);
   1287   }
   1288 
   1289   // If we've skipped any global objects, it's not enough to verify that
   1290   // their maps haven't changed.  We also need to check that the property
   1291   // cell for the property is still empty.
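  // Example (an illustrative scenario, not generated code): for a lookup of
  // "x" on a chain receiver -> global -> holder, a later "global.x = 1" can
  // populate the global object's property cell for "x" without changing any
  // map, so the map checks above alone would not catch it.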
   1292   GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);
   1293 
   1294   // Return the register containing the holder.
   1295   return reg;
   1296 }
   1297 
   1298 
   1299 void BaseLoadStubCompiler::HandlerFrontendFooter(Handle<Name> name,
   1300                                                  Label* success,
   1301                                                  Label* miss) {
   1302   if (!miss->is_unused()) {
   1303     __ Branch(success);
   1304     __ bind(miss);
   1305     TailCallBuiltin(masm(), MissBuiltin(kind()));
   1306   }
   1307 }
   1308 
   1309 
   1310 void BaseStoreStubCompiler::HandlerFrontendFooter(Handle<Name> name,
   1311                                                   Label* success,
   1312                                                   Label* miss) {
   1313   if (!miss->is_unused()) {
   1314     __ b(success);
   1315     GenerateRestoreName(masm(), miss, name);
   1316     TailCallBuiltin(masm(), MissBuiltin(kind()));
   1317   }
   1318 }
   1319 
   1320 
   1321 Register BaseLoadStubCompiler::CallbackHandlerFrontend(
   1322     Handle<JSObject> object,
   1323     Register object_reg,
   1324     Handle<JSObject> holder,
   1325     Handle<Name> name,
   1326     Label* success,
   1327     Handle<ExecutableAccessorInfo> callback) {
   1328   Label miss;
   1329 
   1330   Register reg = HandlerFrontendHeader(object, object_reg, holder, name, &miss);
   1331 
   1332   if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
   1333     ASSERT(!reg.is(scratch2()));
   1334     ASSERT(!reg.is(scratch3()));
   1335     ASSERT(!reg.is(scratch4()));
   1336 
   1337     // Load the properties dictionary.
   1338     Register dictionary = scratch4();
   1339     __ lw(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset));
   1340 
   1341     // Probe the dictionary.
   1342     Label probe_done;
   1343     NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
   1344                                                      &miss,
   1345                                                      &probe_done,
   1346                                                      dictionary,
   1347                                                      this->name(),
   1348                                                      scratch2(),
   1349                                                      scratch3());
   1350     __ bind(&probe_done);
   1351 
   1352     // If probing finds an entry in the dictionary, scratch3 contains the
   1353     // pointer into the dictionary. Check that the value is the callback.
   1354     Register pointer = scratch3();
   1355     const int kElementsStartOffset = NameDictionary::kHeaderSize +
   1356         NameDictionary::kElementsStartIndex * kPointerSize;
   1357     const int kValueOffset = kElementsStartOffset + kPointerSize;
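    // Layout sketch (inferred from the offsets above): dictionary entries are
    // (key, value, details) triples stored flat after the dictionary header,
    // so an entry's value lives one pointer past its key slot.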
   1358     __ lw(scratch2(), FieldMemOperand(pointer, kValueOffset));
   1359     __ Branch(&miss, ne, scratch2(), Operand(callback));
   1360   }
   1361 
   1362   HandlerFrontendFooter(name, success, &miss);
   1363   return reg;
   1364 }
   1365 
   1366 
   1367 void BaseLoadStubCompiler::NonexistentHandlerFrontend(
   1368     Handle<JSObject> object,
   1369     Handle<JSObject> last,
   1370     Handle<Name> name,
   1371     Label* success,
   1372     Handle<GlobalObject> global) {
   1373   Label miss;
   1374 
   1375   HandlerFrontendHeader(object, receiver(), last, name, &miss);
   1376 
   1377   // If the last object in the prototype chain is a global object,
   1378   // check that the global property cell is empty.
   1379   if (!global.is_null()) {
   1380     GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss);
   1381   }
   1382 
   1383   HandlerFrontendFooter(name, success, &miss);
   1384 }
   1385 
   1386 
   1387 void BaseLoadStubCompiler::GenerateLoadField(Register reg,
   1388                                              Handle<JSObject> holder,
   1389                                              PropertyIndex field,
   1390                                              Representation representation) {
   1391   if (!reg.is(receiver())) __ mov(receiver(), reg);
   1392   if (kind() == Code::LOAD_IC) {
   1393     LoadFieldStub stub(field.is_inobject(holder),
   1394                        field.translate(holder),
   1395                        representation);
   1396     GenerateTailCall(masm(), stub.GetCode(isolate()));
   1397   } else {
   1398     KeyedLoadFieldStub stub(field.is_inobject(holder),
   1399                             field.translate(holder),
   1400                             representation);
   1401     GenerateTailCall(masm(), stub.GetCode(isolate()));
   1402   }
   1403 }
   1404 
   1405 
   1406 void BaseLoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
   1407   // Return the constant value.
   1408   __ LoadObject(v0, value);
   1409   __ Ret();
   1410 }
   1411 
   1412 
   1413 void BaseLoadStubCompiler::GenerateLoadCallback(
   1414     Register reg,
   1415     Handle<ExecutableAccessorInfo> callback) {
  // Build the AccessorInfo::args_ list on the stack and push the property
  // name below the exit frame to make the GC aware of them; the GC can then
  // update the stored pointers.
   1418   __ push(receiver());
   1419   __ mov(scratch2(), sp);  // scratch2 = AccessorInfo::args_
   1420   if (heap()->InNewSpace(callback->data())) {
   1421     __ li(scratch3(), callback);
   1422     __ lw(scratch3(), FieldMemOperand(scratch3(),
   1423                                       ExecutableAccessorInfo::kDataOffset));
   1424   } else {
   1425     __ li(scratch3(), Handle<Object>(callback->data(), isolate()));
   1426   }
   1427   __ Subu(sp, sp, 6 * kPointerSize);
   1428   __ sw(reg, MemOperand(sp, 5 * kPointerSize));
   1429   __ sw(scratch3(), MemOperand(sp, 4 * kPointerSize));
   1430   __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);
   1431   __ sw(scratch3(), MemOperand(sp, 3 * kPointerSize));
   1432   __ sw(scratch3(), MemOperand(sp, 2 * kPointerSize));
   1433   __ li(scratch4(),
   1434         Operand(ExternalReference::isolate_address(isolate())));
   1435   __ sw(scratch4(), MemOperand(sp, 1 * kPointerSize));
   1436   __ sw(name(), MemOperand(sp, 0 * kPointerSize));
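  // Stack layout sketch after the stores above (word offsets from sp; the
  // two undefined words are the return-value slots):
  //   sp[6 * kPointerSize] : receiver       <- scratch2 (AccessorInfo::args_)
  //   sp[5 * kPointerSize] : holder (reg)
  //   sp[4 * kPointerSize] : callback data
  //   sp[3 * kPointerSize] : undefined
  //   sp[2 * kPointerSize] : undefined
  //   sp[1 * kPointerSize] : isolate
  //   sp[0 * kPointerSize] : property name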
   1437 
   1438   Address getter_address = v8::ToCData<Address>(callback->getter());
   1439   bool returns_handle =
   1440       !CallbackTable::ReturnsVoid(isolate(), getter_address);
   1441 
   1442   Register first_arg = returns_handle ? a1 : a0;
   1443   Register second_arg = returns_handle ? a2 : a1;
   1444   Register third_arg = returns_handle ? a3 : a2;
   1445 
   1446   __ mov(a2, scratch2());  // Saved in case scratch2 == a1.
   1447   __ mov(first_arg, sp);  // (first argument - see note below) = Handle<Name>
   1448 
  // NOTE: the O32 ABI requires a0 to hold a special pointer when returning a
  // struct from the function (which is currently the case). This means we pass
  // the arguments in a1-a3 instead of a0-a2, if returns_handle is true.
   1452   // CallApiFunctionAndReturn will set up a0.
   1453 
   1454   const int kApiStackSpace = 1;
   1455   FrameScope frame_scope(masm(), StackFrame::MANUAL);
   1456   __ EnterExitFrame(false, kApiStackSpace);
   1457 
   1458   // Create AccessorInfo instance on the stack above the exit frame with
   1459   // scratch2 (internal::Object** args_) as the data.
   1460   __ sw(a2, MemOperand(sp, kPointerSize));
   1461   // (second argument - see note above) = AccessorInfo&
   1462   __ Addu(second_arg, sp, kPointerSize);
   1463 
   1464   const int kStackUnwindSpace = kFastApiCallArguments + 1;
   1465 
   1466   ApiFunction fun(getter_address);
   1467   ExternalReference::Type type =
   1468       returns_handle ?
   1469           ExternalReference::DIRECT_GETTER_CALL :
   1470           ExternalReference::DIRECT_GETTER_CALL_NEW;
   1471   ExternalReference ref = ExternalReference(&fun, type, isolate());
   1472 
   1473   Address thunk_address = returns_handle
   1474       ? FUNCTION_ADDR(&InvokeAccessorGetter)
   1475       : FUNCTION_ADDR(&InvokeAccessorGetterCallback);
   1476   ExternalReference::Type thunk_type =
   1477       returns_handle ?
   1478           ExternalReference::PROFILING_GETTER_CALL :
   1479           ExternalReference::PROFILING_GETTER_CALL_NEW;
   1480   ApiFunction thunk_fun(thunk_address);
   1481   ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
   1482       isolate());
   1483   __ CallApiFunctionAndReturn(ref,
   1484                               getter_address,
   1485                               thunk_ref,
   1486                               third_arg,
   1487                               kStackUnwindSpace,
   1488                               returns_handle,
   1489                               5);
   1490 }
   1491 
   1492 
   1493 void BaseLoadStubCompiler::GenerateLoadInterceptor(
   1494     Register holder_reg,
   1495     Handle<JSObject> object,
   1496     Handle<JSObject> interceptor_holder,
   1497     LookupResult* lookup,
   1498     Handle<Name> name) {
   1499   ASSERT(interceptor_holder->HasNamedInterceptor());
   1500   ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
   1501 
  // So far the most popular follow-ups for interceptor loads are FIELD and
  // CALLBACKS, so we inline only those; other cases may be added later.
   1505   bool compile_followup_inline = false;
   1506   if (lookup->IsFound() && lookup->IsCacheable()) {
   1507     if (lookup->IsField()) {
   1508       compile_followup_inline = true;
   1509     } else if (lookup->type() == CALLBACKS &&
   1510         lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
   1511       ExecutableAccessorInfo* callback =
   1512           ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
   1513       compile_followup_inline = callback->getter() != NULL &&
   1514           callback->IsCompatibleReceiver(*object);
   1515     }
   1516   }
   1517 
   1518   if (compile_followup_inline) {
   1519     // Compile the interceptor call, followed by inline code to load the
   1520     // property from further up the prototype chain if the call fails.
   1521     // Check that the maps haven't changed.
   1522     ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
   1523 
    // Preserve the receiver register explicitly whenever it is different from
    // the holder and it is needed should the interceptor return without any
    // result. The CALLBACKS case needs the receiver to be passed into C++ code;
    // the FIELD case might cause a miss during the prototype check.
    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);
   1531 
   1532     // Save necessary data before invoking an interceptor.
   1533     // Requires a frame to make GC aware of pushed pointers.
   1534     {
   1535       FrameScope frame_scope(masm(), StackFrame::INTERNAL);
   1536       if (must_preserve_receiver_reg) {
   1537         __ Push(receiver(), holder_reg, this->name());
   1538       } else {
   1539         __ Push(holder_reg, this->name());
   1540       }
      // Invoke an interceptor.  Note: map checks from receiver to
      // interceptor's holder have been compiled before (see a caller
      // of this method).
   1544       CompileCallLoadPropertyWithInterceptor(masm(),
   1545                                              receiver(),
   1546                                              holder_reg,
   1547                                              this->name(),
   1548                                              interceptor_holder);
      // Check if the interceptor provided a value for the property. If it
      // did, return immediately.
   1551       Label interceptor_failed;
   1552       __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
   1553       __ Branch(&interceptor_failed, eq, v0, Operand(scratch1()));
   1554       frame_scope.GenerateLeaveFrame();
   1555       __ Ret();
   1556 
   1557       __ bind(&interceptor_failed);
   1558       __ pop(this->name());
   1559       __ pop(holder_reg);
   1560       if (must_preserve_receiver_reg) {
   1561         __ pop(receiver());
   1562       }
   1563       // Leave the internal frame.
   1564     }
   1565     GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
   1566   } else {  // !compile_followup_inline
   1567     // Call the runtime system to load the interceptor.
   1568     // Check that the maps haven't changed.
   1569     PushInterceptorArguments(masm(), receiver(), holder_reg,
   1570                              this->name(), interceptor_holder);
   1571 
   1572     ExternalReference ref = ExternalReference(
   1573         IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
   1574     __ TailCallExternalReference(ref, 6, 1);
   1575   }
   1576 }
   1577 
   1578 
   1579 void CallStubCompiler::GenerateNameCheck(Handle<Name> name, Label* miss) {
   1580   if (kind_ == Code::KEYED_CALL_IC) {
   1581     __ Branch(miss, ne, a2, Operand(name));
   1582   }
   1583 }
   1584 
   1585 
   1586 void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
   1587                                                    Handle<JSObject> holder,
   1588                                                    Handle<Name> name,
   1589                                                    Label* miss) {
   1590   ASSERT(holder->IsGlobalObject());
   1591 
   1592   // Get the number of arguments.
   1593   const int argc = arguments().immediate();
   1594 
   1595   // Get the receiver from the stack.
   1596   __ lw(a0, MemOperand(sp, argc * kPointerSize));
   1597 
   1598   // Check that the maps haven't changed.
   1599   __ JumpIfSmi(a0, miss);
   1600   CheckPrototypes(object, a0, holder, a3, a1, t0, name, miss);
   1601 }
   1602 
   1603 
   1604 void CallStubCompiler::GenerateLoadFunctionFromCell(
   1605     Handle<Cell> cell,
   1606     Handle<JSFunction> function,
   1607     Label* miss) {
   1608   // Get the value from the cell.
   1609   __ li(a3, Operand(cell));
   1610   __ lw(a1, FieldMemOperand(a3, Cell::kValueOffset));
   1611 
   1612   // Check that the cell contains the same function.
   1613   if (heap()->InNewSpace(*function)) {
   1614     // We can't embed a pointer to a function in new space so we have
   1615     // to verify that the shared function info is unchanged. This has
   1616     // the nice side effect that multiple closures based on the same
   1617     // function can all use this call IC. Before we load through the
   1618     // function, we have to verify that it still is a function.
   1619     __ JumpIfSmi(a1, miss);
   1620     __ GetObjectType(a1, a3, a3);
   1621     __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));
   1622 
   1623     // Check the shared function info. Make sure it hasn't changed.
   1624     __ li(a3, Handle<SharedFunctionInfo>(function->shared()));
   1625     __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
   1626     __ Branch(miss, ne, t0, Operand(a3));
   1627   } else {
   1628     __ Branch(miss, ne, a1, Operand(function));
   1629   }
   1630 }
   1631 
   1632 
   1633 void CallStubCompiler::GenerateMissBranch() {
   1634   Handle<Code> code =
   1635       isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
   1636                                                kind_,
   1637                                                extra_state_);
   1638   __ Jump(code, RelocInfo::CODE_TARGET);
   1639 }
   1640 
   1641 
   1642 Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
   1643                                                 Handle<JSObject> holder,
   1644                                                 PropertyIndex index,
   1645                                                 Handle<Name> name) {
   1646   // ----------- S t a t e -------------
   1647   //  -- a2    : name
   1648   //  -- ra    : return address
   1649   // -----------------------------------
   1650   Label miss;
   1651 
   1652   GenerateNameCheck(name, &miss);
   1653 
   1654   const int argc = arguments().immediate();
   1655 
   1656   // Get the receiver of the function from the stack into a0.
   1657   __ lw(a0, MemOperand(sp, argc * kPointerSize));
   1658   // Check that the receiver isn't a smi.
   1659   __ JumpIfSmi(a0, &miss, t0);
   1660 
   1661   // Do the right check and compute the holder register.
   1662   Register reg = CheckPrototypes(object, a0, holder, a1, a3, t0, name, &miss);
   1663   GenerateFastPropertyLoad(masm(), a1, reg, index.is_inobject(holder),
   1664                            index.translate(holder), Representation::Tagged());
   1665 
   1666   GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
   1667 
   1668   // Handle call cache miss.
   1669   __ bind(&miss);
   1670   GenerateMissBranch();
   1671 
   1672   // Return the generated code.
   1673   return GetCode(Code::FIELD, name);
   1674 }
   1675 
   1676 
   1677 Handle<Code> CallStubCompiler::CompileArrayCodeCall(
   1678     Handle<Object> object,
   1679     Handle<JSObject> holder,
   1680     Handle<Cell> cell,
   1681     Handle<JSFunction> function,
   1682     Handle<String> name,
   1683     Code::StubType type) {
   1684   Label miss;
   1685 
  // Check that the function is still the Array function.
   1687   const int argc = arguments().immediate();
   1688   GenerateNameCheck(name, &miss);
   1689   Register receiver = a1;
   1690 
   1691   if (cell.is_null()) {
   1692     __ lw(receiver, MemOperand(sp, argc * kPointerSize));
   1693 
   1694     // Check that the receiver isn't a smi.
   1695     __ JumpIfSmi(receiver, &miss);
   1696 
   1697     // Check that the maps haven't changed.
   1698     CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, a3, a0,
   1699                     t0, name, &miss);
   1700   } else {
   1701     ASSERT(cell->value() == *function);
   1702     GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
   1703                                 &miss);
   1704     GenerateLoadFunctionFromCell(cell, function, &miss);
   1705   }
   1706 
   1707   Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
   1708   site->set_transition_info(Smi::FromInt(GetInitialFastElementsKind()));
   1709   Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
   1710   __ li(a0, Operand(argc));
   1711   __ li(a2, Operand(site_feedback_cell));
   1712   __ li(a1, Operand(function));
   1713 
   1714   ArrayConstructorStub stub(isolate());
   1715   __ TailCallStub(&stub);
   1716 
   1717   __ bind(&miss);
   1718   GenerateMissBranch();
   1719 
   1720   // Return the generated code.
   1721   return GetCode(type, name);
   1722 }
   1723 
   1724 
   1725 Handle<Code> CallStubCompiler::CompileArrayPushCall(
   1726     Handle<Object> object,
   1727     Handle<JSObject> holder,
   1728     Handle<Cell> cell,
   1729     Handle<JSFunction> function,
   1730     Handle<String> name,
   1731     Code::StubType type) {
   1732   // ----------- S t a t e -------------
   1733   //  -- a2    : name
   1734   //  -- ra    : return address
   1735   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   1736   //  -- ...
   1737   //  -- sp[argc * 4]           : receiver
   1738   // -----------------------------------
   1739 
   1740   // If object is not an array, bail out to regular call.
   1741   if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
   1742 
   1743   Label miss;
   1744 
   1745   GenerateNameCheck(name, &miss);
   1746 
   1747   Register receiver = a1;
   1748 
   1749   // Get the receiver from the stack.
   1750   const int argc = arguments().immediate();
   1751   __ lw(receiver, MemOperand(sp, argc * kPointerSize));
   1752 
   1753   // Check that the receiver isn't a smi.
   1754   __ JumpIfSmi(receiver, &miss);
   1755 
   1756   // Check that the maps haven't changed.
   1757   CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, a3, v0, t0,
   1758                   name, &miss);
   1759 
   1760   if (argc == 0) {
   1761     // Nothing to do, just return the length.
   1762     __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1763     __ DropAndRet(argc + 1);
   1764   } else {
   1765     Label call_builtin;
   1766     if (argc == 1) {  // Otherwise fall through to call the builtin.
   1767       Label attempt_to_grow_elements, with_write_barrier, check_double;
   1768 
   1769       Register elements = t2;
   1770       Register end_elements = t1;
   1771       // Get the elements array of the object.
   1772       __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
   1773 
   1774       // Check that the elements are in fast mode and writable.
   1775       __ CheckMap(elements,
   1776                   v0,
   1777                   Heap::kFixedArrayMapRootIndex,
   1778                   &check_double,
   1779                   DONT_DO_SMI_CHECK);
   1780 
   1781       // Get the array's length into v0 and calculate new length.
   1782       __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1783       STATIC_ASSERT(kSmiTagSize == 1);
   1784       STATIC_ASSERT(kSmiTag == 0);
   1785       __ Addu(v0, v0, Operand(Smi::FromInt(argc)));
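      // Smi arithmetic sketch: with a zero tag bit, tagged values add
      // linearly -- (a << 1) + (b << 1) == ((a + b) << 1) -- so the length
      // is bumped without untagging.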
   1786 
   1787       // Get the elements' length.
   1788       __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
   1789 
   1790       // Check if we could survive without allocation.
   1791       __ Branch(&attempt_to_grow_elements, gt, v0, Operand(t0));
   1792 
   1793       // Check if value is a smi.
   1794       __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize));
   1795       __ JumpIfNotSmi(t0, &with_write_barrier);
   1796 
   1797       // Save new length.
   1798       __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1799 
      // Store the value.
      // We may need the address of the end of the array below, so compute it
      // into end_elements.
   1803       __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
   1804       __ Addu(end_elements, elements, end_elements);
   1805       const int kEndElementsOffset =
   1806           FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
   1807       __ Addu(end_elements, end_elements, kEndElementsOffset);
   1808       __ sw(t0, MemOperand(end_elements));
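      // Address arithmetic sketch: a smi n is encoded as n << 1, so shifting
      // it left by kPointerSizeLog2 - kSmiTagSize == 1 yields n * 4, the byte
      // offset of element n. E.g. a new length of 3 (smi 6) gives
      // 6 << 1 == 12 == 3 * kPointerSize.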
   1809 
      // Return the new length (in v0).
   1811       __ DropAndRet(argc + 1);
   1812 
   1813       __ bind(&check_double);
   1814 
   1815       // Check that the elements are in fast mode and writable.
   1816       __ CheckMap(elements,
   1817                   a0,
   1818                   Heap::kFixedDoubleArrayMapRootIndex,
   1819                   &call_builtin,
   1820                   DONT_DO_SMI_CHECK);
   1821 
      // Get the array's length into a0 and calculate new length.
   1823       __ lw(a0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1824       STATIC_ASSERT(kSmiTagSize == 1);
   1825       STATIC_ASSERT(kSmiTag == 0);
   1826       __ Addu(a0, a0, Operand(Smi::FromInt(argc)));
   1827 
   1828       // Get the elements' length.
   1829       __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
   1830 
   1831       // Check if we could survive without allocation.
   1832       __ Branch(&call_builtin, gt, a0, Operand(t0));
   1833 
   1834       __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize));
   1835       __ StoreNumberToDoubleElements(
   1836           t0, a0, elements, a3, t1, a2, t5,
   1837           &call_builtin, argc * kDoubleSize);
   1838 
   1839       // Save new length.
   1840       __ sw(a0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1841 
      // Done; return.
   1843       __ DropAndRet(argc + 1);
   1844 
   1845       __ bind(&with_write_barrier);
   1846 
   1847       __ lw(a3, FieldMemOperand(receiver, HeapObject::kMapOffset));
   1848 
      if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
   1850         Label fast_object, not_fast_object;
   1851         __ CheckFastObjectElements(a3, t3, &not_fast_object);
   1852         __ jmp(&fast_object);
   1853         // In case of fast smi-only, convert to fast object, otherwise bail out.
   1854         __ bind(&not_fast_object);
   1855         __ CheckFastSmiElements(a3, t3, &call_builtin);
   1856 
   1857         __ lw(t3, FieldMemOperand(t0, HeapObject::kMapOffset));
   1858         __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
   1859         __ Branch(&call_builtin, eq, t3, Operand(at));
        // a1: receiver
        // a3: map
   1862         Label try_holey_map;
   1863         __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
   1864                                                FAST_ELEMENTS,
   1865                                                a3,
   1866                                                t3,
   1867                                                &try_holey_map);
   1868         __ mov(a2, receiver);
   1869         ElementsTransitionGenerator::
   1870             GenerateMapChangeElementsTransition(masm(),
   1871                                                 DONT_TRACK_ALLOCATION_SITE,
   1872                                                 NULL);
   1873         __ jmp(&fast_object);
   1874 
   1875         __ bind(&try_holey_map);
   1876         __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
   1877                                                FAST_HOLEY_ELEMENTS,
   1878                                                a3,
   1879                                                t3,
   1880                                                &call_builtin);
   1881         __ mov(a2, receiver);
   1882         ElementsTransitionGenerator::
   1883             GenerateMapChangeElementsTransition(masm(),
   1884                                                 DONT_TRACK_ALLOCATION_SITE,
   1885                                                 NULL);
   1886         __ bind(&fast_object);
   1887       } else {
   1888         __ CheckFastObjectElements(a3, a3, &call_builtin);
   1889       }
   1890 
   1891       // Save new length.
   1892       __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1893 
      // Store the value.
      // We may need the address of the end of the array below, so compute it
      // into end_elements.
   1897       __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
   1898       __ Addu(end_elements, elements, end_elements);
   1899       __ Addu(end_elements, end_elements, kEndElementsOffset);
   1900       __ sw(t0, MemOperand(end_elements));
   1901 
   1902       __ RecordWrite(elements,
   1903                      end_elements,
   1904                      t0,
   1905                      kRAHasNotBeenSaved,
   1906                      kDontSaveFPRegs,
   1907                      EMIT_REMEMBERED_SET,
   1908                      OMIT_SMI_CHECK);
   1909       __ DropAndRet(argc + 1);
   1910 
   1911       __ bind(&attempt_to_grow_elements);
   1912       // v0: array's length + 1.
   1913       // t0: elements' length.
   1914 
   1915       if (!FLAG_inline_new) {
   1916         __ Branch(&call_builtin);
   1917       }
   1918 
   1919       __ lw(a2, MemOperand(sp, (argc - 1) * kPointerSize));
   1920       // Growing elements that are SMI-only requires special handling in case
   1921       // the new element is non-Smi. For now, delegate to the builtin.
   1922       Label no_fast_elements_check;
   1923       __ JumpIfSmi(a2, &no_fast_elements_check);
   1924       __ lw(t3, FieldMemOperand(receiver, HeapObject::kMapOffset));
   1925       __ CheckFastObjectElements(t3, t3, &call_builtin);
   1926       __ bind(&no_fast_elements_check);
   1927 
   1928       ExternalReference new_space_allocation_top =
   1929           ExternalReference::new_space_allocation_top_address(isolate());
   1930       ExternalReference new_space_allocation_limit =
   1931           ExternalReference::new_space_allocation_limit_address(isolate());
   1932 
   1933       const int kAllocationDelta = 4;
   1934       // Load top and check if it is the end of elements.
   1935       __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
   1936       __ Addu(end_elements, elements, end_elements);
   1937       __ Addu(end_elements, end_elements, Operand(kEndElementsOffset));
   1938       __ li(t3, Operand(new_space_allocation_top));
   1939       __ lw(a3, MemOperand(t3));
   1940       __ Branch(&call_builtin, ne, end_elements, Operand(a3));
   1941 
   1942       __ li(t5, Operand(new_space_allocation_limit));
   1943       __ lw(t5, MemOperand(t5));
   1944       __ Addu(a3, a3, Operand(kAllocationDelta * kPointerSize));
   1945       __ Branch(&call_builtin, hi, a3, Operand(t5));
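      // Growth sketch: the two branches above ensure (1) the elements store
      // is the last object in new space (its end equals the allocation top),
      // and (2) growing it by kAllocationDelta words stays below the
      // allocation limit; only then can it be extended in place by bumping
      // the top pointer.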
   1946 
   1947       // We fit and could grow elements.
   1948       // Update new_space_allocation_top.
   1949       __ sw(a3, MemOperand(t3));
   1950       // Push the argument.
   1951       __ sw(a2, MemOperand(end_elements));
   1952       // Fill the rest with holes.
   1953       __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
   1954       for (int i = 1; i < kAllocationDelta; i++) {
   1955         __ sw(a3, MemOperand(end_elements, i * kPointerSize));
   1956       }
   1957 
   1958       // Update elements' and array's sizes.
   1959       __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   1960       __ Addu(t0, t0, Operand(Smi::FromInt(kAllocationDelta)));
   1961       __ sw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
   1962 
   1963       // Elements are in new space, so write barrier is not required.
   1964       __ DropAndRet(argc + 1);
   1965     }
   1966     __ bind(&call_builtin);
   1967     __ TailCallExternalReference(
   1968         ExternalReference(Builtins::c_ArrayPush, isolate()), argc + 1, 1);
   1969   }
   1970 
   1971   // Handle call cache miss.
   1972   __ bind(&miss);
   1973   GenerateMissBranch();
   1974 
   1975   // Return the generated code.
   1976   return GetCode(type, name);
   1977 }
   1978 
   1979 
   1980 Handle<Code> CallStubCompiler::CompileArrayPopCall(
   1981     Handle<Object> object,
   1982     Handle<JSObject> holder,
   1983     Handle<Cell> cell,
   1984     Handle<JSFunction> function,
   1985     Handle<String> name,
   1986     Code::StubType type) {
   1987   // ----------- S t a t e -------------
   1988   //  -- a2    : name
   1989   //  -- ra    : return address
   1990   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   1991   //  -- ...
   1992   //  -- sp[argc * 4]           : receiver
   1993   // -----------------------------------
   1994 
   1995   // If object is not an array, bail out to regular call.
   1996   if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
   1997 
   1998   Label miss, return_undefined, call_builtin;
   1999   Register receiver = a1;
   2000   Register elements = a3;
   2001   GenerateNameCheck(name, &miss);
   2002 
   2003   // Get the receiver from the stack.
   2004   const int argc = arguments().immediate();
   2005   __ lw(receiver, MemOperand(sp, argc * kPointerSize));
   2006   // Check that the receiver isn't a smi.
   2007   __ JumpIfSmi(receiver, &miss);
   2008 
   2009   // Check that the maps haven't changed.
   2010   CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, elements,
   2011                   t0, v0, name, &miss);
   2012 
   2013   // Get the elements array of the object.
   2014   __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
   2015 
   2016   // Check that the elements are in fast mode and writable.
   2017   __ CheckMap(elements,
   2018               v0,
   2019               Heap::kFixedArrayMapRootIndex,
   2020               &call_builtin,
   2021               DONT_DO_SMI_CHECK);
   2022 
   2023   // Get the array's length into t0 and calculate new length.
   2024   __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   2025   __ Subu(t0, t0, Operand(Smi::FromInt(1)));
   2026   __ Branch(&return_undefined, lt, t0, Operand(zero_reg));
   2027 
   2028   // Get the last element.
   2029   __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
   2030   STATIC_ASSERT(kSmiTagSize == 1);
   2031   STATIC_ASSERT(kSmiTag == 0);
   2032   // We can't address the last element in one operation. Compute the more
   2033   // expensive shift first, and use an offset later on.
   2034   __ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize);
   2035   __ Addu(elements, elements, t1);
   2036   __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
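  // Addressing sketch: t0 holds the new length n (as a smi), which is also
  // the index of the element being popped; t1 == n * kPointerSize, so
  // elements + t1 plus the (untagged) FixedArray header offset addresses
  // that element.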
   2037   __ Branch(&call_builtin, eq, v0, Operand(t2));
   2038 
   2039   // Set the array's length.
   2040   __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
   2041 
   2042   // Fill with the hole.
   2043   __ sw(t2, FieldMemOperand(elements, FixedArray::kHeaderSize));
   2044   __ DropAndRet(argc + 1);
   2045 
   2046   __ bind(&return_undefined);
   2047   __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
   2048   __ DropAndRet(argc + 1);
   2049 
   2050   __ bind(&call_builtin);
   2051   __ TailCallExternalReference(
   2052       ExternalReference(Builtins::c_ArrayPop, isolate()), argc + 1, 1);
   2053 
   2054   // Handle call cache miss.
   2055   __ bind(&miss);
   2056   GenerateMissBranch();
   2057 
   2058   // Return the generated code.
   2059   return GetCode(type, name);
   2060 }
   2061 
   2062 
   2063 Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
   2064     Handle<Object> object,
   2065     Handle<JSObject> holder,
   2066     Handle<Cell> cell,
   2067     Handle<JSFunction> function,
   2068     Handle<String> name,
   2069     Code::StubType type) {
   2070   // ----------- S t a t e -------------
   2071   //  -- a2                     : function name
   2072   //  -- ra                     : return address
   2073   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   2074   //  -- ...
   2075   //  -- sp[argc * 4]           : receiver
   2076   // -----------------------------------
   2077 
   2078   // If object is not a string, bail out to regular call.
   2079   if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
   2080 
   2081   const int argc = arguments().immediate();
   2082   Label miss;
   2083   Label name_miss;
   2084   Label index_out_of_range;
   2085 
   2086   Label* index_out_of_range_label = &index_out_of_range;
   2087 
   2088   if (kind_ == Code::CALL_IC &&
   2089       (CallICBase::StringStubState::decode(extra_state_) ==
   2090        DEFAULT_STRING_STUB)) {
   2091     index_out_of_range_label = &miss;
   2092   }
   2093 
   2094   GenerateNameCheck(name, &name_miss);
   2095 
   2096   // Check that the maps starting from the prototype haven't changed.
   2097   GenerateDirectLoadGlobalFunctionPrototype(masm(),
   2098                                             Context::STRING_FUNCTION_INDEX,
   2099                                             v0,
   2100                                             &miss);
   2101   ASSERT(!object.is_identical_to(holder));
   2102   CheckPrototypes(
   2103       Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
   2104       v0, holder, a1, a3, t0, name, &miss);
   2105 
   2106   Register receiver = a1;
   2107   Register index = t1;
   2108   Register result = v0;
   2109   __ lw(receiver, MemOperand(sp, argc * kPointerSize));
   2110   if (argc > 0) {
   2111     __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
   2112   } else {
   2113     __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
   2114   }
   2115 
   2116   StringCharCodeAtGenerator generator(receiver,
   2117                                       index,
   2118                                       result,
   2119                                       &miss,  // When not a string.
   2120                                       &miss,  // When not a number.
   2121                                       index_out_of_range_label,
   2122                                       STRING_INDEX_IS_NUMBER);
   2123   generator.GenerateFast(masm());
   2124   __ DropAndRet(argc + 1);
   2125 
   2126   StubRuntimeCallHelper call_helper;
   2127   generator.GenerateSlow(masm(), call_helper);
   2128 
   2129   if (index_out_of_range.is_linked()) {
   2130     __ bind(&index_out_of_range);
   2131     __ LoadRoot(v0, Heap::kNanValueRootIndex);
   2132     __ DropAndRet(argc + 1);
   2133   }
   2134 
   2135   __ bind(&miss);
   2136   // Restore function name in a2.
   2137   __ li(a2, name);
   2138   __ bind(&name_miss);
   2139   GenerateMissBranch();
   2140 
   2141   // Return the generated code.
   2142   return GetCode(type, name);
   2143 }
   2144 
   2145 
   2146 Handle<Code> CallStubCompiler::CompileStringCharAtCall(
   2147     Handle<Object> object,
   2148     Handle<JSObject> holder,
   2149     Handle<Cell> cell,
   2150     Handle<JSFunction> function,
   2151     Handle<String> name,
   2152     Code::StubType type) {
   2153   // ----------- S t a t e -------------
   2154   //  -- a2                     : function name
   2155   //  -- ra                     : return address
   2156   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   2157   //  -- ...
   2158   //  -- sp[argc * 4]           : receiver
   2159   // -----------------------------------
   2160 
   2161   // If object is not a string, bail out to regular call.
   2162   if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
   2163 
   2164   const int argc = arguments().immediate();
   2165   Label miss;
   2166   Label name_miss;
   2167   Label index_out_of_range;
   2168   Label* index_out_of_range_label = &index_out_of_range;
   2169   if (kind_ == Code::CALL_IC &&
   2170       (CallICBase::StringStubState::decode(extra_state_) ==
   2171        DEFAULT_STRING_STUB)) {
   2172     index_out_of_range_label = &miss;
   2173   }
   2174   GenerateNameCheck(name, &name_miss);
   2175 
   2176   // Check that the maps starting from the prototype haven't changed.
   2177   GenerateDirectLoadGlobalFunctionPrototype(masm(),
   2178                                             Context::STRING_FUNCTION_INDEX,
   2179                                             v0,
   2180                                             &miss);
   2181   ASSERT(!object.is_identical_to(holder));
   2182   CheckPrototypes(
   2183       Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
   2184       v0, holder, a1, a3, t0, name, &miss);
   2185 
   2186   Register receiver = v0;
   2187   Register index = t1;
   2188   Register scratch = a3;
   2189   Register result = v0;
   2190   __ lw(receiver, MemOperand(sp, argc * kPointerSize));
   2191   if (argc > 0) {
   2192     __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
   2193   } else {
   2194     __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
   2195   }
   2196 
   2197   StringCharAtGenerator generator(receiver,
   2198                                   index,
   2199                                   scratch,
   2200                                   result,
   2201                                   &miss,  // When not a string.
   2202                                   &miss,  // When not a number.
   2203                                   index_out_of_range_label,
   2204                                   STRING_INDEX_IS_NUMBER);
   2205   generator.GenerateFast(masm());
   2206   __ DropAndRet(argc + 1);
   2207 
   2208   StubRuntimeCallHelper call_helper;
   2209   generator.GenerateSlow(masm(), call_helper);
   2210 
   2211   if (index_out_of_range.is_linked()) {
   2212     __ bind(&index_out_of_range);
   2213     __ LoadRoot(v0, Heap::kempty_stringRootIndex);
   2214     __ DropAndRet(argc + 1);
   2215   }
   2216 
   2217   __ bind(&miss);
   2218   // Restore function name in a2.
   2219   __ li(a2, name);
   2220   __ bind(&name_miss);
   2221   GenerateMissBranch();
   2222 
   2223   // Return the generated code.
   2224   return GetCode(type, name);
   2225 }
   2226 
   2227 
   2228 Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
   2229     Handle<Object> object,
   2230     Handle<JSObject> holder,
   2231     Handle<Cell> cell,
   2232     Handle<JSFunction> function,
   2233     Handle<String> name,
   2234     Code::StubType type) {
   2235   // ----------- S t a t e -------------
   2236   //  -- a2                     : function name
   2237   //  -- ra                     : return address
   2238   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   2239   //  -- ...
   2240   //  -- sp[argc * 4]           : receiver
   2241   // -----------------------------------
   2242 
   2243   const int argc = arguments().immediate();
   2244 
   2245   // If the object is not a JSObject or we got an unexpected number of
   2246   // arguments, bail out to the regular call.
   2247   if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
   2248 
   2249   Label miss;
   2250   GenerateNameCheck(name, &miss);
   2251 
   2252   if (cell.is_null()) {
   2253     __ lw(a1, MemOperand(sp, 1 * kPointerSize));
   2254 
   2255     STATIC_ASSERT(kSmiTag == 0);
   2256     __ JumpIfSmi(a1, &miss);
   2257 
   2258     CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, v0, a3, t0,
   2259                     name, &miss);
   2260   } else {
   2261     ASSERT(cell->value() == *function);
   2262     GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
   2263                                 &miss);
   2264     GenerateLoadFunctionFromCell(cell, function, &miss);
   2265   }
   2266 
   2267   // Load the char code argument.
   2268   Register code = a1;
   2269   __ lw(code, MemOperand(sp, 0 * kPointerSize));
   2270 
   2271   // Check the code is a smi.
   2272   Label slow;
   2273   STATIC_ASSERT(kSmiTag == 0);
   2274   __ JumpIfNotSmi(code, &slow);
   2275 
   2276   // Convert the smi code to uint16.
   2277   __ And(code, code, Operand(Smi::FromInt(0xffff)));
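  // Masking sketch: smis are tagged as n << 1, so ANDing with
  // Smi::FromInt(0xffff) == 0xffff << 1 masks the payload in place:
  // ((n << 1) & (0xffff << 1)) == ((n & 0xffff) << 1).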
   2278 
   2279   StringCharFromCodeGenerator generator(code, v0);
   2280   generator.GenerateFast(masm());
   2281   __ DropAndRet(argc + 1);
   2282 
   2283   StubRuntimeCallHelper call_helper;
   2284   generator.GenerateSlow(masm(), call_helper);
   2285 
   2286   // Tail call the full function. We do not have to patch the receiver
   2287   // because the function makes no use of it.
   2288   __ bind(&slow);
   2289   ParameterCount expected(function);
   2290   __ InvokeFunction(function, expected, arguments(),
   2291                     JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
   2292 
   2293   __ bind(&miss);
   2294   // a2: function name.
   2295   GenerateMissBranch();
   2296 
   2297   // Return the generated code.
   2298   return GetCode(type, name);
   2299 }
   2300 
   2301 
   2302 Handle<Code> CallStubCompiler::CompileMathFloorCall(
   2303     Handle<Object> object,
   2304     Handle<JSObject> holder,
   2305     Handle<Cell> cell,
   2306     Handle<JSFunction> function,
   2307     Handle<String> name,
   2308     Code::StubType type) {
   2309   // ----------- S t a t e -------------
   2310   //  -- a2                     : function name
   2311   //  -- ra                     : return address
   2312   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   2313   //  -- ...
   2314   //  -- sp[argc * 4]           : receiver
   2315   // -----------------------------------
   2316 
   2317 
   2318   const int argc = arguments().immediate();
   2319   // If the object is not a JSObject or we got an unexpected number of
   2320   // arguments, bail out to the regular call.
   2321   if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
   2322 
   2323   Label miss, slow;
   2324   GenerateNameCheck(name, &miss);
   2325 
   2326   if (cell.is_null()) {
   2327     __ lw(a1, MemOperand(sp, 1 * kPointerSize));
   2328     STATIC_ASSERT(kSmiTag == 0);
   2329     __ JumpIfSmi(a1, &miss);
   2330     CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, a0, a3, t0,
   2331                     name, &miss);
   2332   } else {
   2333     ASSERT(cell->value() == *function);
   2334     GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
   2335                                 &miss);
   2336     GenerateLoadFunctionFromCell(cell, function, &miss);
   2337   }
   2338 
   2339   // Load the (only) argument into v0.
   2340   __ lw(v0, MemOperand(sp, 0 * kPointerSize));
   2341 
   2342   // If the argument is a smi, just return.
   2343   STATIC_ASSERT(kSmiTag == 0);
   2344   __ And(t0, v0, Operand(kSmiTagMask));
   2345   __ DropAndRet(argc + 1, eq, t0, Operand(zero_reg));
   2346 
   2347   __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
   2348 
   2349   Label wont_fit_smi, no_fpu_error, restore_fcsr_and_return;
   2350 
  // Use the FPU floor instruction for the conversion.
   2352 
   2353   // Load the HeapNumber value.
   2354   __ ldc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
   2355 
   2356   // Backup FCSR.
   2357   __ cfc1(a3, FCSR);
   2358   // Clearing FCSR clears the exception mask with no side-effects.
   2359   __ ctc1(zero_reg, FCSR);
   2360   // Convert the argument to an integer.
   2361   __ floor_w_d(f0, f0);
   2362 
   2363   // Start checking for special cases.
   2364   // Get the argument exponent and clear the sign bit.
   2365   __ lw(t1, FieldMemOperand(v0, HeapNumber::kValueOffset + kPointerSize));
   2366   __ And(t2, t1, Operand(~HeapNumber::kSignMask));
   2367   __ srl(t2, t2, HeapNumber::kMantissaBitsInTopWord);
   2368 
   2369   // Retrieve FCSR and check for fpu errors.
   2370   __ cfc1(t5, FCSR);
   2371   __ And(t5, t5, Operand(kFCSRExceptionFlagMask));
   2372   __ Branch(&no_fpu_error, eq, t5, Operand(zero_reg));
   2373 
   2374   // Check for NaN, Infinity, and -Infinity.
   2375   // They are invariant through a Math.Floor call, so just
   2376   // return the original argument.
   2377   __ Subu(t3, t2, Operand(HeapNumber::kExponentMask
   2378         >> HeapNumber::kMantissaBitsInTopWord));
   2379   __ Branch(&restore_fcsr_and_return, eq, t3, Operand(zero_reg));
   2380   // We had an overflow or underflow in the conversion. Check if we
   2381   // have a big exponent.
  // If greater or equal, the argument is already rounded and in v0.
   2383   __ Branch(&restore_fcsr_and_return, ge, t3,
   2384       Operand(HeapNumber::kMantissaBits));
   2385   __ Branch(&wont_fit_smi);
   2386 
   2387   __ bind(&no_fpu_error);
   2388   // Move the result back to v0.
   2389   __ mfc1(v0, f0);
   2390   // Check if the result fits into a smi.
   2391   __ Addu(a1, v0, Operand(0x40000000));
   2392   __ Branch(&wont_fit_smi, lt, a1, Operand(zero_reg));
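  // Range check sketch: a 32-bit value fits in a smi iff it lies in
  // [-2^30, 2^30 - 1]. Adding 0x40000000 maps exactly that range onto the
  // non-negative 32-bit integers, so a negative sum means "does not fit".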
   2393   // Tag the result.
   2394   STATIC_ASSERT(kSmiTag == 0);
   2395   __ sll(v0, v0, kSmiTagSize);
   2396 
   2397   // Check for -0.
   2398   __ Branch(&restore_fcsr_and_return, ne, v0, Operand(zero_reg));
   2399   // t1 already holds the HeapNumber exponent.
   2400   __ And(t0, t1, Operand(HeapNumber::kSignMask));
  // If the HeapNumber's sign bit is set, the value was -0: reload the
  // original argument from the stack and return it. Otherwise v0 holds the
  // smi 0, so we can also just return.
   2403   __ Branch(&restore_fcsr_and_return, eq, t0, Operand(zero_reg));
   2404   __ lw(v0, MemOperand(sp, 0 * kPointerSize));
   2405 
   2406   __ bind(&restore_fcsr_and_return);
   2407   // Restore FCSR and return.
   2408   __ ctc1(a3, FCSR);
   2409 
   2410   __ DropAndRet(argc + 1);
   2411 
   2412   __ bind(&wont_fit_smi);
   2413   // Restore FCSR and fall to slow case.
   2414   __ ctc1(a3, FCSR);
   2415 
   2416   __ bind(&slow);
   2417   // Tail call the full function. We do not have to patch the receiver
   2418   // because the function makes no use of it.
   2419   ParameterCount expected(function);
   2420   __ InvokeFunction(function, expected, arguments(),
   2421                     JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
   2422 
   2423   __ bind(&miss);
   2424   // a2: function name.
   2425   GenerateMissBranch();
   2426 
   2427   // Return the generated code.
   2428   return GetCode(type, name);
   2429 }
   2430 
   2431 
   2432 Handle<Code> CallStubCompiler::CompileMathAbsCall(
   2433     Handle<Object> object,
   2434     Handle<JSObject> holder,
   2435     Handle<Cell> cell,
   2436     Handle<JSFunction> function,
   2437     Handle<String> name,
   2438     Code::StubType type) {
   2439   // ----------- S t a t e -------------
   2440   //  -- a2                     : function name
   2441   //  -- ra                     : return address
   2442   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
   2443   //  -- ...
   2444   //  -- sp[argc * 4]           : receiver
   2445   // -----------------------------------
   2446 
   2447   const int argc = arguments().immediate();
   2448   // If the object is not a JSObject or we got an unexpected number of
   2449   // arguments, bail out to the regular call.
   2450   if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
   2451 
   2452   Label miss;
   2453 
   2454   GenerateNameCheck(name, &miss);
   2455   if (cell.is_null()) {
   2456     __ lw(a1, MemOperand(sp, 1 * kPointerSize));
   2457     STATIC_ASSERT(kSmiTag == 0);
   2458     __ JumpIfSmi(a1, &miss);
   2459     CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, v0, a3, t0,
   2460                     name, &miss);
   2461   } else {
   2462     ASSERT(cell->value() == *function);
   2463     GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
   2464                                 &miss);
   2465     GenerateLoadFunctionFromCell(cell, function, &miss);
   2466   }
   2467 
   2468   // Load the (only) argument into v0.
   2469   __ lw(v0, MemOperand(sp, 0 * kPointerSize));
   2470 
   2471   // Check if the argument is a smi.
   2472   Label not_smi;
   2473   STATIC_ASSERT(kSmiTag == 0);
   2474   __ JumpIfNotSmi(v0, &not_smi);
   2475 
   2476   // Do bitwise not or do nothing depending on the sign of the
   2477   // argument.
   2478   __ sra(t0, v0, kBitsPerInt - 1);
   2479   __ Xor(a1, v0, t0);
   2480 
   2481   // Add 1 or do nothing depending on the sign of the argument.
   2482   __ Subu(v0, a1, t0);
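  // Branch-free abs sketch: t0 == x >> 31 is 0 for x >= 0 and -1 for x < 0,
  // so (x ^ t0) - t0 is x for x >= 0 and ~x + 1 == -x for x < 0. E.g.
  // x == -5: t0 == -1, x ^ t0 == 4, 4 - (-1) == 5. The identity also holds
  // for the tagged smi, since the tag bit is zero.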
   2483 
   2484   // If the result is still negative, go to the slow case.
   2485   // This only happens for the most negative smi.
   2486   Label slow;
   2487   __ Branch(&slow, lt, v0, Operand(zero_reg));
   2488 
   2489   // Smi case done.
   2490   __ DropAndRet(argc + 1);
   2491 
   2492   // Check if the argument is a heap number and load its exponent and
   2493   // sign.
   2494   __ bind(&not_smi);
   2495   __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
   2496   __ lw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));
   2497 
   2498   // Check the sign of the argument. If the argument is positive,
   2499   // just return it.
   2500   Label negative_sign;
   2501   __ And(t0, a1, Operand(HeapNumber::kSignMask));
   2502   __ Branch(&negative_sign, ne, t0, Operand(zero_reg));
   2503   __ DropAndRet(argc + 1);
   2504 
   2505   // If the argument is negative, clear the sign, and return a new
   2506   // number.
   2507   __ bind(&negative_sign);
   2508   __ Xor(a1, a1, Operand(HeapNumber::kSignMask));
   2509   __ lw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
   2510   __ LoadRoot(t2, Heap::kHeapNumberMapRootIndex);
   2511   __ AllocateHeapNumber(v0, t0, t1, t2, &slow);
   2512   __ sw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));
   2513   __ sw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
   2514   __ DropAndRet(argc + 1);
   2515 
   2516   // Tail call the full function. We do not have to patch the receiver
   2517   // because the function makes no use of it.
   2518   __ bind(&slow);
   2519   ParameterCount expected(function);
   2520   __ InvokeFunction(function, expected, arguments(),
   2521                     JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
   2522 
   2523   __ bind(&miss);
   2524   // a2: function name.
   2525   GenerateMissBranch();
   2526 
   2527   // Return the generated code.
   2528   return GetCode(type, name);
   2529 }
   2530 
   2531 
   2532 Handle<Code> CallStubCompiler::CompileFastApiCall(
   2533     const CallOptimization& optimization,
   2534     Handle<Object> object,
   2535     Handle<JSObject> holder,
   2536     Handle<Cell> cell,
   2537     Handle<JSFunction> function,
   2538     Handle<String> name) {
  Counters* counters = isolate()->counters();
   2541 
   2542   ASSERT(optimization.is_simple_api_call());
  // Bail out if object is a global object, as we don't want to
  // repatch it to the global receiver.
   2545   if (object->IsGlobalObject()) return Handle<Code>::null();
   2546   if (!cell.is_null()) return Handle<Code>::null();
   2547   if (!object->IsJSObject()) return Handle<Code>::null();
   2548   int depth = optimization.GetPrototypeDepthOfExpectedType(
   2549       Handle<JSObject>::cast(object), holder);
   2550   if (depth == kInvalidProtoDepth) return Handle<Code>::null();
   2551 
   2552   Label miss, miss_before_stack_reserved;
   2553 
   2554   GenerateNameCheck(name, &miss_before_stack_reserved);
   2555 
   2556   // Get the receiver from the stack.
   2557   const int argc = arguments().immediate();
   2558   __ lw(a1, MemOperand(sp, argc * kPointerSize));
   2559 
   2560   // Check that the receiver isn't a smi.
   2561   __ JumpIfSmi(a1, &miss_before_stack_reserved);
   2562 
   2563   __ IncrementCounter(counters->call_const(), 1, a0, a3);
   2564   __ IncrementCounter(counters->call_const_fast_api(), 1, a0, a3);
   2565 
   2566   ReserveSpaceForFastApiCall(masm(), a0);
   2567 
  // Check that the maps haven't changed; finding the holder is a side effect.
   2569   CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, a0, a3, t0, name,
   2570                   depth, &miss);
   2571 
   2572   GenerateFastApiDirectCall(masm(), optimization, argc);
   2573 
   2574   __ bind(&miss);
   2575   FreeSpaceForFastApiCall(masm());
   2576 
   2577   __ bind(&miss_before_stack_reserved);
   2578   GenerateMissBranch();
   2579 
   2580   // Return the generated code.
   2581   return GetCode(function);
   2582 }
   2583 
   2584 
   2585 void CallStubCompiler::CompileHandlerFrontend(Handle<Object> object,
   2586                                               Handle<JSObject> holder,
   2587                                               Handle<Name> name,
   2588                                               CheckType check,
   2589                                               Label* success) {
   2590   // ----------- S t a t e -------------
   2591   //  -- a2    : name
   2592   //  -- ra    : return address
   2593   // -----------------------------------
   2594   Label miss;
   2595   GenerateNameCheck(name, &miss);
   2596 
   2597   // Get the receiver from the stack.
   2598   const int argc = arguments().immediate();
   2599   __ lw(a1, MemOperand(sp, argc * kPointerSize));
   2600 
   2601   // Check that the receiver isn't a smi.
   2602   if (check != NUMBER_CHECK) {
   2603     __ JumpIfSmi(a1, &miss);
   2604   }
   2605 
  // Make sure that it's okay not to patch the on-stack receiver
  // unless we're doing a receiver map check.
   2608   ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
   2609   switch (check) {
   2610     case RECEIVER_MAP_CHECK:
   2611       __ IncrementCounter(isolate()->counters()->call_const(), 1, a0, a3);
   2612 
   2613       // Check that the maps haven't changed.
   2614       CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, a0, a3, t0,
   2615                       name, &miss);
   2616 
   2617       // Patch the receiver on the stack with the global proxy if
   2618       // necessary.
   2619       if (object->IsGlobalObject()) {
   2620         __ lw(a3, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
   2621         __ sw(a3, MemOperand(sp, argc * kPointerSize));
   2622       }
   2623       break;
   2624 
   2625     case STRING_CHECK:
   2626       // Check that the object is a string.
   2627       __ GetObjectType(a1, a3, a3);
   2628       __ Branch(&miss, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE));
   2629       // Check that the maps starting from the prototype haven't changed.
   2630       GenerateDirectLoadGlobalFunctionPrototype(
   2631           masm(), Context::STRING_FUNCTION_INDEX, a0, &miss);
   2632       CheckPrototypes(
   2633           Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
   2634           a0, holder, a3, a1, t0, name, &miss);
   2635       break;
   2636 
   2637     case SYMBOL_CHECK:
   2638       // Check that the object is a symbol.
   2639       __ GetObjectType(a1, a1, a3);
   2640       __ Branch(&miss, ne, a3, Operand(SYMBOL_TYPE));
   2641       // Check that the maps starting from the prototype haven't changed.
   2642       GenerateDirectLoadGlobalFunctionPrototype(
   2643           masm(), Context::SYMBOL_FUNCTION_INDEX, a0, &miss);
   2644       CheckPrototypes(
   2645           Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
   2646           a0, holder, a3, a1, t0, name, &miss);
   2647       break;
   2648 
   2649     case NUMBER_CHECK: {
   2650       Label fast;
   2651       // Check that the object is a smi or a heap number.
   2652       __ JumpIfSmi(a1, &fast);
   2653       __ GetObjectType(a1, a0, a0);
   2654       __ Branch(&miss, ne, a0, Operand(HEAP_NUMBER_TYPE));
   2655       __ bind(&fast);
   2656       // Check that the maps starting from the prototype haven't changed.
   2657       GenerateDirectLoadGlobalFunctionPrototype(
   2658           masm(), Context::NUMBER_FUNCTION_INDEX, a0, &miss);
   2659       CheckPrototypes(
   2660           Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
   2661           a0, holder, a3, a1, t0, name, &miss);
   2662       break;
   2663     }
   2664     case BOOLEAN_CHECK: {
   2665       Label fast;
   2666       // Check that the object is a boolean.
   2667       __ LoadRoot(t0, Heap::kTrueValueRootIndex);
   2668       __ Branch(&fast, eq, a1, Operand(t0));
   2669       __ LoadRoot(t0, Heap::kFalseValueRootIndex);
   2670       __ Branch(&miss, ne, a1, Operand(t0));
   2671       __ bind(&fast);
   2672       // Check that the maps starting from the prototype haven't changed.
   2673       GenerateDirectLoadGlobalFunctionPrototype(
   2674           masm(), Context::BOOLEAN_FUNCTION_INDEX, a0, &miss);
   2675       CheckPrototypes(
   2676           Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
   2677           a0, holder, a3, a1, t0, name, &miss);
   2678       break;
   2679     }
   2680   }
   2681 
   2682   __ jmp(success);
   2683 
   2684   // Handle call cache miss.
   2685   __ bind(&miss);
   2686 
   2687   GenerateMissBranch();
   2688 }
   2689 
   2690 
   2691 void CallStubCompiler::CompileHandlerBackend(Handle<JSFunction> function) {
   2692   CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
   2693       ? CALL_AS_FUNCTION
   2694       : CALL_AS_METHOD;
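  // A contextual call like "f()" goes out as a function call; a property
  // call like "o.f()" goes out as a method call. The call kind mainly
  // affects how a sloppy-mode callee sees its receiver.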
   2695   ParameterCount expected(function);
   2696   __ InvokeFunction(function, expected, arguments(),
   2697                     JUMP_FUNCTION, NullCallWrapper(), call_kind);
   2698 }
   2699 
   2700 
   2701 Handle<Code> CallStubCompiler::CompileCallConstant(
   2702     Handle<Object> object,
   2703     Handle<JSObject> holder,
   2704     Handle<Name> name,
   2705     CheckType check,
   2706     Handle<JSFunction> function) {
   2707   if (HasCustomCallGenerator(function)) {
   2708     Handle<Code> code = CompileCustomCall(object, holder,
   2709                                           Handle<Cell>::null(),
   2710                                           function, Handle<String>::cast(name),
   2711                                           Code::CONSTANT);
   2712     // A null handle means bail out to the regular compiler code below.
   2713     if (!code.is_null()) return code;
   2714   }
   2715 
   2716   Label success;
   2717 
   2718   CompileHandlerFrontend(object, holder, name, check, &success);
   2719   __ bind(&success);
   2720   CompileHandlerBackend(function);
   2721 
   2722   // Return the generated code.
   2723   return GetCode(function);
   2724 }
   2725 
   2726 
   2727 Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
   2728                                                       Handle<JSObject> holder,
   2729                                                       Handle<Name> name) {
   2730   // ----------- S t a t e -------------
   2731   //  -- a2    : name
   2732   //  -- ra    : return address
   2733   // -----------------------------------
   2734 
   2735   Label miss;
   2736 
   2737   GenerateNameCheck(name, &miss);
   2738 
   2739   // Get the number of arguments.
   2740   const int argc = arguments().immediate();
   2741   LookupResult lookup(isolate());
   2742   LookupPostInterceptor(holder, name, &lookup);
   2743 
   2744   // Get the receiver from the stack.
   2745   __ lw(a1, MemOperand(sp, argc * kPointerSize));
   2746 
   2747   CallInterceptorCompiler compiler(this, arguments(), a2, extra_state_);
   2748   compiler.Compile(masm(), object, holder, name, &lookup, a1, a3, t0, a0,
   2749                    &miss);
   2750 
   2751   // Move returned value, the function to call, to a1.
   2752   __ mov(a1, v0);
   2753   // Restore receiver.
   2754   __ lw(a0, MemOperand(sp, argc * kPointerSize));
   2755 
   2756   GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
   2757 
   2758   // Handle call cache miss.
   2759   __ bind(&miss);
   2760   GenerateMissBranch();
   2761 
   2762   // Return the generated code.
   2763   return GetCode(Code::INTERCEPTOR, name);
   2764 }
   2765 
   2766 
   2767 Handle<Code> CallStubCompiler::CompileCallGlobal(
   2768     Handle<JSObject> object,
   2769     Handle<GlobalObject> holder,
   2770     Handle<PropertyCell> cell,
   2771     Handle<JSFunction> function,
   2772     Handle<Name> name) {
   2773   // ----------- S t a t e -------------
   2774   //  -- a2    : name
   2775   //  -- ra    : return address
   2776   // -----------------------------------
   2777 
   2778   if (HasCustomCallGenerator(function)) {
   2779     Handle<Code> code = CompileCustomCall(
   2780         object, holder, cell, function, Handle<String>::cast(name),
   2781         Code::NORMAL);
   2782     // A null handle means bail out to the regular compiler code below.
   2783     if (!code.is_null()) return code;
   2784   }
   2785 
   2786   Label miss;
   2787   GenerateNameCheck(name, &miss);
   2788 
   2789   // Get the number of arguments.
   2790   const int argc = arguments().immediate();
   2791   GenerateGlobalReceiverCheck(object, holder, name, &miss);
   2792   GenerateLoadFunctionFromCell(cell, function, &miss);
   2793 
   2794   // Patch the receiver on the stack with the global proxy if
   2795   // necessary.
   2796   if (object->IsGlobalObject()) {
   2797     __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
   2798     __ sw(a3, MemOperand(sp, argc * kPointerSize));
   2799   }
   2800 
  // Set up the context (the function is already in a1).
   2802   __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
   2803 
   2804   // Jump to the cached code (tail call).
   2805   Counters* counters = isolate()->counters();
   2806   __ IncrementCounter(counters->call_global_inline(), 1, a3, t0);
   2807   ParameterCount expected(function->shared()->formal_parameter_count());
   2808   CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
   2809       ? CALL_AS_FUNCTION
   2810       : CALL_AS_METHOD;
   2811   // We call indirectly through the code field in the function to
   2812   // allow recompilation to take effect without changing any of the
   2813   // call sites.
   2814   __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
   2815   __ InvokeCode(a3, expected, arguments(), JUMP_FUNCTION,
   2816                 NullCallWrapper(), call_kind);
   2817 
   2818   // Handle call cache miss.
   2819   __ bind(&miss);
   2820   __ IncrementCounter(counters->call_global_inline_miss(), 1, a1, a3);
   2821   GenerateMissBranch();
   2822 
   2823   // Return the generated code.
   2824   return GetCode(Code::NORMAL, name);
   2825 }
   2826 
   2827 
   2828 Handle<Code> StoreStubCompiler::CompileStoreCallback(
   2829     Handle<JSObject> object,
   2830     Handle<JSObject> holder,
   2831     Handle<Name> name,
   2832     Handle<ExecutableAccessorInfo> callback) {
   2833   Label success;
   2834   HandlerFrontend(object, receiver(), holder, name, &success);
   2835   __ bind(&success);
   2836 
   2837   // Stub never generated for non-global objects that require access
   2838   // checks.
   2839   ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
   2840 
   2841   __ push(receiver());  // Receiver.
   2842   __ li(at, Operand(callback));  // Callback info.
   2843   __ push(at);
   2844   __ li(at, Operand(name));
   2845   __ Push(at, value());
   2846 
   2847   // Do tail-call to the runtime system.
   2848   ExternalReference store_callback_property =
   2849       ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
   2850   __ TailCallExternalReference(store_callback_property, 4, 1);
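  // The four values pushed above (receiver, callback info, name, value) are
  // the runtime call's arguments; the final 1 is the result size in words.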
   2851 
   2852   // Return the generated code.
   2853   return GetCode(kind(), Code::CALLBACKS, name);
   2854 }
   2855 
   2856 
   2857 #undef __
   2858 #define __ ACCESS_MASM(masm)
   2859 
   2860 
   2861 void StoreStubCompiler::GenerateStoreViaSetter(
   2862     MacroAssembler* masm,
   2863     Handle<JSFunction> setter) {
   2864   // ----------- S t a t e -------------
   2865   //  -- a0    : value
   2866   //  -- a1    : receiver
   2867   //  -- a2    : name
   2868   //  -- ra    : return address
   2869   // -----------------------------------
   2870   {
   2871     FrameScope scope(masm, StackFrame::INTERNAL);
   2872 
    // Save the value register so we can restore it later.
   2874     __ push(a0);
   2875 
   2876     if (!setter.is_null()) {
   2877       // Call the JavaScript setter with receiver and value on the stack.
   2878       __ push(a1);
   2879       __ push(a0);
   2880       ParameterCount actual(1);
   2881       ParameterCount expected(setter);
   2882       __ InvokeFunction(setter, expected, actual,
   2883                         CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
   2884     } else {
   2885       // If we generate a global code snippet for deoptimization only, remember
   2886       // the place to continue after deoptimization.
   2887       masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
   2888     }
   2889 
   2890     // We have to return the passed value, not the return value of the setter.
   2891     __ pop(v0);
   2892 
   2893     // Restore context register.
   2894     __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   2895   }
   2896   __ Ret();
   2897 }
   2898 
   2899 
   2900 #undef __
   2901 #define __ ACCESS_MASM(masm())
   2902 
   2903 
   2904 Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
   2905     Handle<JSObject> object,
   2906     Handle<Name> name) {
   2907   Label miss;
   2908 
   2909   // Check that the map of the object hasn't changed.
   2910   __ CheckMap(receiver(), scratch1(), Handle<Map>(object->map()), &miss,
   2911               DO_SMI_CHECK);
   2912 
   2913   // Perform global security token check if needed.
   2914   if (object->IsJSGlobalProxy()) {
   2915     __ CheckAccessGlobalProxy(receiver(), scratch1(), &miss);
   2916   }
   2917 
   2918   // Stub is never generated for non-global objects that require access
   2919   // checks.
   2920   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
   2921 
   2922   __ Push(receiver(), this->name(), value());
   2923 
   2924   __ li(scratch1(), Operand(Smi::FromInt(strict_mode())));
   2925   __ push(scratch1());  // strict mode
   2926 
   2927   // Do tail-call to the runtime system.
   2928   ExternalReference store_ic_property =
   2929       ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
   2930   __ TailCallExternalReference(store_ic_property, 4, 1);
   2931 
   2932   // Handle store cache miss.
   2933   __ bind(&miss);
   2934   TailCallBuiltin(masm(), MissBuiltin(kind()));
   2935 
   2936   // Return the generated code.
   2937   return GetCode(kind(), Code::INTERCEPTOR, name);
   2938 }
   2939 
   2940 
   2941 Handle<Code> StoreStubCompiler::CompileStoreGlobal(
   2942     Handle<GlobalObject> object,
   2943     Handle<PropertyCell> cell,
   2944     Handle<Name> name) {
   2945   Label miss;
   2946 
   2947   // Check that the map of the global has not changed.
   2948   __ lw(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset));
   2949   __ Branch(&miss, ne, scratch1(), Operand(Handle<Map>(object->map())));
   2950 
   2951   // Check that the value in the cell is not the hole. If it is, this
   2952   // cell could have been deleted and reintroducing the global needs
   2953   // to update the property details in the property dictionary of the
   2954   // global object. We bail out to the runtime system to do that.
   2955   __ li(scratch1(), Operand(cell));
   2956   __ LoadRoot(scratch2(), Heap::kTheHoleValueRootIndex);
   2957   __ lw(scratch3(), FieldMemOperand(scratch1(), Cell::kValueOffset));
   2958   __ Branch(&miss, eq, scratch3(), Operand(scratch2()));
   2959 
   2960   // Store the value in the cell.
   2961   __ sw(value(), FieldMemOperand(scratch1(), Cell::kValueOffset));
   2962   __ mov(v0, a0);  // Stored value must be returned in v0.
   2963   // Cells are always rescanned, so no write barrier here.
   2964 
   2965   Counters* counters = isolate()->counters();
   2966   __ IncrementCounter(
   2967       counters->named_store_global_inline(), 1, scratch1(), scratch2());
   2968   __ Ret();
   2969 
   2970   // Handle store cache miss.
   2971   __ bind(&miss);
   2972   __ IncrementCounter(
   2973       counters->named_store_global_inline_miss(), 1, scratch1(), scratch2());
   2974   TailCallBuiltin(masm(), MissBuiltin(kind()));
   2975 
   2976   // Return the generated code.
   2977   return GetICCode(kind(), Code::NORMAL, name);
   2978 }
   2979 
   2980 
   2981 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(
   2982     Handle<JSObject> object,
   2983     Handle<JSObject> last,
   2984     Handle<Name> name,
   2985     Handle<GlobalObject> global) {
   2986   Label success;
   2987 
   2988   NonexistentHandlerFrontend(object, last, name, &success, global);
   2989 
   2990   __ bind(&success);
  // Return undefined if the maps of the full prototype chain are still
  // the same.
   2992   __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
   2993   __ Ret();
   2994 
   2995   // Return the generated code.
   2996   return GetCode(kind(), Code::NONEXISTENT, name);
   2997 }
   2998 
   2999 
   3000 Register* LoadStubCompiler::registers() {
   3001   // receiver, name, scratch1, scratch2, scratch3, scratch4.
   3002   static Register registers[] = { a0, a2, a3, a1, t0, t1 };
   3003   return registers;
   3004 }
   3005 
   3006 
   3007 Register* KeyedLoadStubCompiler::registers() {
   3008   // receiver, name, scratch1, scratch2, scratch3, scratch4.
   3009   static Register registers[] = { a1, a0, a2, a3, t0, t1 };
   3010   return registers;
   3011 }
   3012 
   3013 
   3014 Register* StoreStubCompiler::registers() {
   3015   // receiver, name, value, scratch1, scratch2, scratch3.
   3016   static Register registers[] = { a1, a2, a0, a3, t0, t1 };
   3017   return registers;
   3018 }
   3019 
   3020 
   3021 Register* KeyedStoreStubCompiler::registers() {
   3022   // receiver, name, value, scratch1, scratch2, scratch3.
   3023   static Register registers[] = { a2, a1, a0, a3, t0, t1 };
   3024   return registers;
   3025 }
   3026 
   3027 
   3028 void KeyedLoadStubCompiler::GenerateNameCheck(Handle<Name> name,
   3029                                               Register name_reg,
   3030                                               Label* miss) {
   3031   __ Branch(miss, ne, name_reg, Operand(name));
   3032 }
   3033 
   3034 
   3035 void KeyedStoreStubCompiler::GenerateNameCheck(Handle<Name> name,
   3036                                                Register name_reg,
   3037                                                Label* miss) {
   3038   __ Branch(miss, ne, name_reg, Operand(name));
   3039 }
   3040 
   3041 
   3042 #undef __
   3043 #define __ ACCESS_MASM(masm)
   3044 
   3045 
   3046 void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
   3047                                              Handle<JSFunction> getter) {
   3048   // ----------- S t a t e -------------
   3049   //  -- a0    : receiver
   3050   //  -- a2    : name
   3051   //  -- ra    : return address
   3052   // -----------------------------------
   3053   {
   3054     FrameScope scope(masm, StackFrame::INTERNAL);
   3055 
   3056     if (!getter.is_null()) {
   3057       // Call the JavaScript getter with the receiver on the stack.
   3058       __ push(a0);
   3059       ParameterCount actual(0);
   3060       ParameterCount expected(getter);
   3061       __ InvokeFunction(getter, expected, actual,
   3062                         CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
   3063     } else {
   3064       // If we generate a global code snippet for deoptimization only, remember
   3065       // the place to continue after deoptimization.
   3066       masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
   3067     }
   3068 
   3069     // Restore context register.
   3070     __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   3071   }
   3072   __ Ret();
   3073 }
   3074 
   3075 
   3076 #undef __
   3077 #define __ ACCESS_MASM(masm())
   3078 
   3079 
   3080 Handle<Code> LoadStubCompiler::CompileLoadGlobal(
   3081     Handle<JSObject> object,
   3082     Handle<GlobalObject> global,
   3083     Handle<PropertyCell> cell,
   3084     Handle<Name> name,
   3085     bool is_dont_delete) {
   3086   Label success, miss;
   3087 
   3088   __ CheckMap(
   3089       receiver(), scratch1(), Handle<Map>(object->map()), &miss, DO_SMI_CHECK);
   3090   HandlerFrontendHeader(
   3091       object, receiver(), Handle<JSObject>::cast(global), name, &miss);
   3092 
   3093   // Get the value from the cell.
   3094   __ li(a3, Operand(cell));
   3095   __ lw(t0, FieldMemOperand(a3, Cell::kValueOffset));
   3096 
   3097   // Check for deleted property if property can actually be deleted.
   3098   if (!is_dont_delete) {
   3099     __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
   3100     __ Branch(&miss, eq, t0, Operand(at));
   3101   }
   3102 
   3103   HandlerFrontendFooter(name, &success, &miss);
   3104   __ bind(&success);
   3105 
   3106   Counters* counters = isolate()->counters();
   3107   __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
   3108   __ Ret(USE_DELAY_SLOT);
   3109   __ mov(v0, t0);
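  // The mov above sits in the branch delay slot of the Ret, so the cell's
  // value reaches v0 before control leaves the stub.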
   3110 
   3111   // Return the generated code.
   3112   return GetICCode(kind(), Code::NORMAL, name);
   3113 }
   3114 
   3115 
   3116 Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
   3117     MapHandleList* receiver_maps,
   3118     CodeHandleList* handlers,
   3119     Handle<Name> name,
   3120     Code::StubType type,
   3121     IcCheckType check) {
   3122   Label miss;
   3123 
   3124   if (check == PROPERTY) {
   3125     GenerateNameCheck(name, this->name(), &miss);
   3126   }
   3127 
   3128   __ JumpIfSmi(receiver(), &miss);
   3129   Register map_reg = scratch1();
   3130 
   3131   int receiver_count = receiver_maps->length();
   3132   int number_of_handled_maps = 0;
   3133   __ lw(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
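  // Polymorphic dispatch: linearly compare the receiver's map against each
  // non-deprecated map in the list and tail-jump to the matching handler;
  // deprecated maps fall through to the miss path below.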
   3134   for (int current = 0; current < receiver_count; ++current) {
   3135     Handle<Map> map = receiver_maps->at(current);
   3136     if (!map->is_deprecated()) {
   3137       number_of_handled_maps++;
      __ Jump(handlers->at(current), RelocInfo::CODE_TARGET,
          eq, map_reg, Operand(map));
   3140     }
   3141   }
   3142   ASSERT(number_of_handled_maps != 0);
   3143 
   3144   __ bind(&miss);
   3145   TailCallBuiltin(masm(), MissBuiltin(kind()));
   3146 
   3147   // Return the generated code.
   3148   InlineCacheState state =
   3149       number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
   3150   return GetICCode(kind(), type, name, state);
   3151 }
   3152 
   3153 
   3154 Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
   3155     MapHandleList* receiver_maps,
   3156     CodeHandleList* handler_stubs,
   3157     MapHandleList* transitioned_maps) {
   3158   Label miss;
   3159   __ JumpIfSmi(receiver(), &miss);
   3160 
   3161   int receiver_count = receiver_maps->length();
   3162   __ lw(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset));
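  // For each map, either tail-jump straight to the handler, or, when a
  // transition is required, pass the target map in transition_map() so the
  // handler can migrate the receiver's elements kind before storing.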
   3163   for (int i = 0; i < receiver_count; ++i) {
   3164     if (transitioned_maps->at(i).is_null()) {
   3165       __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq,
   3166           scratch1(), Operand(receiver_maps->at(i)));
   3167     } else {
   3168       Label next_map;
   3169       __ Branch(&next_map, ne, scratch1(), Operand(receiver_maps->at(i)));
   3170       __ li(transition_map(), Operand(transitioned_maps->at(i)));
   3171       __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET);
   3172       __ bind(&next_map);
   3173     }
   3174   }
   3175 
   3176   __ bind(&miss);
   3177   TailCallBuiltin(masm(), MissBuiltin(kind()));
   3178 
   3179   // Return the generated code.
   3180   return GetICCode(
   3181       kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
   3182 }
   3183 
   3184 
   3185 #undef __
   3186 #define __ ACCESS_MASM(masm)
   3187 
   3188 
   3189 void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
   3190     MacroAssembler* masm) {
   3191   // ---------- S t a t e --------------
   3192   //  -- ra     : return address
   3193   //  -- a0     : key
   3194   //  -- a1     : receiver
   3195   // -----------------------------------
   3196   Label slow, miss_force_generic;
   3197 
   3198   Register key = a0;
   3199   Register receiver = a1;
   3200 
   3201   __ JumpIfNotSmi(key, &miss_force_generic);
   3202   __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
   3203   __ sra(a2, a0, kSmiTagSize);
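  // a2: untagged key (a smi is the index shifted left by kSmiTagSize).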
   3204   __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);
   3205   __ Ret();
   3206 
   3207   // Slow case, key and receiver still in a0 and a1.
   3208   __ bind(&slow);
   3209   __ IncrementCounter(
   3210       masm->isolate()->counters()->keyed_load_external_array_slow(),
   3211       1, a2, a3);
   3212   // Entry registers are intact.
   3213   // ---------- S t a t e --------------
   3214   //  -- ra     : return address
   3215   //  -- a0     : key
   3216   //  -- a1     : receiver
   3217   // -----------------------------------
   3218   TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);
   3219 
   3220   // Miss case, call the runtime.
   3221   __ bind(&miss_force_generic);
   3222 
   3223   // ---------- S t a t e --------------
   3224   //  -- ra     : return address
   3225   //  -- a0     : key
   3226   //  -- a1     : receiver
   3227   // -----------------------------------
   3228   TailCallBuiltin(masm, Builtins::kKeyedLoadIC_MissForceGeneric);
   3229 }
   3230 
   3231 
   3232 static void GenerateSmiKeyCheck(MacroAssembler* masm,
   3233                                 Register key,
   3234                                 Register scratch0,
   3235                                 Register scratch1,
   3236                                 FPURegister double_scratch0,
   3237                                 FPURegister double_scratch1,
   3238                                 Label* fail) {
   3239   Label key_ok;
   3240   // Check for smi or a smi inside a heap number.  We convert the heap
   3241   // number and check if the conversion is exact and fits into the smi
   3242   // range.
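  // E.g. a key of 7.0 is accepted, while 7.5 (inexact conversion) or 1 << 30
  // (outside the 32-bit smi range) is rejected.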
   3243   __ JumpIfSmi(key, &key_ok);
   3244   __ CheckMap(key,
   3245               scratch0,
   3246               Heap::kHeapNumberMapRootIndex,
   3247               fail,
   3248               DONT_DO_SMI_CHECK);
   3249   __ ldc1(double_scratch0, FieldMemOperand(key, HeapNumber::kValueOffset));
   3250   __ EmitFPUTruncate(kRoundToZero,
   3251                      scratch0,
   3252                      double_scratch0,
   3253                      at,
   3254                      double_scratch1,
   3255                      scratch1,
   3256                      kCheckForInexactConversion);
   3257 
   3258   __ Branch(fail, ne, scratch1, Operand(zero_reg));
   3259 
   3260   __ SmiTagCheckOverflow(key, scratch0, scratch1);
   3261   __ BranchOnOverflow(fail, scratch1);
   3262   __ bind(&key_ok);
   3263 }
   3264 
   3265 
   3266 void KeyedStoreStubCompiler::GenerateStoreExternalArray(
   3267     MacroAssembler* masm,
   3268     ElementsKind elements_kind) {
   3269   // ---------- S t a t e --------------
   3270   //  -- a0     : value
   3271   //  -- a1     : key
   3272   //  -- a2     : receiver
   3273   //  -- ra     : return address
   3274   // -----------------------------------
   3275 
   3276   Label slow, check_heap_number, miss_force_generic;
   3277 
   3278   // Register usage.
   3279   Register value = a0;
   3280   Register key = a1;
   3281   Register receiver = a2;
  // a3 holds the elements array, and later the external backing store.
   3283 
  // This stub is meant to be tail-jumped to; the receiver must already
  // have been verified by the caller not to be a smi.
   3286 
   3287   // Check that the key is a smi or a heap number convertible to a smi.
   3288   GenerateSmiKeyCheck(masm, key, t0, t1, f2, f4, &miss_force_generic);
   3289 
   3290   __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
   3291 
   3292   // Check that the index is in range.
   3293   __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
   3294   // Unsigned comparison catches both negative and too-large values.
   3295   __ Branch(&miss_force_generic, Ugreater_equal, key, Operand(t1));
   3296 
   3297   // Handle both smis and HeapNumbers in the fast path. Go to the
   3298   // runtime for all other kinds of values.
   3299   // a3: external array.
   3300 
   3301   if (elements_kind == EXTERNAL_PIXEL_ELEMENTS) {
   3302     // Double to pixel conversion is only implemented in the runtime for now.
   3303     __ JumpIfNotSmi(value, &slow);
   3304   } else {
   3305     __ JumpIfNotSmi(value, &check_heap_number);
   3306   }
   3307   __ SmiUntag(t1, value);
   3308   __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
   3309 
   3310   // a3: base pointer of external storage.
   3311   // t1: value (integer).
   3312 
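  // Index scaling below relies on the key being a smi (index << 1): 1-byte
  // elements shift right by 1, 2-byte elements use the smi directly as a
  // byte offset, and 4- and 8-byte elements shift left by 1 and 2.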
   3313   switch (elements_kind) {
   3314     case EXTERNAL_PIXEL_ELEMENTS: {
   3315       // Clamp the value to [0..255].
   3316       // v0 is used as a scratch register here.
   3317       Label done;
   3318       __ li(v0, Operand(255));
   3319       // Normal branch: nop in delay slot.
   3320       __ Branch(&done, gt, t1, Operand(v0));
   3321       // Use delay slot in this branch.
   3322       __ Branch(USE_DELAY_SLOT, &done, lt, t1, Operand(zero_reg));
   3323       __ mov(v0, zero_reg);  // In delay slot.
   3324       __ mov(v0, t1);  // Value is in range 0..255.
   3325       __ bind(&done);
   3326       __ mov(t1, v0);
   3327 
   3328       __ srl(t8, key, 1);
   3329       __ addu(t8, a3, t8);
      __ sb(t1, MemOperand(t8, 0));
      break;
    }
   3333     case EXTERNAL_BYTE_ELEMENTS:
   3334     case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
   3335       __ srl(t8, key, 1);
   3336       __ addu(t8, a3, t8);
   3337       __ sb(t1, MemOperand(t8, 0));
   3338       break;
   3339     case EXTERNAL_SHORT_ELEMENTS:
   3340     case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
   3341       __ addu(t8, a3, key);
   3342       __ sh(t1, MemOperand(t8, 0));
   3343       break;
   3344     case EXTERNAL_INT_ELEMENTS:
   3345     case EXTERNAL_UNSIGNED_INT_ELEMENTS:
   3346       __ sll(t8, key, 1);
   3347       __ addu(t8, a3, t8);
   3348       __ sw(t1, MemOperand(t8, 0));
   3349       break;
   3350     case EXTERNAL_FLOAT_ELEMENTS:
   3351       // Perform int-to-float conversion and store to memory.
   3352       __ SmiUntag(t0, key);
   3353       StoreIntAsFloat(masm, a3, t0, t1, t2);
   3354       break;
   3355     case EXTERNAL_DOUBLE_ELEMENTS:
   3356       __ sll(t8, key, 2);
   3357       __ addu(a3, a3, t8);
   3358       // a3: effective address of the double element
   3359       FloatingPointHelper::Destination destination;
   3360       destination = FloatingPointHelper::kFPURegisters;
   3361       FloatingPointHelper::ConvertIntToDouble(
   3362           masm, t1, destination,
   3363           f0, t2, t3,  // These are: double_dst, dst_mantissa, dst_exponent.
   3364           t0, f2);  // These are: scratch2, single_scratch.
   3365       __ sdc1(f0, MemOperand(a3, 0));
   3366       break;
   3367     case FAST_ELEMENTS:
   3368     case FAST_SMI_ELEMENTS:
   3369     case FAST_DOUBLE_ELEMENTS:
   3370     case FAST_HOLEY_ELEMENTS:
   3371     case FAST_HOLEY_SMI_ELEMENTS:
   3372     case FAST_HOLEY_DOUBLE_ELEMENTS:
   3373     case DICTIONARY_ELEMENTS:
   3374     case NON_STRICT_ARGUMENTS_ELEMENTS:
   3375       UNREACHABLE();
   3376       break;
   3377   }
   3378 
   3379   // Entry registers are intact, a0 holds the value which is the return value.
   3380   __ Ret(USE_DELAY_SLOT);
   3381   __ mov(v0, a0);
   3382 
   3383   if (elements_kind != EXTERNAL_PIXEL_ELEMENTS) {
   3384     // a3: external array.
   3385     __ bind(&check_heap_number);
   3386     __ GetObjectType(value, t1, t2);
   3387     __ Branch(&slow, ne, t2, Operand(HEAP_NUMBER_TYPE));
   3388 
   3389     __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
   3390 
   3391     // a3: base pointer of external storage.
   3392 
   3393     // The WebGL specification leaves the behavior of storing NaN and
   3394     // +/-Infinity into integer arrays basically undefined. For more
   3395     // reproducible behavior, convert these to zero.
   3396 
   3398     __ ldc1(f0, FieldMemOperand(a0, HeapNumber::kValueOffset));
   3399 
   3400     if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
   3401       __ cvt_s_d(f0, f0);
   3402       __ sll(t8, key, 1);
   3403       __ addu(t8, a3, t8);
   3404       __ swc1(f0, MemOperand(t8, 0));
   3405     } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
   3406       __ sll(t8, key, 2);
   3407       __ addu(t8, a3, t8);
   3408       __ sdc1(f0, MemOperand(t8, 0));
   3409     } else {
   3410       __ EmitECMATruncate(t3, f0, f2, t2, t1, t5);
   3411 
   3412       switch (elements_kind) {
   3413         case EXTERNAL_BYTE_ELEMENTS:
   3414         case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
   3415           __ srl(t8, key, 1);
   3416           __ addu(t8, a3, t8);
   3417           __ sb(t3, MemOperand(t8, 0));
   3418           break;
   3419         case EXTERNAL_SHORT_ELEMENTS:
   3420         case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
   3421           __ addu(t8, a3, key);
   3422           __ sh(t3, MemOperand(t8, 0));
   3423           break;
   3424         case EXTERNAL_INT_ELEMENTS:
   3425         case EXTERNAL_UNSIGNED_INT_ELEMENTS:
   3426           __ sll(t8, key, 1);
   3427           __ addu(t8, a3, t8);
   3428           __ sw(t3, MemOperand(t8, 0));
   3429           break;
   3430         case EXTERNAL_PIXEL_ELEMENTS:
   3431         case EXTERNAL_FLOAT_ELEMENTS:
   3432         case EXTERNAL_DOUBLE_ELEMENTS:
   3433         case FAST_ELEMENTS:
   3434         case FAST_SMI_ELEMENTS:
   3435         case FAST_DOUBLE_ELEMENTS:
   3436         case FAST_HOLEY_ELEMENTS:
   3437         case FAST_HOLEY_SMI_ELEMENTS:
   3438         case FAST_HOLEY_DOUBLE_ELEMENTS:
   3439         case DICTIONARY_ELEMENTS:
   3440         case NON_STRICT_ARGUMENTS_ELEMENTS:
   3441           UNREACHABLE();
   3442           break;
   3443       }
   3444     }
   3445 
   3446     // Entry registers are intact, a0 holds the value
   3447     // which is the return value.
   3448     __ Ret(USE_DELAY_SLOT);
   3449     __ mov(v0, a0);
   3450   }
   3451 
  // Slow case; value, key and receiver still in a0, a1 and a2.
   3453   __ bind(&slow);
   3454   __ IncrementCounter(
   3455       masm->isolate()->counters()->keyed_load_external_array_slow(),
   3456       1, a2, a3);
   3457   // Entry registers are intact.
   3458   // ---------- S t a t e --------------
   3459   //  -- ra     : return address
  //  -- a0     : value
  //  -- a1     : key
  //  -- a2     : receiver
   3462   // -----------------------------------
   3463   TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow);
   3464 
   3465   // Miss case, call the runtime.
   3466   __ bind(&miss_force_generic);
   3467 
   3468   // ---------- S t a t e --------------
   3469   //  -- ra     : return address
  //  -- a0     : value
  //  -- a1     : key
  //  -- a2     : receiver
   3472   // -----------------------------------
   3473   TailCallBuiltin(masm, Builtins::kKeyedStoreIC_MissForceGeneric);
   3474 }
   3475 
   3476 
   3477 void KeyedStoreStubCompiler::GenerateStoreFastElement(
   3478     MacroAssembler* masm,
   3479     bool is_js_array,
   3480     ElementsKind elements_kind,
   3481     KeyedAccessStoreMode store_mode) {
   3482   // ----------- S t a t e -------------
   3483   //  -- a0    : value
   3484   //  -- a1    : key
   3485   //  -- a2    : receiver
   3486   //  -- ra    : return address
  //  -- a3    : scratch (elements)
  //  -- t0    : scratch
   3489   // -----------------------------------
   3490   Label miss_force_generic, transition_elements_kind, grow, slow;
   3491   Label finish_store, check_capacity;
   3492 
   3493   Register value_reg = a0;
   3494   Register key_reg = a1;
   3495   Register receiver_reg = a2;
   3496   Register scratch = t0;
   3497   Register elements_reg = a3;
   3498   Register length_reg = t1;
   3499   Register scratch2 = t2;
   3500 
  // This stub is meant to be tail-jumped to; the receiver must already
  // have been verified by the caller not to be a smi.
   3503 
   3504   // Check that the key is a smi or a heap number convertible to a smi.
   3505   GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, f4, &miss_force_generic);
   3506 
   3507   if (IsFastSmiElementsKind(elements_kind)) {
   3508     __ JumpIfNotSmi(value_reg, &transition_elements_kind);
   3509   }
   3510 
   3511   // Check that the key is within bounds.
   3512   __ lw(elements_reg,
   3513         FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
   3514   if (is_js_array) {
   3515     __ lw(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
   3516   } else {
   3517     __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
   3518   }
   3519   // Compare smis.
   3520   if (is_js_array && IsGrowStoreMode(store_mode)) {
   3521     __ Branch(&grow, hs, key_reg, Operand(scratch));
   3522   } else {
   3523     __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));
   3524   }
   3525 
  // Make sure elements is a fast element array, not copy-on-write ('COW').
   3527   __ CheckMap(elements_reg,
   3528               scratch,
   3529               Heap::kFixedArrayMapRootIndex,
   3530               &miss_force_generic,
   3531               DONT_DO_SMI_CHECK);
   3532 
   3533   __ bind(&finish_store);
   3534 
   3535   if (IsFastSmiElementsKind(elements_kind)) {
   3536     __ Addu(scratch,
   3537             elements_reg,
   3538             Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   3539     STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
   3540     __ sll(scratch2, key_reg, kPointerSizeLog2 - kSmiTagSize);
   3541     __ Addu(scratch, scratch, scratch2);
   3542     __ sw(value_reg, MemOperand(scratch));
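    // The sll above turns the smi key (index << kSmiTagSize) into a byte
    // offset (index * kPointerSize); adding the header-minus-tag bias
    // yields the element's address.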
   3543   } else {
   3544     ASSERT(IsFastObjectElementsKind(elements_kind));
   3545     __ Addu(scratch,
   3546             elements_reg,
   3547             Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   3548     STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
   3549     __ sll(scratch2, key_reg, kPointerSizeLog2 - kSmiTagSize);
   3550     __ Addu(scratch, scratch, scratch2);
   3551     __ sw(value_reg, MemOperand(scratch));
   3552     __ mov(receiver_reg, value_reg);
   3553     __ RecordWrite(elements_reg,  // Object.
   3554                    scratch,       // Address.
   3555                    receiver_reg,  // Value.
   3556                    kRAHasNotBeenSaved,
   3557                    kDontSaveFPRegs);
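    // The write barrier is required because a heap reference may have been
    // stored; the smi branch above skips it, since smis are never heap
    // pointers.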
   3558   }
   3559   // value_reg (a0) is preserved.
   3560   // Done.
   3561   __ Ret();
   3562 
   3563   __ bind(&miss_force_generic);
   3564   TailCallBuiltin(masm, Builtins::kKeyedStoreIC_MissForceGeneric);
   3565 
   3566   __ bind(&transition_elements_kind);
   3567   TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Miss);
   3568 
   3569   if (is_js_array && IsGrowStoreMode(store_mode)) {
   3570     // Grow the array by a single element if possible.
   3571     __ bind(&grow);
   3572 
    // Make sure the array is only growing by a single element; anything
    // else must be handled by the runtime.
   3575     __ Branch(&miss_force_generic, ne, key_reg, Operand(scratch));
   3576 
   3577     // Check for the empty array, and preallocate a small backing store if
   3578     // possible.
   3579     __ lw(length_reg,
   3580           FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
   3581     __ lw(elements_reg,
   3582           FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
   3583     __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
   3584     __ Branch(&check_capacity, ne, elements_reg, Operand(at));
   3585 
   3586     int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
   3587     __ Allocate(size, elements_reg, scratch, scratch2, &slow, TAG_OBJECT);
   3588 
   3589     __ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex);
   3590     __ sw(scratch, FieldMemOperand(elements_reg, JSObject::kMapOffset));
   3591     __ li(scratch, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
   3592     __ sw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
   3593     __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
   3594     for (int i = 1; i < JSArray::kPreallocatedArrayElements; ++i) {
   3595       __ sw(scratch, FieldMemOperand(elements_reg, FixedArray::SizeFor(i)));
   3596     }
   3597 
   3598     // Store the element at index zero.
   3599     __ sw(value_reg, FieldMemOperand(elements_reg, FixedArray::SizeFor(0)));
   3600 
   3601     // Install the new backing store in the JSArray.
   3602     __ sw(elements_reg,
   3603           FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
   3604     __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg,
   3605                         scratch, kRAHasNotBeenSaved, kDontSaveFPRegs,
   3606                         EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
   3607 
   3608     // Increment the length of the array.
   3609     __ li(length_reg, Operand(Smi::FromInt(1)));
   3610     __ Ret(USE_DELAY_SLOT);
   3611     __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
   3612 
   3613     __ bind(&check_capacity);
    // Check for COW elements; in general they are not handled by this stub.
   3615     __ CheckMap(elements_reg,
   3616                 scratch,
   3617                 Heap::kFixedCOWArrayMapRootIndex,
   3618                 &miss_force_generic,
   3619                 DONT_DO_SMI_CHECK);
   3620 
   3621     __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
   3622     __ Branch(&slow, hs, length_reg, Operand(scratch));
   3623 
   3624     // Grow the array and finish the store.
   3625     __ Addu(length_reg, length_reg, Operand(Smi::FromInt(1)));
   3626     __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
   3627     __ jmp(&finish_store);
   3628 
   3629     __ bind(&slow);
   3630     TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow);
   3631   }
   3632 }
   3633 
   3634 
   3635 void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
   3636     MacroAssembler* masm,
   3637     bool is_js_array,
   3638     KeyedAccessStoreMode store_mode) {
   3639   // ----------- S t a t e -------------
   3640   //  -- a0    : value
   3641   //  -- a1    : key
   3642   //  -- a2    : receiver
   3643   //  -- ra    : return address
   3644   //  -- a3    : scratch (elements backing store)
  //  -- t0    : scratch
  //  -- t1    : scratch
  //  -- t2    : scratch
  //  -- t3    : scratch (doubles as length_reg)
  //  -- t4    : scratch
   3650   // -----------------------------------
   3651   Label miss_force_generic, transition_elements_kind, grow, slow;
   3652   Label finish_store, check_capacity;
   3653 
   3654   Register value_reg = a0;
   3655   Register key_reg = a1;
   3656   Register receiver_reg = a2;
   3657   Register elements_reg = a3;
   3658   Register scratch1 = t0;
   3659   Register scratch2 = t1;
   3660   Register scratch3 = t2;
   3661   Register scratch4 = t3;
   3662   Register scratch5 = t4;
   3663   Register length_reg = t3;
   3664 
  // This stub is meant to be tail-jumped to; the receiver must already
  // have been verified by the caller not to be a smi.
   3667 
   3668   // Check that the key is a smi or a heap number convertible to a smi.
   3669   GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, f4, &miss_force_generic);
   3670 
   3671   __ lw(elements_reg,
   3672          FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
   3673 
   3674   // Check that the key is within bounds.
   3675   if (is_js_array) {
   3676     __ lw(scratch1, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
   3677   } else {
   3678     __ lw(scratch1,
   3679           FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
   3680   }
  // Compare smis; the unsigned compare catches both negative and
  // out-of-bound indexes.
   3683   if (IsGrowStoreMode(store_mode)) {
   3684     __ Branch(&grow, hs, key_reg, Operand(scratch1));
   3685   } else {
   3686     __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch1));
   3687   }
   3688 
   3689   __ bind(&finish_store);
   3690 
   3691   __ StoreNumberToDoubleElements(value_reg,
   3692                                  key_reg,
   3693                                  // All registers after this are overwritten.
   3694                                  elements_reg,
   3695                                  scratch1,
   3696                                  scratch2,
   3697                                  scratch3,
   3698                                  scratch4,
   3699                                  &transition_elements_kind);
   3700 
   3701   __ Ret(USE_DELAY_SLOT);
   3702   __ mov(v0, value_reg);  // In delay slot.
   3703 
  // Handle store cache miss, replacing the IC with the generic stub.
   3705   __ bind(&miss_force_generic);
   3706   TailCallBuiltin(masm, Builtins::kKeyedStoreIC_MissForceGeneric);
   3707 
   3708   __ bind(&transition_elements_kind);
   3709   TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Miss);
   3710 
   3711   if (is_js_array && IsGrowStoreMode(store_mode)) {
   3712     // Grow the array by a single element if possible.
   3713     __ bind(&grow);
   3714 
    // Make sure the array is only growing by a single element; anything
    // else must be handled by the runtime.
   3717     __ Branch(&miss_force_generic, ne, key_reg, Operand(scratch1));
   3718 
   3719     // Transition on values that can't be stored in a FixedDoubleArray.
   3720     Label value_is_smi;
   3721     __ JumpIfSmi(value_reg, &value_is_smi);
   3722     __ lw(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
   3723     __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
   3724     __ Branch(&transition_elements_kind, ne, scratch1, Operand(at));
   3725     __ bind(&value_is_smi);
   3726 
   3727     // Check for the empty array, and preallocate a small backing store if
   3728     // possible.
   3729     __ lw(length_reg,
   3730           FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
   3731     __ lw(elements_reg,
   3732           FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
   3733     __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
   3734     __ Branch(&check_capacity, ne, elements_reg, Operand(at));
   3735 
   3736     int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
   3737     __ Allocate(size, elements_reg, scratch1, scratch2, &slow, TAG_OBJECT);
   3738 
   3739     // Initialize the new FixedDoubleArray.
   3740     __ LoadRoot(scratch1, Heap::kFixedDoubleArrayMapRootIndex);
   3741     __ sw(scratch1, FieldMemOperand(elements_reg, JSObject::kMapOffset));
   3742     __ li(scratch1, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
   3743     __ sw(scratch1,
   3744           FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset));
   3745 
   3746     __ mov(scratch1, elements_reg);
   3747     __ StoreNumberToDoubleElements(value_reg,
   3748                                    key_reg,
   3749                                    // All registers after this are overwritten.
   3750                                    scratch1,
   3751                                    scratch2,
   3752                                    scratch3,
   3753                                    scratch4,
   3754                                    scratch5,
   3755                                    &transition_elements_kind);
   3756 
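    // Fill the slots past index zero with the hole NaN bit pattern so that
    // reads treat them as absent elements rather than stale data.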
   3757     __ li(scratch1, Operand(kHoleNanLower32));
   3758     __ li(scratch2, Operand(kHoleNanUpper32));
   3759     for (int i = 1; i < JSArray::kPreallocatedArrayElements; i++) {
   3760       int offset = FixedDoubleArray::OffsetOfElementAt(i);
   3761       __ sw(scratch1, FieldMemOperand(elements_reg, offset));
   3762       __ sw(scratch2, FieldMemOperand(elements_reg, offset + kPointerSize));
   3763     }
   3764 
   3765     // Install the new backing store in the JSArray.
   3766     __ sw(elements_reg,
   3767           FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
   3768     __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg,
   3769                         scratch1, kRAHasNotBeenSaved, kDontSaveFPRegs,
   3770                         EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
   3771 
   3772     // Increment the length of the array.
   3773     __ li(length_reg, Operand(Smi::FromInt(1)));
   3774     __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
   3775     __ Ret(USE_DELAY_SLOT);
   3776     __ lw(elements_reg,
   3777           FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
   3778 
   3779     __ bind(&check_capacity);
   3780     // Make sure that the backing store can hold additional elements.
   3781     __ lw(scratch1,
   3782           FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset));
   3783     __ Branch(&slow, hs, length_reg, Operand(scratch1));
   3784 
   3785     // Grow the array and finish the store.
   3786     __ Addu(length_reg, length_reg, Operand(Smi::FromInt(1)));
   3787     __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
   3788     __ jmp(&finish_store);
   3789 
   3790     __ bind(&slow);
   3791     TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow);
   3792   }
   3793 }
   3794 
   3795 
   3796 #undef __
   3797 
   3798 } }  // namespace v8::internal
   3799 
   3800 #endif  // V8_TARGET_ARCH_MIPS
   3801