      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Redistribution and use in source and binary forms, with or without
      3 // modification, are permitted provided that the following conditions are
      4 // met:
      5 //
      6 //     * Redistributions of source code must retain the above copyright
      7 //       notice, this list of conditions and the following disclaimer.
      8 //     * Redistributions in binary form must reproduce the above
      9 //       copyright notice, this list of conditions and the following
     10 //       disclaimer in the documentation and/or other materials provided
     11 //       with the distribution.
     12 //     * Neither the name of Google Inc. nor the names of its
     13 //       contributors may be used to endorse or promote products derived
     14 //       from this software without specific prior written permission.
     15 //
     16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
     17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
     18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
     19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
     20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
     21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
     22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
     23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
     24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
     26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 
     28 #include "v8.h"
     29 
     30 #if defined(V8_TARGET_ARCH_IA32)
     31 
     32 #include "ic-inl.h"
     33 #include "codegen.h"
     34 #include "stub-cache.h"
     35 
     36 namespace v8 {
     37 namespace internal {
     38 
     39 #define __ ACCESS_MASM(masm)
     40 
     41 
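         // A quick sketch of the probe below (assuming ia32's 4-byte pointers):
         // each stub cache entry holds three pointers {name, code, map}, i.e.
         // 12 bytes.  The incoming |offset| is already pointer-size scaled
         // (entry_index * 4), so the lea below turns it into the entry's byte
         // offset, byte_offset = offset * 3 = entry_index * 12, which is then
         // added to the key/value/map external references through
         // Operand::StaticArray(offset, times_1, ...).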
     42 static void ProbeTable(Isolate* isolate,
     43                        MacroAssembler* masm,
     44                        Code::Flags flags,
     45                        StubCache::Table table,
     46                        Register name,
     47                        Register receiver,
      48                        // Number of the cache entry, pointer-size scaled.
     49                        Register offset,
     50                        Register extra) {
     51   ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
     52   ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
     53   ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
     54 
     55   Label miss;
     56 
     57   // Multiply by 3 because there are 3 fields per entry (name, code, map).
     58   __ lea(offset, Operand(offset, offset, times_2, 0));
     59 
     60   if (extra.is_valid()) {
     61     // Get the code entry from the cache.
     62     __ mov(extra, Operand::StaticArray(offset, times_1, value_offset));
     63 
     64     // Check that the key in the entry matches the name.
     65     __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
     66     __ j(not_equal, &miss);
     67 
     68     // Check the map matches.
     69     __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
     70     __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
     71     __ j(not_equal, &miss);
     72 
     73     // Check that the flags match what we're looking for.
     74     __ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
     75     __ and_(offset, ~Code::kFlagsNotUsedInLookup);
     76     __ cmp(offset, flags);
     77     __ j(not_equal, &miss);
     78 
     79 #ifdef DEBUG
     80     if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
     81       __ jmp(&miss);
     82     } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
     83       __ jmp(&miss);
     84     }
     85 #endif
     86 
     87     // Jump to the first instruction in the code stub.
     88     __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag));
     89     __ jmp(extra);
     90 
     91     __ bind(&miss);
     92   } else {
     93     // Save the offset on the stack.
     94     __ push(offset);
     95 
     96     // Check that the key in the entry matches the name.
     97     __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
     98     __ j(not_equal, &miss);
     99 
    100     // Check the map matches.
    101     __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
    102     __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
    103     __ j(not_equal, &miss);
    104 
    105     // Restore offset register.
    106     __ mov(offset, Operand(esp, 0));
    107 
    108     // Get the code entry from the cache.
    109     __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));
    110 
    111     // Check that the flags match what we're looking for.
    112     __ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
    113     __ and_(offset, ~Code::kFlagsNotUsedInLookup);
    114     __ cmp(offset, flags);
    115     __ j(not_equal, &miss);
    116 
    117 #ifdef DEBUG
    118     if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
    119       __ jmp(&miss);
    120     } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
    121       __ jmp(&miss);
    122     }
    123 #endif
    124 
    125     // Restore offset and re-load code entry from cache.
    126     __ pop(offset);
    127     __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));
    128 
    129     // Jump to the first instruction in the code stub.
    130     __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag));
    131     __ jmp(offset);
    132 
    133     // Pop at miss.
    134     __ bind(&miss);
    135     __ pop(offset);
    136   }
    137 }
    138 
    139 
    140 // Helper function used to check that the dictionary doesn't contain
    141 // the property. This function may return false negatives, so miss_label
    142 // must always call a backup property check that is complete.
    143 // This function is safe to call if the receiver has fast properties.
    144 // Name must be a symbol and receiver must be a heap object.
    145 static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
    146                                              Label* miss_label,
    147                                              Register receiver,
    148                                              Handle<String> name,
    149                                              Register r0,
    150                                              Register r1) {
    151   ASSERT(name->IsSymbol());
    152   Counters* counters = masm->isolate()->counters();
    153   __ IncrementCounter(counters->negative_lookups(), 1);
    154   __ IncrementCounter(counters->negative_lookups_miss(), 1);
    155 
    156   __ mov(r0, FieldOperand(receiver, HeapObject::kMapOffset));
    157 
    158   const int kInterceptorOrAccessCheckNeededMask =
    159       (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
    160 
    161   // Bail out if the receiver has a named interceptor or requires access checks.
    162   __ test_b(FieldOperand(r0, Map::kBitFieldOffset),
    163             kInterceptorOrAccessCheckNeededMask);
    164   __ j(not_zero, miss_label);
    165 
    166   // Check that receiver is a JSObject.
    167   __ CmpInstanceType(r0, FIRST_SPEC_OBJECT_TYPE);
    168   __ j(below, miss_label);
    169 
    170   // Load properties array.
    171   Register properties = r0;
    172   __ mov(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
    173 
    174   // Check that the properties array is a dictionary.
    175   __ cmp(FieldOperand(properties, HeapObject::kMapOffset),
    176          Immediate(masm->isolate()->factory()->hash_table_map()));
    177   __ j(not_equal, miss_label);
    178 
    179   Label done;
    180   StringDictionaryLookupStub::GenerateNegativeLookup(masm,
    181                                                      miss_label,
    182                                                      &done,
    183                                                      properties,
    184                                                      name,
    185                                                      r1);
    186   __ bind(&done);
    187   __ DecrementCounter(counters->negative_lookups_miss(), 1);
    188 }
    189 
    190 
    191 void StubCache::GenerateProbe(MacroAssembler* masm,
    192                               Code::Flags flags,
    193                               Register receiver,
    194                               Register name,
    195                               Register scratch,
    196                               Register extra,
    197                               Register extra2,
    198                               Register extra3) {
    199   Label miss;
    200 
    201   // Assert that code is valid.  The multiplying code relies on the entry size
    202   // being 12.
    203   ASSERT(sizeof(Entry) == 12);
    204 
    205   // Assert the flags do not name a specific type.
    206   ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
    207 
    208   // Assert that there are no register conflicts.
    209   ASSERT(!scratch.is(receiver));
    210   ASSERT(!scratch.is(name));
    211   ASSERT(!extra.is(receiver));
    212   ASSERT(!extra.is(name));
    213   ASSERT(!extra.is(scratch));
    214 
    215   // Assert scratch and extra registers are valid, and extra2/3 are unused.
    216   ASSERT(!scratch.is(no_reg));
    217   ASSERT(extra2.is(no_reg));
    218   ASSERT(extra3.is(no_reg));
    219 
    220   Register offset = scratch;
    221   scratch = no_reg;
    222 
    223   Counters* counters = masm->isolate()->counters();
    224   __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);
    225 
    226   // Check that the receiver isn't a smi.
    227   __ JumpIfSmi(receiver, &miss);
    228 
    229   // Get the map of the receiver and compute the hash.
    230   __ mov(offset, FieldOperand(name, String::kHashFieldOffset));
    231   __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
    232   __ xor_(offset, flags);
    233   // We mask out the last two bits because they are not part of the hash and
     234   // they are always 01 for maps (likewise in the two 'and' instructions below).
    235   __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
    236   // ProbeTable expects the offset to be pointer scaled, which it is, because
    237   // the heap object tag size is 2 and the pointer size log 2 is also 2.
    238   ASSERT(kHeapObjectTagSize == kPointerSizeLog2);
    239 
    240   // Probe the primary table.
    241   ProbeTable(isolate(), masm, flags, kPrimary, name, receiver, offset, extra);
    242 
    243   // Primary miss: Compute hash for secondary probe.
    244   __ mov(offset, FieldOperand(name, String::kHashFieldOffset));
    245   __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
    246   __ xor_(offset, flags);
    247   __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
    248   __ sub(offset, name);
    249   __ add(offset, Immediate(flags));
    250   __ and_(offset, (kSecondaryTableSize - 1) << kHeapObjectTagSize);
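           // The two hash computations above, as pseudo-code (a sketch; both
           // results stay pointer-size scaled thanks to the shifted masks):
           //   primary   = ((name_hash + receiver_map) ^ flags)
           //               & ((kPrimaryTableSize - 1) << kHeapObjectTagSize)
           //   secondary = ((primary - name) + flags)
           //               & ((kSecondaryTableSize - 1) << kHeapObjectTagSize)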
    251 
    252   // Probe the secondary table.
    253   ProbeTable(
    254       isolate(), masm, flags, kSecondary, name, receiver, offset, extra);
    255 
     256   // Cache miss: fall through and let the caller handle the miss by
    257   // entering the runtime system.
    258   __ bind(&miss);
    259   __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
    260 }
    261 
    262 
    263 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
    264                                                        int index,
    265                                                        Register prototype) {
    266   __ LoadGlobalFunction(index, prototype);
    267   __ LoadGlobalFunctionInitialMap(prototype, prototype);
    268   // Load the prototype from the initial map.
    269   __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
    270 }
    271 
    272 
    273 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    274     MacroAssembler* masm,
    275     int index,
    276     Register prototype,
    277     Label* miss) {
    278   // Check we're still in the same context.
    279   __ cmp(Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)),
    280          masm->isolate()->global());
    281   __ j(not_equal, miss);
    282   // Get the global function with the given index.
    283   Handle<JSFunction> function(
    284       JSFunction::cast(masm->isolate()->global_context()->get(index)));
    285   // Load its initial map. The global functions all have initial maps.
    286   __ Set(prototype, Immediate(Handle<Map>(function->initial_map())));
    287   // Load the prototype from the initial map.
    288   __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
    289 }
    290 
    291 
    292 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
    293                                            Register receiver,
    294                                            Register scratch,
    295                                            Label* miss_label) {
    296   // Check that the receiver isn't a smi.
    297   __ JumpIfSmi(receiver, miss_label);
    298 
    299   // Check that the object is a JS array.
    300   __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
    301   __ j(not_equal, miss_label);
    302 
    303   // Load length directly from the JS array.
    304   __ mov(eax, FieldOperand(receiver, JSArray::kLengthOffset));
    305   __ ret(0);
    306 }
    307 
    308 
    309 // Generate code to check if an object is a string.  If the object is
    310 // a string, the map's instance type is left in the scratch register.
    311 static void GenerateStringCheck(MacroAssembler* masm,
    312                                 Register receiver,
    313                                 Register scratch,
    314                                 Label* smi,
    315                                 Label* non_string_object) {
    316   // Check that the object isn't a smi.
    317   __ JumpIfSmi(receiver, smi);
    318 
    319   // Check that the object is a string.
    320   __ mov(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
    321   __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
    322   STATIC_ASSERT(kNotStringTag != 0);
    323   __ test(scratch, Immediate(kNotStringTag));
    324   __ j(not_zero, non_string_object);
    325 }
    326 
    327 
    328 void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
    329                                             Register receiver,
    330                                             Register scratch1,
    331                                             Register scratch2,
    332                                             Label* miss,
    333                                             bool support_wrappers) {
    334   Label check_wrapper;
    335 
    336   // Check if the object is a string leaving the instance type in the
    337   // scratch register.
    338   GenerateStringCheck(masm, receiver, scratch1, miss,
    339                       support_wrappers ? &check_wrapper : miss);
    340 
    341   // Load length from the string and convert to a smi.
    342   __ mov(eax, FieldOperand(receiver, String::kLengthOffset));
    343   __ ret(0);
    344 
    345   if (support_wrappers) {
    346     // Check if the object is a JSValue wrapper.
    347     __ bind(&check_wrapper);
    348     __ cmp(scratch1, JS_VALUE_TYPE);
    349     __ j(not_equal, miss);
    350 
    351     // Check if the wrapped value is a string and load the length
    352     // directly if it is.
    353     __ mov(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
    354     GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
    355     __ mov(eax, FieldOperand(scratch2, String::kLengthOffset));
    356     __ ret(0);
    357   }
    358 }
    359 
    360 
    361 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
    362                                                  Register receiver,
    363                                                  Register scratch1,
    364                                                  Register scratch2,
    365                                                  Label* miss_label) {
    366   __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
    367   __ mov(eax, scratch1);
    368   __ ret(0);
    369 }
    370 
    371 
    372 // Load a fast property out of a holder object (src). In-object properties
     373 // are loaded directly; otherwise the property is loaded from the properties
    374 // fixed array.
    375 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
    376                                             Register dst,
    377                                             Register src,
    378                                             Handle<JSObject> holder,
    379                                             int index) {
    380   // Adjust for the number of properties stored in the holder.
    381   index -= holder->map()->inobject_properties();
    382   if (index < 0) {
    383     // Get the property straight out of the holder.
    384     int offset = holder->map()->instance_size() + (index * kPointerSize);
    385     __ mov(dst, FieldOperand(src, offset));
    386   } else {
    387     // Calculate the offset into the properties array.
    388     int offset = index * kPointerSize + FixedArray::kHeaderSize;
    389     __ mov(dst, FieldOperand(src, JSObject::kPropertiesOffset));
    390     __ mov(dst, FieldOperand(dst, offset));
    391   }
    392 }
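         // A worked example with made-up numbers: for a holder map with two
         // in-object properties and an instance size of 12 bytes, index 1 loads
         // FieldOperand(src, 12 + (1 - 2) * 4), i.e. the last in-object slot,
         // while index 3 loads slot (3 - 2) of the out-of-object properties
         // array, at offset FixedArray::kHeaderSize + 1 * kPointerSize.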
    393 
    394 
    395 static void PushInterceptorArguments(MacroAssembler* masm,
    396                                      Register receiver,
    397                                      Register holder,
    398                                      Register name,
    399                                      Handle<JSObject> holder_obj) {
    400   __ push(name);
    401   Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
    402   ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
    403   Register scratch = name;
    404   __ mov(scratch, Immediate(interceptor));
    405   __ push(scratch);
    406   __ push(receiver);
    407   __ push(holder);
    408   __ push(FieldOperand(scratch, InterceptorInfo::kDataOffset));
    409 }
    410 
    411 
    412 static void CompileCallLoadPropertyWithInterceptor(
    413     MacroAssembler* masm,
    414     Register receiver,
    415     Register holder,
    416     Register name,
    417     Handle<JSObject> holder_obj) {
    418   PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
    419   __ CallExternalReference(
    420       ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
    421                         masm->isolate()),
    422       5);
    423 }
    424 
    425 
    426 // Number of pointers to be reserved on stack for fast API call.
    427 static const int kFastApiCallArguments = 3;
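         // The three reserved slots hold the object that passed the type check,
         // the API function, and the API call data (see the state comment in
         // GenerateFastApiCall below).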
    428 
    429 
    430 // Reserves space for the extra arguments to API function in the
    431 // caller's frame.
    432 //
    433 // These arguments are set by CheckPrototypes and GenerateFastApiCall.
    434 static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
    435   // ----------- S t a t e -------------
    436   //  -- esp[0] : return address
    437   //  -- esp[4] : last argument in the internal frame of the caller
    438   // -----------------------------------
    439   __ pop(scratch);
    440   for (int i = 0; i < kFastApiCallArguments; i++) {
    441     __ push(Immediate(Smi::FromInt(0)));
    442   }
    443   __ push(scratch);
    444 }
    445 
    446 
    447 // Undoes the effects of ReserveSpaceForFastApiCall.
    448 static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
    449   // ----------- S t a t e -------------
    450   //  -- esp[0]  : return address.
    451   //  -- esp[4]  : last fast api call extra argument.
    452   //  -- ...
    453   //  -- esp[kFastApiCallArguments * 4] : first fast api call extra argument.
    454   //  -- esp[kFastApiCallArguments * 4 + 4] : last argument in the internal
    455   //                                          frame.
    456   // -----------------------------------
    457   __ pop(scratch);
    458   __ add(esp, Immediate(kPointerSize * kFastApiCallArguments));
    459   __ push(scratch);
    460 }
    461 
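         // Typical usage of the two helpers above, mirroring
         // CallInterceptorCompiler::CompileCacheable further down (a sketch):
         //   ReserveSpaceForFastApiCall(masm, scratch1);
         //   ... CheckPrototypes(..., depth, miss) fills in the receiver slot ...
         //   GenerateFastApiCall(masm, optimization, argc);   // fast path
         // and on paths that do not make the fast call:
         //   FreeSpaceForFastApiCall(masm, scratch1);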
    462 
    463 // Generates call to API function.
    464 static void GenerateFastApiCall(MacroAssembler* masm,
    465                                 const CallOptimization& optimization,
    466                                 int argc) {
    467   // ----------- S t a t e -------------
    468   //  -- esp[0]              : return address
    469   //  -- esp[4]              : object passing the type check
    470   //                           (last fast api call extra argument,
    471   //                            set by CheckPrototypes)
    472   //  -- esp[8]              : api function
    473   //                           (first fast api call extra argument)
    474   //  -- esp[12]             : api call data
    475   //  -- esp[16]             : last argument
    476   //  -- ...
    477   //  -- esp[(argc + 3) * 4] : first argument
    478   //  -- esp[(argc + 4) * 4] : receiver
    479   // -----------------------------------
    480   // Get the function and setup the context.
    481   Handle<JSFunction> function = optimization.constant_function();
    482   __ LoadHeapObject(edi, function);
    483   __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
    484 
    485   // Pass the additional arguments.
    486   __ mov(Operand(esp, 2 * kPointerSize), edi);
    487   Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
    488   Handle<Object> call_data(api_call_info->data());
    489   if (masm->isolate()->heap()->InNewSpace(*call_data)) {
    490     __ mov(ecx, api_call_info);
    491     __ mov(ebx, FieldOperand(ecx, CallHandlerInfo::kDataOffset));
    492     __ mov(Operand(esp, 3 * kPointerSize), ebx);
    493   } else {
    494     __ mov(Operand(esp, 3 * kPointerSize), Immediate(call_data));
    495   }
    496 
    497   // Prepare arguments.
    498   __ lea(eax, Operand(esp, 3 * kPointerSize));
    499 
    500   const int kApiArgc = 1;  // API function gets reference to the v8::Arguments.
    501 
    502   // Allocate the v8::Arguments structure in the arguments' space since
    503   // it's not controlled by GC.
    504   const int kApiStackSpace = 4;
    505 
    506   __ PrepareCallApiFunction(kApiArgc + kApiStackSpace);
    507 
    508   __ mov(ApiParameterOperand(1), eax);  // v8::Arguments::implicit_args_.
    509   __ add(eax, Immediate(argc * kPointerSize));
    510   __ mov(ApiParameterOperand(2), eax);  // v8::Arguments::values_.
    511   __ Set(ApiParameterOperand(3), Immediate(argc));  // v8::Arguments::length_.
    512   // v8::Arguments::is_construct_call_.
    513   __ Set(ApiParameterOperand(4), Immediate(0));
    514 
    515   // v8::InvocationCallback's argument.
    516   __ lea(eax, ApiParameterOperand(1));
    517   __ mov(ApiParameterOperand(0), eax);
    518 
    519   // Function address is a foreign pointer outside V8's heap.
    520   Address function_address = v8::ToCData<Address>(api_call_info->callback());
    521   __ CallApiFunctionAndReturn(function_address,
    522                               argc + kFastApiCallArguments + 1);
    523 }
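         // The stack size passed to CallApiFunctionAndReturn above is
         // argc + kFastApiCallArguments + 1: the JS arguments, the three extra
         // fast-call slots, and the receiver (see the state comment at the top
         // of this function).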
    524 
    525 
    526 class CallInterceptorCompiler BASE_EMBEDDED {
    527  public:
    528   CallInterceptorCompiler(StubCompiler* stub_compiler,
    529                           const ParameterCount& arguments,
    530                           Register name,
    531                           Code::ExtraICState extra_state)
    532       : stub_compiler_(stub_compiler),
    533         arguments_(arguments),
    534         name_(name),
    535         extra_state_(extra_state) {}
    536 
    537   void Compile(MacroAssembler* masm,
    538                Handle<JSObject> object,
    539                Handle<JSObject> holder,
    540                Handle<String> name,
    541                LookupResult* lookup,
    542                Register receiver,
    543                Register scratch1,
    544                Register scratch2,
    545                Register scratch3,
    546                Label* miss) {
    547     ASSERT(holder->HasNamedInterceptor());
    548     ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
    549 
    550     // Check that the receiver isn't a smi.
    551     __ JumpIfSmi(receiver, miss);
    552 
    553     CallOptimization optimization(lookup);
    554     if (optimization.is_constant_call()) {
    555       CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
    556                        holder, lookup, name, optimization, miss);
    557     } else {
    558       CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
    559                      name, holder, miss);
    560     }
    561   }
    562 
    563  private:
    564   void CompileCacheable(MacroAssembler* masm,
    565                         Handle<JSObject> object,
    566                         Register receiver,
    567                         Register scratch1,
    568                         Register scratch2,
    569                         Register scratch3,
    570                         Handle<JSObject> interceptor_holder,
    571                         LookupResult* lookup,
    572                         Handle<String> name,
    573                         const CallOptimization& optimization,
    574                         Label* miss_label) {
    575     ASSERT(optimization.is_constant_call());
    576     ASSERT(!lookup->holder()->IsGlobalObject());
    577 
    578     int depth1 = kInvalidProtoDepth;
    579     int depth2 = kInvalidProtoDepth;
    580     bool can_do_fast_api_call = false;
    581     if (optimization.is_simple_api_call() &&
    582         !lookup->holder()->IsGlobalObject()) {
    583       depth1 = optimization.GetPrototypeDepthOfExpectedType(
    584           object, interceptor_holder);
    585       if (depth1 == kInvalidProtoDepth) {
    586         depth2 = optimization.GetPrototypeDepthOfExpectedType(
    587             interceptor_holder, Handle<JSObject>(lookup->holder()));
    588       }
    589       can_do_fast_api_call =
    590           depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
    591     }
    592 
    593     Counters* counters = masm->isolate()->counters();
    594     __ IncrementCounter(counters->call_const_interceptor(), 1);
    595 
    596     if (can_do_fast_api_call) {
    597       __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1);
    598       ReserveSpaceForFastApiCall(masm, scratch1);
    599     }
    600 
    601     // Check that the maps from receiver to interceptor's holder
    602     // haven't changed and thus we can invoke interceptor.
    603     Label miss_cleanup;
    604     Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    605     Register holder =
    606         stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
    607                                         scratch1, scratch2, scratch3,
    608                                         name, depth1, miss);
    609 
    610     // Invoke an interceptor and if it provides a value,
    611     // branch to |regular_invoke|.
    612     Label regular_invoke;
    613     LoadWithInterceptor(masm, receiver, holder, interceptor_holder,
    614                         &regular_invoke);
    615 
    616     // Interceptor returned nothing for this property.  Try to use cached
    617     // constant function.
    618 
    619     // Check that the maps from interceptor's holder to constant function's
    620     // holder haven't changed and thus we can use cached constant function.
    621     if (*interceptor_holder != lookup->holder()) {
    622       stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
    623                                       Handle<JSObject>(lookup->holder()),
    624                                       scratch1, scratch2, scratch3,
    625                                       name, depth2, miss);
    626     } else {
     627       // CheckPrototypes has a side effect of fetching a 'holder'
     628       // for the API (the object that satisfies the signature's
     629       // instanceof check).  It's safe to omit it here: if present, it
     630       // should already have been fetched by the previous CheckPrototypes.
    631       ASSERT(depth2 == kInvalidProtoDepth);
    632     }
    633 
    634     // Invoke function.
    635     if (can_do_fast_api_call) {
    636       GenerateFastApiCall(masm, optimization, arguments_.immediate());
    637     } else {
    638       CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
    639           ? CALL_AS_FUNCTION
    640           : CALL_AS_METHOD;
    641       __ InvokeFunction(optimization.constant_function(), arguments_,
    642                         JUMP_FUNCTION, NullCallWrapper(), call_kind);
    643     }
    644 
    645     // Deferred code for fast API call case---clean preallocated space.
    646     if (can_do_fast_api_call) {
    647       __ bind(&miss_cleanup);
    648       FreeSpaceForFastApiCall(masm, scratch1);
    649       __ jmp(miss_label);
    650     }
    651 
    652     // Invoke a regular function.
    653     __ bind(&regular_invoke);
    654     if (can_do_fast_api_call) {
    655       FreeSpaceForFastApiCall(masm, scratch1);
    656     }
    657   }
    658 
    659   void CompileRegular(MacroAssembler* masm,
    660                       Handle<JSObject> object,
    661                       Register receiver,
    662                       Register scratch1,
    663                       Register scratch2,
    664                       Register scratch3,
    665                       Handle<String> name,
    666                       Handle<JSObject> interceptor_holder,
    667                       Label* miss_label) {
    668     Register holder =
    669         stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
    670                                         scratch1, scratch2, scratch3,
    671                                         name, miss_label);
    672 
    673     FrameScope scope(masm, StackFrame::INTERNAL);
    674     // Save the name_ register across the call.
    675     __ push(name_);
    676 
    677     PushInterceptorArguments(masm, receiver, holder, name_, interceptor_holder);
    678 
    679     __ CallExternalReference(
    680         ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
    681                           masm->isolate()),
    682         5);
    683 
    684     // Restore the name_ register.
    685     __ pop(name_);
    686 
    687     // Leave the internal frame.
    688   }
    689 
    690   void LoadWithInterceptor(MacroAssembler* masm,
    691                            Register receiver,
    692                            Register holder,
    693                            Handle<JSObject> holder_obj,
    694                            Label* interceptor_succeeded) {
    695     {
    696       FrameScope scope(masm, StackFrame::INTERNAL);
    697       __ push(holder);  // Save the holder.
    698       __ push(name_);  // Save the name.
    699 
    700       CompileCallLoadPropertyWithInterceptor(masm,
    701                                              receiver,
    702                                              holder,
    703                                              name_,
    704                                              holder_obj);
    705 
    706       __ pop(name_);  // Restore the name.
    707       __ pop(receiver);  // Restore the holder.
    708       // Leave the internal frame.
    709     }
    710 
    711     __ cmp(eax, masm->isolate()->factory()->no_interceptor_result_sentinel());
    712     __ j(not_equal, interceptor_succeeded);
    713   }
    714 
    715   StubCompiler* stub_compiler_;
    716   const ParameterCount& arguments_;
    717   Register name_;
    718   Code::ExtraICState extra_state_;
    719 };
    720 
    721 
    722 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
    723   ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
    724   Handle<Code> code = (kind == Code::LOAD_IC)
    725       ? masm->isolate()->builtins()->LoadIC_Miss()
    726       : masm->isolate()->builtins()->KeyedLoadIC_Miss();
    727   __ jmp(code, RelocInfo::CODE_TARGET);
    728 }
    729 
    730 
    731 void StubCompiler::GenerateKeyedLoadMissForceGeneric(MacroAssembler* masm) {
    732   Handle<Code> code =
    733       masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
    734   __ jmp(code, RelocInfo::CODE_TARGET);
    735 }
    736 
    737 
    738 // Both name_reg and receiver_reg are preserved on jumps to miss_label,
    739 // but may be destroyed if store is successful.
    740 void StubCompiler::GenerateStoreField(MacroAssembler* masm,
    741                                       Handle<JSObject> object,
    742                                       int index,
    743                                       Handle<Map> transition,
    744                                       Register receiver_reg,
    745                                       Register name_reg,
    746                                       Register scratch,
    747                                       Label* miss_label) {
    748   // Check that the map of the object hasn't changed.
    749   CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS
    750                                              : REQUIRE_EXACT_MAP;
    751   __ CheckMap(receiver_reg, Handle<Map>(object->map()),
    752               miss_label, DO_SMI_CHECK, mode);
    753 
    754   // Perform global security token check if needed.
    755   if (object->IsJSGlobalProxy()) {
    756     __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
    757   }
    758 
    759   // Stub never generated for non-global objects that require access
    760   // checks.
    761   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
    762 
    763   // Perform map transition for the receiver if necessary.
    764   if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) {
    765     // The properties must be extended before we can store the value.
    766     // We jump to a runtime call that extends the properties array.
    767     __ pop(scratch);  // Return address.
    768     __ push(receiver_reg);
    769     __ push(Immediate(transition));
    770     __ push(eax);
    771     __ push(scratch);
    772     __ TailCallExternalReference(
    773         ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
    774                           masm->isolate()),
    775         3,
    776         1);
    777     return;
    778   }
    779 
    780   if (!transition.is_null()) {
    781     // Update the map of the object; no write barrier updating is
    782     // needed because the map is never in new space.
    783     __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset),
    784            Immediate(transition));
    785   }
    786 
    787   // Adjust for the number of properties stored in the object. Even in the
    788   // face of a transition we can use the old map here because the size of the
    789   // object and the number of in-object properties is not going to change.
    790   index -= object->map()->inobject_properties();
    791 
    792   if (index < 0) {
    793     // Set the property straight into the object.
    794     int offset = object->map()->instance_size() + (index * kPointerSize);
    795     __ mov(FieldOperand(receiver_reg, offset), eax);
    796 
    797     // Update the write barrier for the array address.
    798     // Pass the value being stored in the now unused name_reg.
    799     __ mov(name_reg, eax);
    800     __ RecordWriteField(receiver_reg,
    801                         offset,
    802                         name_reg,
    803                         scratch,
    804                         kDontSaveFPRegs);
    805   } else {
    806     // Write to the properties array.
    807     int offset = index * kPointerSize + FixedArray::kHeaderSize;
    808     // Get the properties array (optimistically).
    809     __ mov(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    810     __ mov(FieldOperand(scratch, offset), eax);
    811 
    812     // Update the write barrier for the array address.
    813     // Pass the value being stored in the now unused name_reg.
    814     __ mov(name_reg, eax);
    815     __ RecordWriteField(scratch,
    816                         offset,
    817                         name_reg,
    818                         receiver_reg,
    819                         kDontSaveFPRegs);
    820   }
    821 
    822   // Return the value (register eax).
    823   __ ret(0);
    824 }
    825 
    826 
    827 // Generate code to check that a global property cell is empty. Create
    828 // the property cell at compilation time if no cell exists for the
    829 // property.
    830 static void GenerateCheckPropertyCell(MacroAssembler* masm,
    831                                       Handle<GlobalObject> global,
    832                                       Handle<String> name,
    833                                       Register scratch,
    834                                       Label* miss) {
    835   Handle<JSGlobalPropertyCell> cell =
    836       GlobalObject::EnsurePropertyCell(global, name);
    837   ASSERT(cell->value()->IsTheHole());
    838   Handle<Oddball> the_hole = masm->isolate()->factory()->the_hole_value();
    839   if (Serializer::enabled()) {
    840     __ mov(scratch, Immediate(cell));
    841     __ cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
    842            Immediate(the_hole));
    843   } else {
    844     __ cmp(Operand::Cell(cell), Immediate(the_hole));
    845   }
    846   __ j(not_equal, miss);
    847 }
    848 
    849 
    850 // Calls GenerateCheckPropertyCell for each global object in the prototype chain
    851 // from object to (but not including) holder.
    852 static void GenerateCheckPropertyCells(MacroAssembler* masm,
    853                                        Handle<JSObject> object,
    854                                        Handle<JSObject> holder,
    855                                        Handle<String> name,
    856                                        Register scratch,
    857                                        Label* miss) {
    858   Handle<JSObject> current = object;
    859   while (!current.is_identical_to(holder)) {
    860     if (current->IsGlobalObject()) {
    861       GenerateCheckPropertyCell(masm,
    862                                 Handle<GlobalObject>::cast(current),
    863                                 name,
    864                                 scratch,
    865                                 miss);
    866     }
    867     current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
    868   }
    869 }
    870 
    871 #undef __
    872 #define __ ACCESS_MASM(masm())
    873 
    874 
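         // CheckPrototypes below walks the chain from |object| to |holder|,
         // roughly (a sketch):
         //   for each object from |object| up to, but not including, |holder|:
         //     slow-mode, non-global object -> negative dictionary lookup of
         //                                     |name|, then load the prototype
         //                                     from the map
         //     otherwise                    -> CheckMap, global-proxy access
         //                                     check if needed, load the
         //                                     prototype
         //   finally: CheckMap on the holder, access check if it is a global
         //   proxy, and GenerateCheckPropertyCells for any skipped globals.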
    875 Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
    876                                        Register object_reg,
    877                                        Handle<JSObject> holder,
    878                                        Register holder_reg,
    879                                        Register scratch1,
    880                                        Register scratch2,
    881                                        Handle<String> name,
    882                                        int save_at_depth,
    883                                        Label* miss) {
    884   // Make sure there's no overlap between holder and object registers.
    885   ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
    886   ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
    887          && !scratch2.is(scratch1));
    888 
    889   // Keep track of the current object in register reg.
    890   Register reg = object_reg;
    891   Handle<JSObject> current = object;
    892   int depth = 0;
    893 
    894   if (save_at_depth == depth) {
    895     __ mov(Operand(esp, kPointerSize), reg);
    896   }
    897 
    898   // Traverse the prototype chain and check the maps in the prototype chain for
    899   // fast and global objects or do negative lookup for normal objects.
    900   while (!current.is_identical_to(holder)) {
    901     ++depth;
    902 
    903     // Only global objects and objects that do not require access
    904     // checks are allowed in stubs.
    905     ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
    906 
    907     Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
    908     if (!current->HasFastProperties() &&
    909         !current->IsJSGlobalObject() &&
    910         !current->IsJSGlobalProxy()) {
    911       if (!name->IsSymbol()) {
    912         name = factory()->LookupSymbol(name);
    913       }
    914       ASSERT(current->property_dictionary()->FindEntry(*name) ==
    915              StringDictionary::kNotFound);
    916 
    917       GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
    918                                        scratch1, scratch2);
    919 
    920       __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
    921       reg = holder_reg;  // From now on the object will be in holder_reg.
    922       __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    923     } else {
    924       bool in_new_space = heap()->InNewSpace(*prototype);
    925       Handle<Map> current_map(current->map());
    926       if (in_new_space) {
    927         // Save the map in scratch1 for later.
    928         __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
    929       }
    930       __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK,
    931                   ALLOW_ELEMENT_TRANSITION_MAPS);
    932 
    933       // Check access rights to the global object.  This has to happen after
    934       // the map check so that we know that the object is actually a global
    935       // object.
    936       if (current->IsJSGlobalProxy()) {
    937         __ CheckAccessGlobalProxy(reg, scratch2, miss);
    938       }
    939       reg = holder_reg;  // From now on the object will be in holder_reg.
    940 
    941       if (in_new_space) {
    942         // The prototype is in new space; we cannot store a reference to it
    943         // in the code.  Load it from the map.
    944         __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    945       } else {
    946         // The prototype is in old space; load it directly.
    947         __ mov(reg, prototype);
    948       }
    949     }
    950 
    951     if (save_at_depth == depth) {
    952       __ mov(Operand(esp, kPointerSize), reg);
    953     }
    954 
    955     // Go to the next object in the prototype chain.
    956     current = prototype;
    957   }
    958   ASSERT(current.is_identical_to(holder));
    959 
    960   // Log the check depth.
    961   LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
    962 
    963   // Check the holder map.
    964   __ CheckMap(reg, Handle<Map>(holder->map()),
    965               miss, DONT_DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
    966 
    967   // Perform security check for access to the global object.
    968   ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
    969   if (holder->IsJSGlobalProxy()) {
    970     __ CheckAccessGlobalProxy(reg, scratch1, miss);
    971   }
    972 
    973   // If we've skipped any global objects, it's not enough to verify that
    974   // their maps haven't changed.  We also need to check that the property
    975   // cell for the property is still empty.
    976   GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);
    977 
    978   // Return the register containing the holder.
    979   return reg;
    980 }
    981 
    982 
    983 void StubCompiler::GenerateLoadField(Handle<JSObject> object,
    984                                      Handle<JSObject> holder,
    985                                      Register receiver,
    986                                      Register scratch1,
    987                                      Register scratch2,
    988                                      Register scratch3,
    989                                      int index,
    990                                      Handle<String> name,
    991                                      Label* miss) {
    992   // Check that the receiver isn't a smi.
    993   __ JumpIfSmi(receiver, miss);
    994 
    995   // Check the prototype chain.
    996   Register reg = CheckPrototypes(
    997       object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
    998 
    999   // Get the value from the properties.
   1000   GenerateFastPropertyLoad(masm(), eax, reg, holder, index);
   1001   __ ret(0);
   1002 }
   1003 
   1004 
   1005 void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
   1006                                         Handle<JSObject> holder,
   1007                                         Register receiver,
   1008                                         Register name_reg,
   1009                                         Register scratch1,
   1010                                         Register scratch2,
   1011                                         Register scratch3,
   1012                                         Handle<AccessorInfo> callback,
   1013                                         Handle<String> name,
   1014                                         Label* miss) {
   1015   // Check that the receiver isn't a smi.
   1016   __ JumpIfSmi(receiver, miss);
   1017 
   1018   // Check that the maps haven't changed.
   1019   Register reg = CheckPrototypes(object, receiver, holder, scratch1,
   1020                                  scratch2, scratch3, name, miss);
   1021 
   1022   // Insert additional parameters into the stack frame above return address.
   1023   ASSERT(!scratch3.is(reg));
   1024   __ pop(scratch3);  // Get return address to place it below.
   1025 
   1026   __ push(receiver);  // receiver
   1027   __ mov(scratch2, esp);
   1028   ASSERT(!scratch2.is(reg));
   1029   __ push(reg);  // holder
   1030   // Push data from AccessorInfo.
   1031   if (isolate()->heap()->InNewSpace(callback->data())) {
   1032     __ mov(scratch1, Immediate(callback));
   1033     __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset));
   1034   } else {
   1035     __ push(Immediate(Handle<Object>(callback->data())));
   1036   }
   1037 
   1038   // Save a pointer to where we pushed the arguments pointer.
   1039   // This will be passed as the const AccessorInfo& to the C++ callback.
   1040   __ push(scratch2);
   1041 
   1042   __ push(name_reg);  // name
   1043   __ mov(ebx, esp);  // esp points to reference to name (handler).
   1044 
   1045   __ push(scratch3);  // Restore return address.
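           // Resulting stack layout at this point (a sketch):
           //   esp[0]  : return address
           //   esp[4]  : name
           //   esp[8]  : pointer to the receiver slot below (passed to the C++
           //             getter as the const AccessorInfo&)
           //   esp[12] : callback data
           //   esp[16] : holder
           //   esp[20] : receiver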
   1046 
    1047   // A 3-element array for v8::Arguments::values_, a handle for the name,
    1048   // and a pointer to the values (the GC treats it as a smi).
   1049   const int kStackSpace = 5;
   1050   const int kApiArgc = 2;
   1051 
   1052   __ PrepareCallApiFunction(kApiArgc);
   1053   __ mov(ApiParameterOperand(0), ebx);  // name.
   1054   __ add(ebx, Immediate(kPointerSize));
   1055   __ mov(ApiParameterOperand(1), ebx);  // arguments pointer.
   1056 
   1057   // Emitting a stub call may try to allocate (if the code is not
   1058   // already generated).  Do not allow the assembler to perform a
   1059   // garbage collection but instead return the allocation failure
   1060   // object.
   1061   Address getter_address = v8::ToCData<Address>(callback->getter());
   1062   __ CallApiFunctionAndReturn(getter_address, kStackSpace);
   1063 }
   1064 
   1065 
   1066 void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
   1067                                         Handle<JSObject> holder,
   1068                                         Register receiver,
   1069                                         Register scratch1,
   1070                                         Register scratch2,
   1071                                         Register scratch3,
   1072                                         Handle<JSFunction> value,
   1073                                         Handle<String> name,
   1074                                         Label* miss) {
   1075   // Check that the receiver isn't a smi.
   1076   __ JumpIfSmi(receiver, miss);
   1077 
   1078   // Check that the maps haven't changed.
   1079   CheckPrototypes(
   1080       object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
   1081 
   1082   // Return the constant value.
   1083   __ LoadHeapObject(eax, value);
   1084   __ ret(0);
   1085 }
   1086 
   1087 
   1088 void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
   1089                                            Handle<JSObject> interceptor_holder,
   1090                                            LookupResult* lookup,
   1091                                            Register receiver,
   1092                                            Register name_reg,
   1093                                            Register scratch1,
   1094                                            Register scratch2,
   1095                                            Register scratch3,
   1096                                            Handle<String> name,
   1097                                            Label* miss) {
   1098   ASSERT(interceptor_holder->HasNamedInterceptor());
   1099   ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
   1100 
   1101   // Check that the receiver isn't a smi.
   1102   __ JumpIfSmi(receiver, miss);
   1103 
    1104   // So far the most popular follow-ups for interceptor loads are FIELD
    1105   // and CALLBACKS, so inline only them; other cases may be added
   1106   // later.
   1107   bool compile_followup_inline = false;
   1108   if (lookup->IsFound() && lookup->IsCacheable()) {
   1109     if (lookup->type() == FIELD) {
   1110       compile_followup_inline = true;
   1111     } else if (lookup->type() == CALLBACKS &&
   1112                lookup->GetCallbackObject()->IsAccessorInfo()) {
   1113       compile_followup_inline =
   1114           AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL;
   1115     }
   1116   }
   1117 
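           // In other words: the follow-up is inlined only for a FIELD lookup,
           // or for a CALLBACKS lookup whose AccessorInfo has a non-NULL getter;
           // every other case takes the runtime path at the end of this method.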
   1118   if (compile_followup_inline) {
   1119     // Compile the interceptor call, followed by inline code to load the
   1120     // property from further up the prototype chain if the call fails.
   1121     // Check that the maps haven't changed.
   1122     Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
   1123                                           scratch1, scratch2, scratch3,
   1124                                           name, miss);
   1125     ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
   1126 
   1127     // Preserve the receiver register explicitly whenever it is different from
   1128     // the holder and it is needed should the interceptor return without any
    1129     // result. The CALLBACKS case needs the receiver to be passed into C++ code;
    1130     // the FIELD case might cause a miss during the prototype check.
    1131     bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    1132     bool must_preserve_receiver_reg = !receiver.is(holder_reg) &&
    1133         (lookup->type() == CALLBACKS || must_perform_prototype_check);
   1134 
   1135     // Save necessary data before invoking an interceptor.
   1136     // Requires a frame to make GC aware of pushed pointers.
   1137     {
   1138       FrameScope frame_scope(masm(), StackFrame::INTERNAL);
   1139 
   1140       if (must_preserve_receiver_reg) {
   1141         __ push(receiver);
   1142       }
   1143       __ push(holder_reg);
   1144       __ push(name_reg);
   1145 
    1146       // Invoke an interceptor.  Note: map checks from the receiver to the
    1147       // interceptor's holder have been compiled before (see a caller
    1148       // of this method).
   1149       CompileCallLoadPropertyWithInterceptor(masm(),
   1150                                              receiver,
   1151                                              holder_reg,
   1152                                              name_reg,
   1153                                              interceptor_holder);
   1154 
    1155       // Check if the interceptor provided a value for the property.  If it
    1156       // did, return immediately.
   1157       Label interceptor_failed;
   1158       __ cmp(eax, factory()->no_interceptor_result_sentinel());
   1159       __ j(equal, &interceptor_failed);
   1160       frame_scope.GenerateLeaveFrame();
   1161       __ ret(0);
   1162 
   1163       // Clobber registers when generating debug-code to provoke errors.
   1164       __ bind(&interceptor_failed);
   1165       if (FLAG_debug_code) {
   1166         __ mov(receiver, Immediate(BitCast<int32_t>(kZapValue)));
   1167         __ mov(holder_reg, Immediate(BitCast<int32_t>(kZapValue)));
   1168         __ mov(name_reg, Immediate(BitCast<int32_t>(kZapValue)));
   1169       }
   1170 
   1171       __ pop(name_reg);
   1172       __ pop(holder_reg);
   1173       if (must_preserve_receiver_reg) {
   1174         __ pop(receiver);
   1175       }
   1176 
   1177       // Leave the internal frame.
   1178     }
   1179 
   1180     // Check that the maps from interceptor's holder to lookup's holder
   1181     // haven't changed.  And load lookup's holder into holder_reg.
    1182     if (must_perform_prototype_check) {
   1183       holder_reg = CheckPrototypes(interceptor_holder,
   1184                                    holder_reg,
   1185                                    Handle<JSObject>(lookup->holder()),
   1186                                    scratch1,
   1187                                    scratch2,
   1188                                    scratch3,
   1189                                    name,
   1190                                    miss);
   1191     }
   1192 
   1193     if (lookup->type() == FIELD) {
   1194       // We found FIELD property in prototype chain of interceptor's holder.
   1195       // Retrieve a field from field's holder.
   1196       GenerateFastPropertyLoad(masm(), eax, holder_reg,
   1197                                Handle<JSObject>(lookup->holder()),
   1198                                lookup->GetFieldIndex());
   1199       __ ret(0);
   1200     } else {
   1201       // We found CALLBACKS property in prototype chain of interceptor's
   1202       // holder.
   1203       ASSERT(lookup->type() == CALLBACKS);
   1204       Handle<AccessorInfo> callback(
   1205           AccessorInfo::cast(lookup->GetCallbackObject()));
   1206       ASSERT(callback->getter() != NULL);
   1207 
   1208       // Tail call to runtime.
   1209       // Important invariant in CALLBACKS case: the code above must be
   1210       // structured to never clobber |receiver| register.
   1211       __ pop(scratch2);  // return address
   1212       __ push(receiver);
   1213       __ push(holder_reg);
   1214       __ mov(holder_reg, Immediate(callback));
   1215       __ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
   1216       __ push(holder_reg);
   1217       __ push(name_reg);
   1218       __ push(scratch2);  // restore return address
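               // The five values pushed above (receiver, holder, callback data,
               // the AccessorInfo object, and the property name) are the five
               // arguments passed to IC::kLoadCallbackProperty below.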
   1219 
   1220       ExternalReference ref =
   1221           ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
   1222                             masm()->isolate());
   1223       __ TailCallExternalReference(ref, 5, 1);
   1224     }
   1225   } else {  // !compile_followup_inline
   1226     // Call the runtime system to load the interceptor.
   1227     // Check that the maps haven't changed.
   1228     Register holder_reg =
   1229         CheckPrototypes(object, receiver, interceptor_holder,
   1230                         scratch1, scratch2, scratch3, name, miss);
   1231     __ pop(scratch2);  // save old return address
   1232     PushInterceptorArguments(masm(), receiver, holder_reg,
   1233                              name_reg, interceptor_holder);
   1234     __ push(scratch2);  // restore old return address
   1235 
   1236     ExternalReference ref =
   1237         ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
   1238                           isolate());
   1239     __ TailCallExternalReference(ref, 5, 1);
   1240   }
   1241 }
   1242 
   1243 
   1244 void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) {
   1245   if (kind_ == Code::KEYED_CALL_IC) {
   1246     __ cmp(ecx, Immediate(name));
   1247     __ j(not_equal, miss);
   1248   }
   1249 }
   1250 
   1251 
   1252 void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
   1253                                                    Handle<JSObject> holder,
   1254                                                    Handle<String> name,
   1255                                                    Label* miss) {
   1256   ASSERT(holder->IsGlobalObject());
   1257 
   1258   // Get the number of arguments.
   1259   const int argc = arguments().immediate();
   1260 
   1261   // Get the receiver from the stack.
   1262   __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
   1263 
   1264 
   1265   // Check that the maps haven't changed.
   1266   __ JumpIfSmi(edx, miss);
   1267   CheckPrototypes(object, edx, holder, ebx, eax, edi, name, miss);
   1268 }
   1269 
   1270 
   1271 void CallStubCompiler::GenerateLoadFunctionFromCell(
   1272     Handle<JSGlobalPropertyCell> cell,
   1273     Handle<JSFunction> function,
   1274     Label* miss) {
   1275   // Get the value from the cell.
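           // With the serializer enabled, the cell's raw address must not be
           // embedded directly in the code, so load it through a relocatable
           // handle; otherwise a direct cell operand can be used.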
   1276   if (Serializer::enabled()) {
   1277     __ mov(edi, Immediate(cell));
   1278     __ mov(edi, FieldOperand(edi, JSGlobalPropertyCell::kValueOffset));
   1279   } else {
   1280     __ mov(edi, Operand::Cell(cell));
   1281   }
   1282 
   1283   // Check that the cell contains the same function.
   1284   if (isolate()->heap()->InNewSpace(*function)) {
   1285     // We can't embed a pointer to a function in new space so we have
   1286     // to verify that the shared function info is unchanged. This has
   1287     // the nice side effect that multiple closures based on the same
   1288     // function can all use this call IC. Before we load through the
   1289     // function, we have to verify that it still is a function.
   1290     __ JumpIfSmi(edi, miss);
   1291     __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx);
   1292     __ j(not_equal, miss);
   1293 
   1294     // Check the shared function info. Make sure it hasn't changed.
   1295     __ cmp(FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset),
   1296            Immediate(Handle<SharedFunctionInfo>(function->shared())));
   1297   } else {
   1298     __ cmp(edi, Immediate(function));
   1299   }
   1300   __ j(not_equal, miss);
   1301 }
   1302 
   1303 
   1304 void CallStubCompiler::GenerateMissBranch() {
   1305   Handle<Code> code =
   1306       isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
   1307                                                kind_,
   1308                                                extra_state_);
   1309   __ jmp(code, RelocInfo::CODE_TARGET);
   1310 }
   1311 
   1312 
   1313 Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
   1314                                                 Handle<JSObject> holder,
   1315                                                 int index,
   1316                                                 Handle<String> name) {
   1317   // ----------- S t a t e -------------
   1318   //  -- ecx                 : name
   1319   //  -- esp[0]              : return address
   1320   //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
   1321   //  -- ...
   1322   //  -- esp[(argc + 1) * 4] : receiver
   1323   // -----------------------------------
   1324   Label miss;
   1325 
   1326   GenerateNameCheck(name, &miss);
   1327 
   1328   // Get the receiver from the stack.
   1329   const int argc = arguments().immediate();
   1330   __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
   1331 
   1332   // Check that the receiver isn't a smi.
   1333   __ JumpIfSmi(edx, &miss);
   1334 
   1335   // Do the right check and compute the holder register.
   1336   Register reg = CheckPrototypes(object, edx, holder, ebx, eax, edi,
   1337                                  name, &miss);
   1338 
   1339   GenerateFastPropertyLoad(masm(), edi, reg, holder, index);
   1340 
   1341   // Check that the function really is a function.
   1342   __ JumpIfSmi(edi, &miss);
   1343   __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx);
   1344   __ j(not_equal, &miss);
   1345 
   1346   // Patch the receiver on the stack with the global proxy if
   1347   // necessary.
   1348   if (object->IsGlobalObject()) {
   1349     __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
   1350     __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
   1351   }
   1352 
   1353   // Invoke the function.
   1354   CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
   1355       ? CALL_AS_FUNCTION
   1356       : CALL_AS_METHOD;
   1357   __ InvokeFunction(edi, arguments(), JUMP_FUNCTION,
   1358                     NullCallWrapper(), call_kind);
   1359 
   1360   // Handle call cache miss.
   1361   __ bind(&miss);
   1362   GenerateMissBranch();
   1363 
   1364   // Return the generated code.
   1365   return GetCode(FIELD, name);
   1366 }
   1367 
   1368 
   1369 Handle<Code> CallStubCompiler::CompileArrayPushCall(
   1370     Handle<Object> object,
   1371     Handle<JSObject> holder,
   1372     Handle<JSGlobalPropertyCell> cell,
   1373     Handle<JSFunction> function,
   1374     Handle<String> name) {
   1375   // ----------- S t a t e -------------
   1376   //  -- ecx                 : name
   1377   //  -- esp[0]              : return address
   1378   //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
   1379   //  -- ...
   1380   //  -- esp[(argc + 1) * 4] : receiver
   1381   // -----------------------------------
   1382 
   1383   // If object is not an array, bail out to regular call.
   1384   if (!object->IsJSArray() || !cell.is_null()) {
   1385     return Handle<Code>::null();
   1386   }
   1387 
   1388   Label miss;
   1389 
   1390   GenerateNameCheck(name, &miss);
   1391 
   1392   // Get the receiver from the stack.
   1393   const int argc = arguments().immediate();
   1394   __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
   1395 
   1396   // Check that the receiver isn't a smi.
   1397   __ JumpIfSmi(edx, &miss);
   1398 
   1399   CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax, edi,
   1400                   name, &miss);
   1401 
   1402   if (argc == 0) {
    1403     // No-op: just return the length.
   1404     __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
   1405     __ ret((argc + 1) * kPointerSize);
   1406   } else {
   1407     Label call_builtin;
   1408 
   1409     if (argc == 1) {  // Otherwise fall through to call builtin.
   1410       Label attempt_to_grow_elements, with_write_barrier;
   1411 
   1412       // Get the elements array of the object.
   1413       __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
   1414 
   1415       // Check that the elements are in fast mode and writable.
   1416       __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
   1417              Immediate(factory()->fixed_array_map()));
   1418       __ j(not_equal, &call_builtin);
   1419 
   1420       // Get the array's length into eax and calculate new length.
   1421       __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
   1422       STATIC_ASSERT(kSmiTagSize == 1);
   1423       STATIC_ASSERT(kSmiTag == 0);
   1424       __ add(eax, Immediate(Smi::FromInt(argc)));
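               // The add works on the smi-encoded values directly: with kSmiTag == 0
               // and kSmiTagSize == 1 a smi is the value shifted left by one, so
               // adding Smi::FromInt(argc) bumps the untagged length by argc.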
   1425 
   1426       // Get the elements' length into ecx.
   1427       __ mov(ecx, FieldOperand(edi, FixedArray::kLengthOffset));
   1428 
   1429       // Check if we could survive without allocation.
   1430       __ cmp(eax, ecx);
   1431       __ j(greater, &attempt_to_grow_elements);
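               // Both eax (new length) and ecx (capacity) are smis, so the tagged
               // values compare the same way the untagged values would.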
   1432 
   1433       // Check if value is a smi.
   1434       __ mov(ecx, Operand(esp, argc * kPointerSize));
   1435       __ JumpIfNotSmi(ecx, &with_write_barrier);
   1436 
   1437       // Save new length.
   1438       __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
   1439 
   1440       // Store the value.
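               // eax holds the new length as a smi (length << 1); scaling it by
               // times_half_pointer_size gives length * kPointerSize, and subtracting
               // argc * kPointerSize addresses the slot of the first pushed element.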
   1441       __ mov(FieldOperand(edi,
   1442                           eax,
   1443                           times_half_pointer_size,
   1444                           FixedArray::kHeaderSize - argc * kPointerSize),
   1445              ecx);
   1446 
   1447       __ ret((argc + 1) * kPointerSize);
   1448 
   1449       __ bind(&with_write_barrier);
   1450 
   1451       __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
   1452 
    1453       if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
   1454         Label fast_object, not_fast_object;
   1455         __ CheckFastObjectElements(ebx, &not_fast_object, Label::kNear);
   1456         __ jmp(&fast_object);
   1457         // In case of fast smi-only, convert to fast object, otherwise bail out.
   1458         __ bind(&not_fast_object);
   1459         __ CheckFastSmiOnlyElements(ebx, &call_builtin);
   1460         // edi: elements array
   1461         // edx: receiver
   1462         // ebx: map
   1463         __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
   1464                                                FAST_ELEMENTS,
   1465                                                ebx,
   1466                                                edi,
   1467                                                &call_builtin);
   1468         ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
   1469         // Restore edi.
   1470         __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
   1471         __ bind(&fast_object);
   1472       } else {
   1473         __ CheckFastObjectElements(ebx, &call_builtin);
   1474       }
   1475 
   1476       // Save new length.
   1477       __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
   1478 
   1479       // Store the value.
   1480       __ lea(edx, FieldOperand(edi,
   1481                                eax, times_half_pointer_size,
   1482                                FixedArray::kHeaderSize - argc * kPointerSize));
   1483       __ mov(Operand(edx, 0), ecx);
   1484 
   1485       __ RecordWrite(edi, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
   1486                      OMIT_SMI_CHECK);
   1487 
   1488       __ ret((argc + 1) * kPointerSize);
   1489 
   1490       __ bind(&attempt_to_grow_elements);
   1491       if (!FLAG_inline_new) {
   1492         __ jmp(&call_builtin);
   1493       }
   1494 
   1495       __ mov(ebx, Operand(esp, argc * kPointerSize));
   1496       // Growing elements that are SMI-only requires special handling in case
   1497       // the new element is non-Smi. For now, delegate to the builtin.
   1498       Label no_fast_elements_check;
   1499       __ JumpIfSmi(ebx, &no_fast_elements_check);
   1500       __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
   1501       __ CheckFastObjectElements(ecx, &call_builtin, Label::kFar);
   1502       __ bind(&no_fast_elements_check);
   1503 
   1504       // We could be lucky and the elements array could be at the top of
   1505       // new-space.  In this case we can just grow it in place by moving the
   1506       // allocation pointer up.
   1507 
   1508       ExternalReference new_space_allocation_top =
   1509           ExternalReference::new_space_allocation_top_address(isolate());
   1510       ExternalReference new_space_allocation_limit =
   1511           ExternalReference::new_space_allocation_limit_address(isolate());
   1512 
   1513       const int kAllocationDelta = 4;
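               // Reserve four extra element slots: one for the value being pushed
               // and three spares, which are filled with the hole below.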
   1514       // Load top.
   1515       __ mov(ecx, Operand::StaticVariable(new_space_allocation_top));
   1516 
   1517       // Check if it's the end of elements.
   1518       __ lea(edx, FieldOperand(edi,
   1519                                eax, times_half_pointer_size,
   1520                                FixedArray::kHeaderSize - argc * kPointerSize));
   1521       __ cmp(edx, ecx);
   1522       __ j(not_equal, &call_builtin);
   1523       __ add(ecx, Immediate(kAllocationDelta * kPointerSize));
   1524       __ cmp(ecx, Operand::StaticVariable(new_space_allocation_limit));
   1525       __ j(above, &call_builtin);
   1526 
   1527       // We fit and could grow elements.
   1528       __ mov(Operand::StaticVariable(new_space_allocation_top), ecx);
   1529 
   1530       // Push the argument...
   1531       __ mov(Operand(edx, 0), ebx);
   1532       // ... and fill the rest with holes.
   1533       for (int i = 1; i < kAllocationDelta; i++) {
   1534         __ mov(Operand(edx, i * kPointerSize),
   1535                Immediate(factory()->the_hole_value()));
   1536       }
   1537 
   1538       // We know the elements array is in new space so we don't need the
   1539       // remembered set, but we just pushed a value onto it so we may have to
   1540       // tell the incremental marker to rescan the object that we just grew.  We
   1541       // don't need to worry about the holes because they are in old space and
   1542       // already marked black.
   1543       __ RecordWrite(edi, edx, ebx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
   1544 
    1545       // Restore the receiver to edx; the finishing sequence assumes it is there.
   1546       __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
   1547 
    1548       // Increment the elements' and the array's lengths.
   1549       __ add(FieldOperand(edi, FixedArray::kLengthOffset),
   1550              Immediate(Smi::FromInt(kAllocationDelta)));
   1551 
    1552       // NOTE: This only happens in new space, where we don't
    1553       // care about the black byte count on pages.  Otherwise we would
    1554       // have to update that too if the object is black.
   1555 
   1556       __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
   1557 
   1558       __ ret((argc + 1) * kPointerSize);
   1559     }
   1560 
   1561     __ bind(&call_builtin);
   1562     __ TailCallExternalReference(
   1563         ExternalReference(Builtins::c_ArrayPush, isolate()),
   1564         argc + 1,
   1565         1);
   1566   }
   1567 
   1568   __ bind(&miss);
   1569   GenerateMissBranch();
   1570 
   1571   // Return the generated code.
   1572   return GetCode(function);
   1573 }
   1574 
   1575 
   1576 Handle<Code> CallStubCompiler::CompileArrayPopCall(
   1577     Handle<Object> object,
   1578     Handle<JSObject> holder,
   1579     Handle<JSGlobalPropertyCell> cell,
   1580     Handle<JSFunction> function,
   1581     Handle<String> name) {
   1582   // ----------- S t a t e -------------
   1583   //  -- ecx                 : name
   1584   //  -- esp[0]              : return address
   1585   //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
   1586   //  -- ...
   1587   //  -- esp[(argc + 1) * 4] : receiver
   1588   // -----------------------------------
   1589 
   1590   // If object is not an array, bail out to regular call.
   1591   if (!object->IsJSArray() || !cell.is_null()) {
   1592     return Handle<Code>::null();
   1593   }
   1594 
   1595   Label miss, return_undefined, call_builtin;
   1596 
   1597   GenerateNameCheck(name, &miss);
   1598 
   1599   // Get the receiver from the stack.
   1600   const int argc = arguments().immediate();
   1601   __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
   1602 
   1603   // Check that the receiver isn't a smi.
   1604   __ JumpIfSmi(edx, &miss);
   1605   CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax, edi,
   1606                   name, &miss);
   1607 
   1608   // Get the elements array of the object.
   1609   __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset));
   1610 
   1611   // Check that the elements are in fast mode and writable.
   1612   __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
   1613          Immediate(factory()->fixed_array_map()));
   1614   __ j(not_equal, &call_builtin);
   1615 
   1616   // Get the array's length into ecx and calculate new length.
   1617   __ mov(ecx, FieldOperand(edx, JSArray::kLengthOffset));
   1618   __ sub(ecx, Immediate(Smi::FromInt(1)));
   1619   __ j(negative, &return_undefined);
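           // Subtracting Smi::FromInt(1) keeps ecx smi-encoded; a negative result
           // means the array was empty, so pop() returns undefined.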
   1620 
   1621   // Get the last element.
   1622   STATIC_ASSERT(kSmiTagSize == 1);
   1623   STATIC_ASSERT(kSmiTag == 0);
   1624   __ mov(eax, FieldOperand(ebx,
   1625                            ecx, times_half_pointer_size,
   1626                            FixedArray::kHeaderSize));
   1627   __ cmp(eax, Immediate(factory()->the_hole_value()));
   1628   __ j(equal, &call_builtin);
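           // A hole means the element is not present in the backing store (it could
           // be inherited from the prototype chain), so defer to the builtin.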
   1629 
   1630   // Set the array's length.
   1631   __ mov(FieldOperand(edx, JSArray::kLengthOffset), ecx);
   1632 
   1633   // Fill with the hole.
   1634   __ mov(FieldOperand(ebx,
   1635                       ecx, times_half_pointer_size,
   1636                       FixedArray::kHeaderSize),
   1637          Immediate(factory()->the_hole_value()));
   1638   __ ret((argc + 1) * kPointerSize);
   1639 
   1640   __ bind(&return_undefined);
   1641   __ mov(eax, Immediate(factory()->undefined_value()));
   1642   __ ret((argc + 1) * kPointerSize);
   1643 
   1644   __ bind(&call_builtin);
   1645   __ TailCallExternalReference(
   1646       ExternalReference(Builtins::c_ArrayPop, isolate()),
   1647       argc + 1,
   1648       1);
   1649 
   1650   __ bind(&miss);
   1651   GenerateMissBranch();
   1652 
   1653   // Return the generated code.
   1654   return GetCode(function);
   1655 }
   1656 
   1657 
   1658 Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
   1659     Handle<Object> object,
   1660     Handle<JSObject> holder,
   1661     Handle<JSGlobalPropertyCell> cell,
   1662     Handle<JSFunction> function,
   1663     Handle<String> name) {
   1664   // ----------- S t a t e -------------
   1665   //  -- ecx                 : function name
   1666   //  -- esp[0]              : return address
   1667   //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
   1668   //  -- ...
   1669   //  -- esp[(argc + 1) * 4] : receiver
   1670   // -----------------------------------
   1671 
   1672   // If object is not a string, bail out to regular call.
   1673   if (!object->IsString() || !cell.is_null()) {
   1674     return Handle<Code>::null();
   1675   }
   1676 
   1677   const int argc = arguments().immediate();
   1678 
   1679   Label miss;
   1680   Label name_miss;
   1681   Label index_out_of_range;
   1682   Label* index_out_of_range_label = &index_out_of_range;
   1683 
   1684   if (kind_ == Code::CALL_IC &&
   1685       (CallICBase::StringStubState::decode(extra_state_) ==
   1686        DEFAULT_STRING_STUB)) {
   1687     index_out_of_range_label = &miss;
   1688   }
   1689 
   1690   GenerateNameCheck(name, &name_miss);
   1691 
   1692   // Check that the maps starting from the prototype haven't changed.
   1693   GenerateDirectLoadGlobalFunctionPrototype(masm(),
   1694                                             Context::STRING_FUNCTION_INDEX,
   1695                                             eax,
   1696                                             &miss);
   1697   ASSERT(!object.is_identical_to(holder));
   1698   CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
   1699                   eax, holder, ebx, edx, edi, name, &miss);
   1700 
   1701   Register receiver = ebx;
   1702   Register index = edi;
   1703   Register result = eax;
   1704   __ mov(receiver, Operand(esp, (argc + 1) * kPointerSize));
   1705   if (argc > 0) {
   1706     __ mov(index, Operand(esp, (argc - 0) * kPointerSize));
   1707   } else {
   1708     __ Set(index, Immediate(factory()->undefined_value()));
   1709   }
   1710 
   1711   StringCharCodeAtGenerator generator(receiver,
   1712                                       index,
   1713                                       result,
   1714                                       &miss,  // When not a string.
   1715                                       &miss,  // When not a number.
   1716                                       index_out_of_range_label,
   1717                                       STRING_INDEX_IS_NUMBER);
   1718   generator.GenerateFast(masm());
   1719   __ ret((argc + 1) * kPointerSize);
   1720 
   1721   StubRuntimeCallHelper call_helper;
   1722   generator.GenerateSlow(masm(), call_helper);
   1723 
   1724   if (index_out_of_range.is_linked()) {
   1725     __ bind(&index_out_of_range);
   1726     __ Set(eax, Immediate(factory()->nan_value()));
   1727     __ ret((argc + 1) * kPointerSize);
   1728   }
   1729 
   1730   __ bind(&miss);
   1731   // Restore function name in ecx.
   1732   __ Set(ecx, Immediate(name));
   1733   __ bind(&name_miss);
   1734   GenerateMissBranch();
   1735 
   1736   // Return the generated code.
   1737   return GetCode(function);
   1738 }
   1739 
   1740 
   1741 Handle<Code> CallStubCompiler::CompileStringCharAtCall(
   1742     Handle<Object> object,
   1743     Handle<JSObject> holder,
   1744     Handle<JSGlobalPropertyCell> cell,
   1745     Handle<JSFunction> function,
   1746     Handle<String> name) {
   1747   // ----------- S t a t e -------------
   1748   //  -- ecx                 : function name
   1749   //  -- esp[0]              : return address
   1750   //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
   1751   //  -- ...
   1752   //  -- esp[(argc + 1) * 4] : receiver
   1753   // -----------------------------------
   1754 
   1755   // If object is not a string, bail out to regular call.
   1756   if (!object->IsString() || !cell.is_null()) {
   1757     return Handle<Code>::null();
   1758   }
   1759 
   1760   const int argc = arguments().immediate();
   1761 
   1762   Label miss;
   1763   Label name_miss;
   1764   Label index_out_of_range;
   1765   Label* index_out_of_range_label = &index_out_of_range;
   1766 
   1767   if (kind_ == Code::CALL_IC &&
   1768       (CallICBase::StringStubState::decode(extra_state_) ==
   1769        DEFAULT_STRING_STUB)) {
   1770     index_out_of_range_label = &miss;
   1771   }
   1772 
   1773   GenerateNameCheck(name, &name_miss);
   1774 
   1775   // Check that the maps starting from the prototype haven't changed.
   1776   GenerateDirectLoadGlobalFunctionPrototype(masm(),
   1777                                             Context::STRING_FUNCTION_INDEX,
   1778                                             eax,
   1779                                             &miss);
   1780   ASSERT(!object.is_identical_to(holder));
   1781   CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
   1782                   eax, holder, ebx, edx, edi, name, &miss);
   1783 
   1784   Register receiver = eax;
   1785   Register index = edi;
   1786   Register scratch = edx;
   1787   Register result = eax;
   1788   __ mov(receiver, Operand(esp, (argc + 1) * kPointerSize));
   1789   if (argc > 0) {
   1790     __ mov(index, Operand(esp, (argc - 0) * kPointerSize));
   1791   } else {
   1792     __ Set(index, Immediate(factory()->undefined_value()));
   1793   }
   1794 
   1795   StringCharAtGenerator generator(receiver,
   1796                                   index,
   1797                                   scratch,
   1798                                   result,
   1799                                   &miss,  // When not a string.
   1800                                   &miss,  // When not a number.
   1801                                   index_out_of_range_label,
   1802                                   STRING_INDEX_IS_NUMBER);
   1803   generator.GenerateFast(masm());
   1804   __ ret((argc + 1) * kPointerSize);
   1805 
   1806   StubRuntimeCallHelper call_helper;
   1807   generator.GenerateSlow(masm(), call_helper);
   1808 
   1809   if (index_out_of_range.is_linked()) {
   1810     __ bind(&index_out_of_range);
   1811     __ Set(eax, Immediate(factory()->empty_string()));
   1812     __ ret((argc + 1) * kPointerSize);
   1813   }
   1814 
   1815   __ bind(&miss);
   1816   // Restore function name in ecx.
   1817   __ Set(ecx, Immediate(name));
   1818   __ bind(&name_miss);
   1819   GenerateMissBranch();
   1820 
   1821   // Return the generated code.
   1822   return GetCode(function);
   1823 }
   1824 
   1825 
   1826 Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
   1827     Handle<Object> object,
   1828     Handle<JSObject> holder,
   1829     Handle<JSGlobalPropertyCell> cell,
   1830     Handle<JSFunction> function,
   1831     Handle<String> name) {
   1832   // ----------- S t a t e -------------
   1833   //  -- ecx                 : function name
   1834   //  -- esp[0]              : return address
   1835   //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
   1836   //  -- ...
   1837   //  -- esp[(argc + 1) * 4] : receiver
   1838   // -----------------------------------
   1839 
   1840   const int argc = arguments().immediate();
   1841 
   1842   // If the object is not a JSObject or we got an unexpected number of
   1843   // arguments, bail out to the regular call.
   1844   if (!object->IsJSObject() || argc != 1) {
   1845     return Handle<Code>::null();
   1846   }
   1847 
   1848   Label miss;
   1849   GenerateNameCheck(name, &miss);
   1850 
   1851   if (cell.is_null()) {
   1852     __ mov(edx, Operand(esp, 2 * kPointerSize));
   1853     STATIC_ASSERT(kSmiTag == 0);
   1854     __ JumpIfSmi(edx, &miss);
   1855     CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax, edi,
   1856                     name, &miss);
   1857   } else {
   1858     ASSERT(cell->value() == *function);
   1859     GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
   1860                                 &miss);
   1861     GenerateLoadFunctionFromCell(cell, function, &miss);
   1862   }
   1863 
   1864   // Load the char code argument.
   1865   Register code = ebx;
   1866   __ mov(code, Operand(esp, 1 * kPointerSize));
   1867 
   1868   // Check the code is a smi.
   1869   Label slow;
   1870   STATIC_ASSERT(kSmiTag == 0);
   1871   __ JumpIfNotSmi(code, &slow);
   1872 
   1873   // Convert the smi code to uint16.
   1874   __ and_(code, Immediate(Smi::FromInt(0xffff)));
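           // Masking with the smi-encoded 0xffff keeps the smi tag intact while
           // clamping the char code to the uint16 range.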
   1875 
   1876   StringCharFromCodeGenerator generator(code, eax);
   1877   generator.GenerateFast(masm());
   1878   __ ret(2 * kPointerSize);
   1879 
   1880   StubRuntimeCallHelper call_helper;
   1881   generator.GenerateSlow(masm(), call_helper);
   1882 
   1883   // Tail call the full function. We do not have to patch the receiver
   1884   // because the function makes no use of it.
   1885   __ bind(&slow);
   1886   CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
   1887       ? CALL_AS_FUNCTION
   1888       : CALL_AS_METHOD;
   1889   __ InvokeFunction(function, arguments(), JUMP_FUNCTION,
   1890                     NullCallWrapper(), call_kind);
   1891 
   1892   __ bind(&miss);
   1893   // ecx: function name.
   1894   GenerateMissBranch();
   1895 
   1896   // Return the generated code.
   1897   return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
   1898 }
   1899 
   1900 
   1901 Handle<Code> CallStubCompiler::CompileMathFloorCall(
   1902     Handle<Object> object,
   1903     Handle<JSObject> holder,
   1904     Handle<JSGlobalPropertyCell> cell,
   1905     Handle<JSFunction> function,
   1906     Handle<String> name) {
   1907   // ----------- S t a t e -------------
   1908   //  -- ecx                 : name
   1909   //  -- esp[0]              : return address
   1910   //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
   1911   //  -- ...
   1912   //  -- esp[(argc + 1) * 4] : receiver
   1913   // -----------------------------------
   1914 
   1915   if (!CpuFeatures::IsSupported(SSE2)) {
   1916     return Handle<Code>::null();
   1917   }
   1918 
   1919   CpuFeatures::Scope use_sse2(SSE2);
   1920 
   1921   const int argc = arguments().immediate();
   1922 
   1923   // If the object is not a JSObject or we got an unexpected number of
   1924   // arguments, bail out to the regular call.
   1925   if (!object->IsJSObject() || argc != 1) {
   1926     return Handle<Code>::null();
   1927   }
   1928 
   1929   Label miss;
   1930   GenerateNameCheck(name, &miss);
   1931 
   1932   if (cell.is_null()) {
   1933     __ mov(edx, Operand(esp, 2 * kPointerSize));
   1934 
   1935     STATIC_ASSERT(kSmiTag == 0);
   1936     __ JumpIfSmi(edx, &miss);
   1937 
   1938     CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax, edi,
   1939                     name, &miss);
   1940   } else {
   1941     ASSERT(cell->value() == *function);
   1942     GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
   1943                                 &miss);
   1944     GenerateLoadFunctionFromCell(cell, function, &miss);
   1945   }
   1946 
   1947   // Load the (only) argument into eax.
   1948   __ mov(eax, Operand(esp, 1 * kPointerSize));
   1949 
   1950   // Check if the argument is a smi.
   1951   Label smi;
   1952   STATIC_ASSERT(kSmiTag == 0);
   1953   __ JumpIfSmi(eax, &smi);
   1954 
   1955   // Check if the argument is a heap number and load its value into xmm0.
   1956   Label slow;
   1957   __ CheckMap(eax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK);
   1958   __ movdbl(xmm0, FieldOperand(eax, HeapNumber::kValueOffset));
   1959 
   1960   // Check if the argument is strictly positive. Note this also
   1961   // discards NaN.
   1962   __ xorpd(xmm1, xmm1);
   1963   __ ucomisd(xmm0, xmm1);
   1964   __ j(below_equal, &slow);
   1965 
   1966   // Do a truncating conversion.
   1967   __ cvttsd2si(eax, Operand(xmm0));
   1968 
   1969   // Check if the result fits into a smi. Note this also checks for
   1970   // 0x80000000 which signals a failed conversion.
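           // The argument is known to be positive here, so a set bit 30 or 31 means
           // the result is either that failure pattern or too large for a 31-bit smi.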
   1971   Label wont_fit_into_smi;
   1972   __ test(eax, Immediate(0xc0000000));
   1973   __ j(not_zero, &wont_fit_into_smi);
   1974 
   1975   // Smi tag and return.
   1976   __ SmiTag(eax);
   1977   __ bind(&smi);
   1978   __ ret(2 * kPointerSize);
   1979 
   1980   // Check if the argument is < 2^kMantissaBits.
   1981   Label already_round;
   1982   __ bind(&wont_fit_into_smi);
   1983   __ LoadPowerOf2(xmm1, ebx, HeapNumber::kMantissaBits);
   1984   __ ucomisd(xmm0, xmm1);
   1985   __ j(above_equal, &already_round);
   1986 
   1987   // Save a copy of the argument.
   1988   __ movaps(xmm2, xmm0);
   1989 
   1990   // Compute (argument + 2^kMantissaBits) - 2^kMantissaBits.
   1991   __ addsd(xmm0, xmm1);
   1992   __ subsd(xmm0, xmm1);
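           // Adding 2^52 pushes the fractional bits out of the mantissa, so the sum
           // is rounded to an integer; subtracting 2^52 again leaves that integer in
           // xmm0.  The correction below turns round-to-nearest into floor.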
   1993 
   1994   // Compare the argument and the tentative result to get the right mask:
   1995   //   if xmm2 < xmm0:
   1996   //     xmm2 = 1...1
   1997   //   else:
   1998   //     xmm2 = 0...0
   1999   __ cmpltsd(xmm2, xmm0);
   2000 
   2001   // Subtract 1 if the argument was less than the tentative result.
   2002   __ LoadPowerOf2(xmm1, ebx, 0);
   2003   __ andpd(xmm1, xmm2);
   2004   __ subsd(xmm0, xmm1);
   2005 
   2006   // Return a new heap number.
   2007   __ AllocateHeapNumber(eax, ebx, edx, &slow);
   2008   __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
   2009   __ ret(2 * kPointerSize);
   2010 
   2011   // Return the argument (when it's an already round heap number).
   2012   __ bind(&already_round);
   2013   __ mov(eax, Operand(esp, 1 * kPointerSize));
   2014   __ ret(2 * kPointerSize);
   2015 
   2016   // Tail call the full function. We do not have to patch the receiver
   2017   // because the function makes no use of it.
   2018   __ bind(&slow);
   2019   __ InvokeFunction(function, arguments(), JUMP_FUNCTION,
   2020                     NullCallWrapper(), CALL_AS_METHOD);
   2021 
   2022   __ bind(&miss);
   2023   // ecx: function name.
   2024   GenerateMissBranch();
   2025 
   2026   // Return the generated code.
   2027   return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
   2028 }
   2029 
   2030 
   2031 Handle<Code> CallStubCompiler::CompileMathAbsCall(
   2032     Handle<Object> object,
   2033     Handle<JSObject> holder,
   2034     Handle<JSGlobalPropertyCell> cell,
   2035     Handle<JSFunction> function,
   2036     Handle<String> name) {
   2037   // ----------- S t a t e -------------
   2038   //  -- ecx                 : name
   2039   //  -- esp[0]              : return address
   2040   //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
   2041   //  -- ...
   2042   //  -- esp[(argc + 1) * 4] : receiver
   2043   // -----------------------------------
   2044 
   2045   const int argc = arguments().immediate();
   2046 
   2047   // If the object is not a JSObject or we got an unexpected number of
   2048   // arguments, bail out to the regular call.
   2049   if (!object->IsJSObject() || argc != 1) {
   2050     return Handle<Code>::null();
   2051   }
   2052 
   2053   Label miss;
   2054   GenerateNameCheck(name, &miss);
   2055 
   2056   if (cell.is_null()) {
   2057     __ mov(edx, Operand(esp, 2 * kPointerSize));
   2058 
   2059     STATIC_ASSERT(kSmiTag == 0);
   2060     __ JumpIfSmi(edx, &miss);
   2061 
   2062     CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax, edi,
   2063                     name, &miss);
   2064   } else {
   2065     ASSERT(cell->value() == *function);
   2066     GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
   2067                                 &miss);
   2068     GenerateLoadFunctionFromCell(cell, function, &miss);
   2069   }
   2070 
   2071   // Load the (only) argument into eax.
   2072   __ mov(eax, Operand(esp, 1 * kPointerSize));
   2073 
   2074   // Check if the argument is a smi.
   2075   Label not_smi;
   2076   STATIC_ASSERT(kSmiTag == 0);
   2077   __ JumpIfNotSmi(eax, &not_smi);
   2078 
   2079   // Set ebx to 1...1 (== -1) if the argument is negative, or to 0...0
   2080   // otherwise.
   2081   __ mov(ebx, eax);
   2082   __ sar(ebx, kBitsPerInt - 1);
   2083 
   2084   // Do bitwise not or do nothing depending on ebx.
   2085   __ xor_(eax, ebx);
   2086 
   2087   // Add 1 or do nothing depending on ebx.
   2088   __ sub(eax, ebx);
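           // Together this is the branchless absolute value: with mask = x >> 31,
           // abs(x) == (x ^ mask) - mask.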
   2089 
   2090   // If the result is still negative, go to the slow case.
   2091   // This only happens for the most negative smi.
   2092   Label slow;
   2093   __ j(negative, &slow);
   2094 
   2095   // Smi case done.
   2096   __ ret(2 * kPointerSize);
   2097 
   2098   // Check if the argument is a heap number and load its exponent and
   2099   // sign into ebx.
   2100   __ bind(&not_smi);
   2101   __ CheckMap(eax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK);
   2102   __ mov(ebx, FieldOperand(eax, HeapNumber::kExponentOffset));
   2103 
   2104   // Check the sign of the argument. If the argument is positive,
   2105   // just return it.
   2106   Label negative_sign;
   2107   __ test(ebx, Immediate(HeapNumber::kSignMask));
   2108   __ j(not_zero, &negative_sign);
   2109   __ ret(2 * kPointerSize);
   2110 
   2111   // If the argument is negative, clear the sign, and return a new
   2112   // number.
   2113   __ bind(&negative_sign);
   2114   __ and_(ebx, ~HeapNumber::kSignMask);
   2115   __ mov(ecx, FieldOperand(eax, HeapNumber::kMantissaOffset));
   2116   __ AllocateHeapNumber(eax, edi, edx, &slow);
   2117   __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ebx);
   2118   __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
   2119   __ ret(2 * kPointerSize);
   2120 
   2121   // Tail call the full function. We do not have to patch the receiver
   2122   // because the function makes no use of it.
   2123   __ bind(&slow);
   2124   __ InvokeFunction(function, arguments(), JUMP_FUNCTION,
   2125                     NullCallWrapper(), CALL_AS_METHOD);
   2126 
   2127   __ bind(&miss);
   2128   // ecx: function name.
   2129   GenerateMissBranch();
   2130 
   2131   // Return the generated code.
   2132   return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
   2133 }
   2134 
   2135 
   2136 Handle<Code> CallStubCompiler::CompileFastApiCall(
   2137     const CallOptimization& optimization,
   2138     Handle<Object> object,
   2139     Handle<JSObject> holder,
   2140     Handle<JSGlobalPropertyCell> cell,
   2141     Handle<JSFunction> function,
   2142     Handle<String> name) {
   2143   ASSERT(optimization.is_simple_api_call());
    2144   // Bail out if the object is a global object, as we don't want to
    2145   // repatch it to the global receiver.
   2146   if (object->IsGlobalObject()) return Handle<Code>::null();
   2147   if (!cell.is_null()) return Handle<Code>::null();
   2148   if (!object->IsJSObject()) return Handle<Code>::null();
   2149   int depth = optimization.GetPrototypeDepthOfExpectedType(
   2150       Handle<JSObject>::cast(object), holder);
   2151   if (depth == kInvalidProtoDepth) return Handle<Code>::null();
   2152 
   2153   Label miss, miss_before_stack_reserved;
   2154 
   2155   GenerateNameCheck(name, &miss_before_stack_reserved);
   2156 
   2157   // Get the receiver from the stack.
   2158   const int argc = arguments().immediate();
   2159   __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
   2160 
   2161   // Check that the receiver isn't a smi.
   2162   __ JumpIfSmi(edx, &miss_before_stack_reserved);
   2163 
   2164   Counters* counters = isolate()->counters();
   2165   __ IncrementCounter(counters->call_const(), 1);
   2166   __ IncrementCounter(counters->call_const_fast_api(), 1);
   2167 
   2168   // Allocate space for v8::Arguments implicit values. Must be initialized
   2169   // before calling any runtime function.
   2170   __ sub(esp, Immediate(kFastApiCallArguments * kPointerSize));
   2171 
   2172   // Check that the maps haven't changed and find a Holder as a side effect.
   2173   CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax, edi,
   2174                   name, depth, &miss);
   2175 
   2176   // Move the return address on top of the stack.
   2177   __ mov(eax, Operand(esp, 3 * kPointerSize));
   2178   __ mov(Operand(esp, 0 * kPointerSize), eax);
   2179 
   2180   // esp[2 * kPointerSize] is uninitialized, esp[3 * kPointerSize] contains
   2181   // duplicate of return address and will be overwritten.
   2182   GenerateFastApiCall(masm(), optimization, argc);
   2183 
   2184   __ bind(&miss);
   2185   __ add(esp, Immediate(kFastApiCallArguments * kPointerSize));
   2186 
   2187   __ bind(&miss_before_stack_reserved);
   2188   GenerateMissBranch();
   2189 
   2190   // Return the generated code.
   2191   return GetCode(function);
   2192 }
   2193 
   2194 
   2195 Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
   2196                                                    Handle<JSObject> holder,
   2197                                                    Handle<JSFunction> function,
   2198                                                    Handle<String> name,
   2199                                                    CheckType check) {
   2200   // ----------- S t a t e -------------
   2201   //  -- ecx                 : name
   2202   //  -- esp[0]              : return address
   2203   //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
   2204   //  -- ...
   2205   //  -- esp[(argc + 1) * 4] : receiver
   2206   // -----------------------------------
   2207 
   2208   if (HasCustomCallGenerator(function)) {
   2209     Handle<Code> code = CompileCustomCall(object, holder,
   2210                                           Handle<JSGlobalPropertyCell>::null(),
   2211                                           function, name);
   2212     // A null handle means bail out to the regular compiler code below.
   2213     if (!code.is_null()) return code;
   2214   }
   2215 
   2216   Label miss;
   2217   GenerateNameCheck(name, &miss);
   2218 
   2219   // Get the receiver from the stack.
   2220   const int argc = arguments().immediate();
   2221   __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
   2222 
   2223   // Check that the receiver isn't a smi.
   2224   if (check != NUMBER_CHECK) {
   2225     __ JumpIfSmi(edx, &miss);
   2226   }
   2227 
    2228   // Make sure that it's okay not to patch the on-stack receiver
    2229   // unless we're doing a receiver map check.
   2230   ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
   2231   switch (check) {
   2232     case RECEIVER_MAP_CHECK:
   2233       __ IncrementCounter(isolate()->counters()->call_const(), 1);
   2234 
   2235       // Check that the maps haven't changed.
   2236       CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax,
   2237                       edi, name, &miss);
   2238 
   2239       // Patch the receiver on the stack with the global proxy if
   2240       // necessary.
   2241       if (object->IsGlobalObject()) {
   2242         __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
   2243         __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
   2244       }
   2245       break;
   2246 
   2247     case STRING_CHECK:
   2248       if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
   2249         // Check that the object is a string or a symbol.
   2250         __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, eax);
   2251         __ j(above_equal, &miss);
   2252         // Check that the maps starting from the prototype haven't changed.
   2253         GenerateDirectLoadGlobalFunctionPrototype(
   2254             masm(), Context::STRING_FUNCTION_INDEX, eax, &miss);
   2255         CheckPrototypes(
   2256             Handle<JSObject>(JSObject::cast(object->GetPrototype())),
   2257             eax, holder, ebx, edx, edi, name, &miss);
   2258       } else {
   2259         // Calling non-strict non-builtins with a value as the receiver
   2260         // requires boxing.
   2261         __ jmp(&miss);
   2262       }
   2263       break;
   2264 
   2265     case NUMBER_CHECK:
   2266       if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
   2267         Label fast;
   2268         // Check that the object is a smi or a heap number.
   2269         __ JumpIfSmi(edx, &fast);
   2270         __ CmpObjectType(edx, HEAP_NUMBER_TYPE, eax);
   2271         __ j(not_equal, &miss);
   2272         __ bind(&fast);
   2273         // Check that the maps starting from the prototype haven't changed.
   2274         GenerateDirectLoadGlobalFunctionPrototype(
   2275             masm(), Context::NUMBER_FUNCTION_INDEX, eax, &miss);
   2276         CheckPrototypes(
   2277             Handle<JSObject>(JSObject::cast(object->GetPrototype())),
   2278             eax, holder, ebx, edx, edi, name, &miss);
   2279       } else {
   2280         // Calling non-strict non-builtins with a value as the receiver
   2281         // requires boxing.
   2282         __ jmp(&miss);
   2283       }
   2284       break;
   2285 
   2286     case BOOLEAN_CHECK:
   2287       if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
   2288         Label fast;
   2289         // Check that the object is a boolean.
   2290         __ cmp(edx, factory()->true_value());
   2291         __ j(equal, &fast);
   2292         __ cmp(edx, factory()->false_value());
   2293         __ j(not_equal, &miss);
   2294         __ bind(&fast);
   2295         // Check that the maps starting from the prototype haven't changed.
   2296         GenerateDirectLoadGlobalFunctionPrototype(
   2297             masm(), Context::BOOLEAN_FUNCTION_INDEX, eax, &miss);
   2298         CheckPrototypes(
   2299             Handle<JSObject>(JSObject::cast(object->GetPrototype())),
   2300             eax, holder, ebx, edx, edi, name, &miss);
   2301       } else {
   2302         // Calling non-strict non-builtins with a value as the receiver
   2303         // requires boxing.
   2304         __ jmp(&miss);
   2305       }
   2306       break;
   2307   }
   2308 
   2309   CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
   2310       ? CALL_AS_FUNCTION
   2311       : CALL_AS_METHOD;
   2312   __ InvokeFunction(function, arguments(), JUMP_FUNCTION,
   2313                     NullCallWrapper(), call_kind);
   2314 
   2315   // Handle call cache miss.
   2316   __ bind(&miss);
   2317   GenerateMissBranch();
   2318 
   2319   // Return the generated code.
   2320   return GetCode(function);
   2321 }
   2322 
   2323 
   2324 Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
   2325                                                       Handle<JSObject> holder,
   2326                                                       Handle<String> name) {
   2327   // ----------- S t a t e -------------
   2328   //  -- ecx                 : name
   2329   //  -- esp[0]              : return address
   2330   //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
   2331   //  -- ...
   2332   //  -- esp[(argc + 1) * 4] : receiver
   2333   // -----------------------------------
   2334   Label miss;
   2335 
   2336   GenerateNameCheck(name, &miss);
   2337 
   2338   // Get the number of arguments.
   2339   const int argc = arguments().immediate();
   2340 
   2341   LookupResult lookup(isolate());
   2342   LookupPostInterceptor(holder, name, &lookup);
   2343 
   2344   // Get the receiver from the stack.
   2345   __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
   2346 
   2347   CallInterceptorCompiler compiler(this, arguments(), ecx, extra_state_);
   2348   compiler.Compile(masm(), object, holder, name, &lookup, edx, ebx, edi, eax,
   2349                    &miss);
   2350 
   2351   // Restore receiver.
   2352   __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
   2353 
   2354   // Check that the function really is a function.
   2355   __ JumpIfSmi(eax, &miss);
   2356   __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
   2357   __ j(not_equal, &miss);
   2358 
   2359   // Patch the receiver on the stack with the global proxy if
   2360   // necessary.
   2361   if (object->IsGlobalObject()) {
   2362     __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
   2363     __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
   2364   }
   2365 
   2366   // Invoke the function.
   2367   __ mov(edi, eax);
   2368   CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
   2369       ? CALL_AS_FUNCTION
   2370       : CALL_AS_METHOD;
   2371   __ InvokeFunction(edi, arguments(), JUMP_FUNCTION,
   2372                     NullCallWrapper(), call_kind);
   2373 
   2374   // Handle load cache miss.
   2375   __ bind(&miss);
   2376   GenerateMissBranch();
   2377 
   2378   // Return the generated code.
   2379   return GetCode(INTERCEPTOR, name);
   2380 }
   2381 
   2382 
   2383 Handle<Code> CallStubCompiler::CompileCallGlobal(
   2384     Handle<JSObject> object,
   2385     Handle<GlobalObject> holder,
   2386     Handle<JSGlobalPropertyCell> cell,
   2387     Handle<JSFunction> function,
   2388     Handle<String> name) {
   2389   // ----------- S t a t e -------------
   2390   //  -- ecx                 : name
   2391   //  -- esp[0]              : return address
   2392   //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
   2393   //  -- ...
   2394   //  -- esp[(argc + 1) * 4] : receiver
   2395   // -----------------------------------
   2396 
   2397   if (HasCustomCallGenerator(function)) {
   2398     Handle<Code> code = CompileCustomCall(object, holder, cell, function, name);
   2399     // A null handle means bail out to the regular compiler code below.
   2400     if (!code.is_null()) return code;
   2401   }
   2402 
   2403   Label miss;
   2404   GenerateNameCheck(name, &miss);
   2405 
   2406   // Get the number of arguments.
   2407   const int argc = arguments().immediate();
   2408   GenerateGlobalReceiverCheck(object, holder, name, &miss);
   2409   GenerateLoadFunctionFromCell(cell, function, &miss);
   2410 
   2411   // Patch the receiver on the stack with the global proxy.
   2412   if (object->IsGlobalObject()) {
   2413     __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
   2414     __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
   2415   }
   2416 
   2417   // Set up the context (function already in edi).
   2418   __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
   2419 
   2420   // Jump to the cached code (tail call).
   2421   Counters* counters = isolate()->counters();
   2422   __ IncrementCounter(counters->call_global_inline(), 1);
   2423   ParameterCount expected(function->shared()->formal_parameter_count());
   2424   CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
   2425       ? CALL_AS_FUNCTION
   2426       : CALL_AS_METHOD;
   2427   // We call indirectly through the code field in the function to
   2428   // allow recompilation to take effect without changing any of the
   2429   // call sites.
   2430   __ InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
   2431                 expected, arguments(), JUMP_FUNCTION,
   2432                 NullCallWrapper(), call_kind);
   2433 
   2434   // Handle call cache miss.
   2435   __ bind(&miss);
   2436   __ IncrementCounter(counters->call_global_inline_miss(), 1);
   2437   GenerateMissBranch();
   2438 
   2439   // Return the generated code.
   2440   return GetCode(NORMAL, name);
   2441 }
   2442 
   2443 
   2444 Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
   2445                                                   int index,
   2446                                                   Handle<Map> transition,
   2447                                                   Handle<String> name) {
   2448   // ----------- S t a t e -------------
   2449   //  -- eax    : value
   2450   //  -- ecx    : name
   2451   //  -- edx    : receiver
   2452   //  -- esp[0] : return address
   2453   // -----------------------------------
   2454   Label miss;
   2455 
   2456   // Generate store field code.  Trashes the name register.
   2457   GenerateStoreField(masm(), object, index, transition, edx, ecx, ebx, &miss);
   2458 
   2459   // Handle store cache miss.
   2460   __ bind(&miss);
   2461   __ mov(ecx, Immediate(name));  // restore name
   2462   Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
   2463   __ jmp(ic, RelocInfo::CODE_TARGET);
   2464 
   2465   // Return the generated code.
   2466   return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
   2467 }
   2468 
   2469 
   2470 Handle<Code> StoreStubCompiler::CompileStoreCallback(
   2471     Handle<JSObject> object,
   2472     Handle<AccessorInfo> callback,
   2473     Handle<String> name) {
   2474   // ----------- S t a t e -------------
   2475   //  -- eax    : value
   2476   //  -- ecx    : name
   2477   //  -- edx    : receiver
   2478   //  -- esp[0] : return address
   2479   // -----------------------------------
   2480   Label miss;
   2481 
   2482   // Check that the map of the object hasn't changed.
   2483   __ CheckMap(edx, Handle<Map>(object->map()),
   2484               &miss, DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
   2485 
   2486   // Perform global security token check if needed.
   2487   if (object->IsJSGlobalProxy()) {
   2488     __ CheckAccessGlobalProxy(edx, ebx, &miss);
   2489   }
   2490 
   2491   // Stub never generated for non-global objects that require access
   2492   // checks.
   2493   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
   2494 
   2495   __ pop(ebx);  // remove the return address
   2496   __ push(edx);  // receiver
   2497   __ push(Immediate(callback));  // callback info
   2498   __ push(ecx);  // name
   2499   __ push(eax);  // value
   2500   __ push(ebx);  // restore return address
   2501 
   2502   // Do tail-call to the runtime system.
   2503   ExternalReference store_callback_property =
   2504       ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
   2505   __ TailCallExternalReference(store_callback_property, 4, 1);
   2506 
   2507   // Handle store cache miss.
   2508   __ bind(&miss);
   2509   Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
   2510   __ jmp(ic, RelocInfo::CODE_TARGET);
   2511 
   2512   // Return the generated code.
   2513   return GetCode(CALLBACKS, name);
   2514 }
   2515 
   2516 
   2517 Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
   2518     Handle<JSObject> receiver,
   2519     Handle<String> name) {
   2520   // ----------- S t a t e -------------
   2521   //  -- eax    : value
   2522   //  -- ecx    : name
   2523   //  -- edx    : receiver
   2524   //  -- esp[0] : return address
   2525   // -----------------------------------
   2526   Label miss;
   2527 
   2528   // Check that the map of the object hasn't changed.
   2529   __ CheckMap(edx, Handle<Map>(receiver->map()),
   2530               &miss, DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
   2531 
   2532   // Perform global security token check if needed.
   2533   if (receiver->IsJSGlobalProxy()) {
   2534     __ CheckAccessGlobalProxy(edx, ebx, &miss);
   2535   }
   2536 
   2537   // Stub never generated for non-global objects that require access
   2538   // checks.
   2539   ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
   2540 
   2541   __ pop(ebx);  // remove the return address
   2542   __ push(edx);  // receiver
   2543   __ push(ecx);  // name
   2544   __ push(eax);  // value
   2545   __ push(Immediate(Smi::FromInt(strict_mode_)));
   2546   __ push(ebx);  // restore return address
   2547 
   2548   // Do tail-call to the runtime system.
   2549   ExternalReference store_ic_property =
   2550       ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
   2551   __ TailCallExternalReference(store_ic_property, 4, 1);
   2552 
   2553   // Handle store cache miss.
   2554   __ bind(&miss);
   2555   Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
   2556   __ jmp(ic, RelocInfo::CODE_TARGET);
   2557 
   2558   // Return the generated code.
   2559   return GetCode(INTERCEPTOR, name);
   2560 }
   2561 
   2562 
   2563 Handle<Code> StoreStubCompiler::CompileStoreGlobal(
   2564     Handle<GlobalObject> object,
   2565     Handle<JSGlobalPropertyCell> cell,
   2566     Handle<String> name) {
   2567   // ----------- S t a t e -------------
   2568   //  -- eax    : value
   2569   //  -- ecx    : name
   2570   //  -- edx    : receiver
   2571   //  -- esp[0] : return address
   2572   // -----------------------------------
   2573   Label miss;
   2574 
   2575   // Check that the map of the global has not changed.
   2576   __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
   2577          Immediate(Handle<Map>(object->map())));
   2578   __ j(not_equal, &miss);
   2579 
   2580   // Compute the cell operand to use.
   2581   __ mov(ebx, Immediate(cell));
   2582   Operand cell_operand = FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset);
   2583 
    2584   // Check that the value in the cell is not the hole. If it is, this
    2585   // cell could have been deleted, and reintroducing the global would
    2586   // require updating the property details in the global object's
    2587   // property dictionary. We bail out to the runtime system to do that.
   2588   __ cmp(cell_operand, factory()->the_hole_value());
   2589   __ j(equal, &miss);
   2590 
   2591   // Store the value in the cell.
   2592   __ mov(cell_operand, eax);
   2593   // No write barrier here, because cells are always rescanned.
   2594 
   2595   // Return the value (register eax).
   2596   Counters* counters = isolate()->counters();
   2597   __ IncrementCounter(counters->named_store_global_inline(), 1);
   2598   __ ret(0);
   2599 
   2600   // Handle store cache miss.
   2601   __ bind(&miss);
   2602   __ IncrementCounter(counters->named_store_global_inline_miss(), 1);
   2603   Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
   2604   __ jmp(ic, RelocInfo::CODE_TARGET);
   2605 
   2606   // Return the generated code.
   2607   return GetCode(NORMAL, name);
   2608 }
   2609 
   2610 
   2611 Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
   2612                                                        int index,
   2613                                                        Handle<Map> transition,
   2614                                                        Handle<String> name) {
   2615   // ----------- S t a t e -------------
   2616   //  -- eax    : value
   2617   //  -- ecx    : key
   2618   //  -- edx    : receiver
   2619   //  -- esp[0] : return address
   2620   // -----------------------------------
   2621   Label miss;
   2622 
   2623   Counters* counters = isolate()->counters();
   2624   __ IncrementCounter(counters->keyed_store_field(), 1);
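           // The counter is decremented again on the miss path below, so its net
           // value only counts stores handled by this stub's fast path.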
   2625 
   2626   // Check that the name has not changed.
   2627   __ cmp(ecx, Immediate(name));
   2628   __ j(not_equal, &miss);
   2629 
   2630   // Generate store field code.  Trashes the name register.
   2631   GenerateStoreField(masm(), object, index, transition, edx, ecx, ebx, &miss);
   2632 
   2633   // Handle store cache miss.
   2634   __ bind(&miss);
   2635   __ DecrementCounter(counters->keyed_store_field(), 1);
   2636   Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
   2637   __ jmp(ic, RelocInfo::CODE_TARGET);
   2638 
   2639   // Return the generated code.
   2640   return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
   2641 }
   2642 
   2643 
   2644 Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
   2645     Handle<Map> receiver_map) {
   2646   // ----------- S t a t e -------------
   2647   //  -- eax    : value
   2648   //  -- ecx    : key
   2649   //  -- edx    : receiver
   2650   //  -- esp[0] : return address
   2651   // -----------------------------------
   2652   ElementsKind elements_kind = receiver_map->elements_kind();
   2653   bool is_jsarray = receiver_map->instance_type() == JS_ARRAY_TYPE;
   2654   Handle<Code> stub =
   2655       KeyedStoreElementStub(is_jsarray, elements_kind, grow_mode_).GetCode();
   2656 
   2657   __ DispatchMap(edx, receiver_map, stub, DO_SMI_CHECK);
   2658 
   2659   Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
   2660   __ jmp(ic, RelocInfo::CODE_TARGET);
   2661 
   2662   // Return the generated code.
   2663   return GetCode(NORMAL, factory()->empty_string());
   2664 }
   2665 
   2666 
   2667 Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
   2668     MapHandleList* receiver_maps,
   2669     CodeHandleList* handler_stubs,
   2670     MapHandleList* transitioned_maps) {
   2671   // ----------- S t a t e -------------
   2672   //  -- eax    : value
   2673   //  -- ecx    : key
   2674   //  -- edx    : receiver
   2675   //  -- esp[0] : return address
   2676   // -----------------------------------
   2677   Label miss;
   2678   __ JumpIfSmi(edx, &miss, Label::kNear);
   2679   __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
   2680   // edi: receiver->map().
   2681   for (int i = 0; i < receiver_maps->length(); ++i) {
   2682     __ cmp(edi, receiver_maps->at(i));
   2683     if (transitioned_maps->at(i).is_null()) {
   2684       __ j(equal, handler_stubs->at(i));
   2685     } else {
   2686       Label next_map;
   2687       __ j(not_equal, &next_map, Label::kNear);
   2688       __ mov(ebx, Immediate(transitioned_maps->at(i)));
   2689       __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
   2690       __ bind(&next_map);
   2691     }
   2692   }
   2693   __ bind(&miss);
   2694   Handle<Code> miss_ic = isolate()->builtins()->KeyedStoreIC_Miss();
   2695   __ jmp(miss_ic, RelocInfo::CODE_TARGET);
   2696 
   2697   // Return the generated code.
   2698   return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
   2699 }
   2700 
   2701 
   2702 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
   2703                                                       Handle<JSObject> object,
   2704                                                       Handle<JSObject> last) {
   2705   // ----------- S t a t e -------------
   2706   //  -- eax    : receiver
   2707   //  -- ecx    : name
   2708   //  -- esp[0] : return address
   2709   // -----------------------------------
   2710   Label miss;
   2711 
   2712   // Check that the receiver isn't a smi.
   2713   __ JumpIfSmi(eax, &miss);
   2714 
   2715   ASSERT(last->IsGlobalObject() || last->HasFastProperties());
   2716 
   2717   // Check the maps of the full prototype chain. Also check that
   2718   // global property cells up to (but not including) the last object
   2719   // in the prototype chain are empty.
   2720   CheckPrototypes(object, eax, last, ebx, edx, edi, name, &miss);
   2721 
   2722   // If the last object in the prototype chain is a global object,
   2723   // check that the global property cell is empty.
   2724   if (last->IsGlobalObject()) {
   2725     GenerateCheckPropertyCell(
   2726         masm(), Handle<GlobalObject>::cast(last), name, edx, &miss);
   2727   }
   2728 
   2729   // Return undefined if maps of the full prototype chain are still the
   2730   // same and no global property with this name contains a value.
   2731   __ mov(eax, isolate()->factory()->undefined_value());
   2732   __ ret(0);
   2733 
   2734   __ bind(&miss);
   2735   GenerateLoadMiss(masm(), Code::LOAD_IC);
   2736 
   2737   // Return the generated code.
   2738   return GetCode(NONEXISTENT, factory()->empty_string());
   2739 }
   2740 
   2741 
   2742 Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object,
   2743                                                 Handle<JSObject> holder,
   2744                                                 int index,
   2745                                                 Handle<String> name) {
   2746   // ----------- S t a t e -------------
   2747   //  -- eax    : receiver
   2748   //  -- ecx    : name
   2749   //  -- esp[0] : return address
   2750   // -----------------------------------
   2751   Label miss;
   2752 
   2753   GenerateLoadField(object, holder, eax, ebx, edx, edi, index, name, &miss);
   2754   __ bind(&miss);
   2755   GenerateLoadMiss(masm(), Code::LOAD_IC);
   2756 
   2757   // Return the generated code.
   2758   return GetCode(FIELD, name);
   2759 }
   2760 
   2761 
   2762 Handle<Code> LoadStubCompiler::CompileLoadCallback(
   2763     Handle<String> name,
   2764     Handle<JSObject> object,
   2765     Handle<JSObject> holder,
   2766     Handle<AccessorInfo> callback) {
   2767   // ----------- S t a t e -------------
   2768   //  -- eax    : receiver
   2769   //  -- ecx    : name
   2770   //  -- esp[0] : return address
   2771   // -----------------------------------
   2772   Label miss;
   2773 
   2774   GenerateLoadCallback(object, holder, eax, ecx, ebx, edx, edi, callback,
   2775                        name, &miss);
   2776   __ bind(&miss);
   2777   GenerateLoadMiss(masm(), Code::LOAD_IC);
   2778 
   2779   // Return the generated code.
   2780   return GetCode(CALLBACKS, name);
   2781 }
   2782 
   2783 
   2784 Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
   2785                                                    Handle<JSObject> holder,
   2786                                                    Handle<JSFunction> value,
   2787                                                    Handle<String> name) {
   2788   // ----------- S t a t e -------------
   2789   //  -- eax    : receiver
   2790   //  -- ecx    : name
   2791   //  -- esp[0] : return address
   2792   // -----------------------------------
   2793   Label miss;
   2794 
   2795   GenerateLoadConstant(object, holder, eax, ebx, edx, edi, value, name, &miss);
   2796   __ bind(&miss);
   2797   GenerateLoadMiss(masm(), Code::LOAD_IC);
   2798 
   2799   // Return the generated code.
   2800   return GetCode(CONSTANT_FUNCTION, name);
   2801 }
   2802 
   2803 
   2804 Handle<Code> LoadStubCompiler::CompileLoadInterceptor(Handle<JSObject> receiver,
   2805                                                       Handle<JSObject> holder,
   2806                                                       Handle<String> name) {
   2807   // ----------- S t a t e -------------
   2808   //  -- eax    : receiver
   2809   //  -- ecx    : name
   2810   //  -- esp[0] : return address
   2811   // -----------------------------------
   2812   Label miss;
   2813 
   2814   LookupResult lookup(isolate());
   2815   LookupPostInterceptor(holder, name, &lookup);
   2816 
   2817   // TODO(368): Compile in the whole chain: all the interceptors in
   2818   // prototypes and ultimate answer.
   2819   GenerateLoadInterceptor(receiver, holder, &lookup, eax, ecx, edx, ebx, edi,
   2820                           name, &miss);
   2821 
   2822   __ bind(&miss);
   2823   GenerateLoadMiss(masm(), Code::LOAD_IC);
   2824 
   2825   // Return the generated code.
   2826   return GetCode(INTERCEPTOR, name);
   2827 }
   2828 
   2829 
   2830 Handle<Code> LoadStubCompiler::CompileLoadGlobal(
   2831     Handle<JSObject> object,
   2832     Handle<GlobalObject> holder,
   2833     Handle<JSGlobalPropertyCell> cell,
   2834     Handle<String> name,
   2835     bool is_dont_delete) {
   2836   // ----------- S t a t e -------------
   2837   //  -- eax    : receiver
   2838   //  -- ecx    : name
   2839   //  -- esp[0] : return address
   2840   // -----------------------------------
   2841   Label miss;
   2842 
   2843   // Check that the maps haven't changed.
   2844   __ JumpIfSmi(eax, &miss);
   2845   CheckPrototypes(object, eax, holder, ebx, edx, edi, name, &miss);
   2846 
   2847   // Get the value from the cell.
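           // When the serializer is enabled, reference the cell as an embedded object
           // and load its value field explicitly, so the reference stays relocatable
           // for the snapshot; otherwise the cell's value slot is addressed directly.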
   2848   if (Serializer::enabled()) {
   2849     __ mov(ebx, Immediate(cell));
   2850     __ mov(ebx, FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset));
   2851   } else {
   2852     __ mov(ebx, Operand::Cell(cell));
   2853   }
   2854 
   2855   // Check for a deleted property if the property can actually be deleted.
   2856   if (!is_dont_delete) {
   2857     __ cmp(ebx, factory()->the_hole_value());
   2858     __ j(equal, &miss);
   2859   } else if (FLAG_debug_code) {
   2860     __ cmp(ebx, factory()->the_hole_value());
   2861     __ Check(not_equal, "DontDelete cells can't contain the hole");
   2862   }
   2863 
   2864   Counters* counters = isolate()->counters();
   2865   __ IncrementCounter(counters->named_load_global_stub(), 1);
   2866   __ mov(eax, ebx);
   2867   __ ret(0);
   2868 
   2869   __ bind(&miss);
   2870   __ IncrementCounter(counters->named_load_global_stub_miss(), 1);
   2871   GenerateLoadMiss(masm(), Code::LOAD_IC);
   2872 
   2873   // Return the generated code.
   2874   return GetCode(NORMAL, name);
   2875 }
   2876 
   2877 
   2878 Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
   2879                                                      Handle<JSObject> receiver,
   2880                                                      Handle<JSObject> holder,
   2881                                                      int index) {
   2882   // ----------- S t a t e -------------
   2883   //  -- eax    : key
   2884   //  -- edx    : receiver
   2885   //  -- esp[0] : return address
   2886   // -----------------------------------
   2887   Label miss;
   2888 
   2889   Counters* counters = isolate()->counters();
   2890   __ IncrementCounter(counters->keyed_load_field(), 1);
   2891 
   2892   // Check that the name has not changed.
   2893   __ cmp(eax, Immediate(name));
   2894   __ j(not_equal, &miss);
   2895 
   2896   GenerateLoadField(receiver, holder, edx, ebx, ecx, edi, index, name, &miss);
   2897 
   2898   __ bind(&miss);
   2899   __ DecrementCounter(counters->keyed_load_field(), 1);
   2900   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
   2901 
   2902   // Return the generated code.
   2903   return GetCode(FIELD, name);
   2904 }
   2905 
   2906 
   2907 Handle<Code> KeyedLoadStubCompiler::CompileLoadCallback(
   2908     Handle<String> name,
   2909     Handle<JSObject> receiver,
   2910     Handle<JSObject> holder,
   2911     Handle<AccessorInfo> callback) {
   2912   // ----------- S t a t e -------------
   2913   //  -- eax    : key
   2914   //  -- edx    : receiver
   2915   //  -- esp[0] : return address
   2916   // -----------------------------------
   2917   Label miss;
   2918 
   2919   Counters* counters = isolate()->counters();
   2920   __ IncrementCounter(counters->keyed_load_callback(), 1);
   2921 
   2922   // Check that the name has not changed.
   2923   __ cmp(eax, Immediate(name));
   2924   __ j(not_equal, &miss);
   2925 
   2926   GenerateLoadCallback(receiver, holder, edx, eax, ebx, ecx, edi, callback,
   2927                        name, &miss);
   2928 
   2929   __ bind(&miss);
   2930   __ DecrementCounter(counters->keyed_load_callback(), 1);
   2931   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
   2932 
   2933   // Return the generated code.
   2934   return GetCode(CALLBACKS, name);
   2935 }
   2936 
   2937 
   2938 Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
   2939     Handle<String> name,
   2940     Handle<JSObject> receiver,
   2941     Handle<JSObject> holder,
   2942     Handle<JSFunction> value) {
   2943   // ----------- S t a t e -------------
   2944   //  -- eax    : key
   2945   //  -- edx    : receiver
   2946   //  -- esp[0] : return address
   2947   // -----------------------------------
   2948   Label miss;
   2949 
   2950   Counters* counters = isolate()->counters();
   2951   __ IncrementCounter(counters->keyed_load_constant_function(), 1);
   2952 
   2953   // Check that the name has not changed.
   2954   __ cmp(eax, Immediate(name));
   2955   __ j(not_equal, &miss);
   2956 
   2957   GenerateLoadConstant(
   2958       receiver, holder, edx, ebx, ecx, edi, value, name, &miss);
   2959   __ bind(&miss);
   2960   __ DecrementCounter(counters->keyed_load_constant_function(), 1);
   2961   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
   2962 
   2963   // Return the generated code.
   2964   return GetCode(CONSTANT_FUNCTION, name);
   2965 }
   2966 
   2967 
   2968 Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor(
   2969     Handle<JSObject> receiver,
   2970     Handle<JSObject> holder,
   2971     Handle<String> name) {
   2972   // ----------- S t a t e -------------
   2973   //  -- eax    : key
   2974   //  -- edx    : receiver
   2975   //  -- esp[0] : return address
   2976   // -----------------------------------
   2977   Label miss;
   2978 
   2979   Counters* counters = isolate()->counters();
   2980   __ IncrementCounter(counters->keyed_load_interceptor(), 1);
   2981 
   2982   // Check that the name has not changed.
   2983   __ cmp(eax, Immediate(name));
   2984   __ j(not_equal, &miss);
   2985 
   2986   LookupResult lookup(isolate());
   2987   LookupPostInterceptor(holder, name, &lookup);
   2988   GenerateLoadInterceptor(receiver, holder, &lookup, edx, eax, ecx, ebx, edi,
   2989                           name, &miss);
   2990   __ bind(&miss);
   2991   __ DecrementCounter(counters->keyed_load_interceptor(), 1);
   2992   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
   2993 
   2994   // Return the generated code.
   2995   return GetCode(INTERCEPTOR, name);
   2996 }
   2997 
   2998 
   2999 Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
   3000     Handle<String> name) {
   3001   // ----------- S t a t e -------------
   3002   //  -- eax    : key
   3003   //  -- edx    : receiver
   3004   //  -- esp[0] : return address
   3005   // -----------------------------------
   3006   Label miss;
   3007 
   3008   Counters* counters = isolate()->counters();
   3009   __ IncrementCounter(counters->keyed_load_array_length(), 1);
   3010 
   3011   // Check that the name has not changed.
   3012   __ cmp(eax, Immediate(name));
   3013   __ j(not_equal, &miss);
   3014 
   3015   GenerateLoadArrayLength(masm(), edx, ecx, &miss);
   3016   __ bind(&miss);
   3017   __ DecrementCounter(counters->keyed_load_array_length(), 1);
   3018   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
   3019 
   3020   // Return the generated code.
   3021   return GetCode(CALLBACKS, name);
   3022 }
   3023 
   3024 
   3025 Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
   3026     Handle<String> name) {
   3027   // ----------- S t a t e -------------
   3028   //  -- eax    : key
   3029   //  -- edx    : receiver
   3030   //  -- esp[0] : return address
   3031   // -----------------------------------
   3032   Label miss;
   3033 
   3034   Counters* counters = isolate()->counters();
   3035   __ IncrementCounter(counters->keyed_load_string_length(), 1);
   3036 
   3037   // Check that the name has not changed.
   3038   __ cmp(eax, Immediate(name));
   3039   __ j(not_equal, &miss);
   3040 
   3041   GenerateLoadStringLength(masm(), edx, ecx, ebx, &miss, true);
   3042   __ bind(&miss);
   3043   __ DecrementCounter(counters->keyed_load_string_length(), 1);
   3044   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
   3045 
   3046   // Return the generated code.
   3047   return GetCode(CALLBACKS, name);
   3048 }
   3049 
   3050 
   3051 Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
   3052     Handle<String> name) {
   3053   // ----------- S t a t e -------------
   3054   //  -- eax    : key
   3055   //  -- edx    : receiver
   3056   //  -- esp[0] : return address
   3057   // -----------------------------------
   3058   Label miss;
   3059 
   3060   Counters* counters = isolate()->counters();
   3061   __ IncrementCounter(counters->keyed_load_function_prototype(), 1);
   3062 
   3063   // Check that the name has not changed.
   3064   __ cmp(eax, Immediate(name));
   3065   __ j(not_equal, &miss);
   3066 
   3067   GenerateLoadFunctionPrototype(masm(), edx, ecx, ebx, &miss);
   3068   __ bind(&miss);
   3069   __ DecrementCounter(counters->keyed_load_function_prototype(), 1);
   3070   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
   3071 
   3072   // Return the generated code.
   3073   return GetCode(CALLBACKS, name);
   3074 }
   3075 
   3076 
   3077 Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
   3078     Handle<Map> receiver_map) {
   3079   // ----------- S t a t e -------------
   3080   //  -- eax    : key
   3081   //  -- edx    : receiver
   3082   //  -- esp[0] : return address
   3083   // -----------------------------------
   3084 
   3085   ElementsKind elements_kind = receiver_map->elements_kind();
   3086   Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode();
   3087 
   3088   __ DispatchMap(edx, receiver_map, stub, DO_SMI_CHECK);
   3089 
   3090   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
   3091 
   3092   // Return the generated code.
   3093   return GetCode(NORMAL, factory()->empty_string());
   3094 }
   3095 
   3096 
   3097 Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
   3098     MapHandleList* receiver_maps,
   3099     CodeHandleList* handler_ics) {
   3100   // ----------- S t a t e -------------
   3101   //  -- eax    : key
   3102   //  -- edx    : receiver
   3103   //  -- esp[0] : return address
   3104   // -----------------------------------
   3105   Label miss;
   3106   __ JumpIfSmi(edx, &miss);
   3107 
   3108   Register map_reg = ebx;
   3109   __ mov(map_reg, FieldOperand(edx, HeapObject::kMapOffset));
   3110   int receiver_count = receiver_maps->length();
   3111   for (int current = 0; current < receiver_count; ++current) {
   3112     __ cmp(map_reg, receiver_maps->at(current));
   3113     __ j(equal, handler_ics->at(current));
   3114   }
   3115 
   3116   __ bind(&miss);
   3117   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
   3118 
   3119   // Return the generated code.
   3120   return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
   3121 }
   3122 
   3123 
   3124 // Specialized stub for constructing objects from functions which have only
   3125 // simple assignments of the form this.x = ...; in their body.
   3126 Handle<Code> ConstructStubCompiler::CompileConstructStub(
   3127     Handle<JSFunction> function) {
   3128   // ----------- S t a t e -------------
   3129   //  -- eax : argc
   3130   //  -- edi : constructor
   3131   //  -- esp[0] : return address
   3132   //  -- esp[4] : last argument
   3133   // -----------------------------------
   3134   Label generic_stub_call;
   3135 #ifdef ENABLE_DEBUGGER_SUPPORT
   3136   // Check whether there are any break points in the function code. If there
   3137   // are, jump to the generic constructor stub, which calls the actual code
   3138   // for the function and thereby hits the break points.
   3139   __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
   3140   __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kDebugInfoOffset));
   3141   __ cmp(ebx, factory()->undefined_value());
   3142   __ j(not_equal, &generic_stub_call);
   3143 #endif
   3144 
   3145   // Load the initial map and verify that it is in fact a map.
   3146   __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
   3147   // A smi check covers both a NULL value and a Smi; neither is a valid map.
   3148   __ JumpIfSmi(ebx, &generic_stub_call);
   3149   __ CmpObjectType(ebx, MAP_TYPE, ecx);
   3150   __ j(not_equal, &generic_stub_call);
   3151 
   3152 #ifdef DEBUG
   3153   // Cannot construct functions this way.
   3154   // edi: constructor
   3155   // ebx: initial map
   3156   __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
   3157   __ Assert(not_equal, "Function constructed by construct stub.");
   3158 #endif
   3159 
   3160   // Now allocate the JSObject on the heap by moving the new space allocation
   3161   // top forward.
   3162   // edi: constructor
   3163   // ebx: initial map
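           // The map stores the instance size in pointer-size words; the shift below
           // converts it to a size in bytes for the allocation.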
   3164   __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceSizeOffset));
   3165   __ shl(ecx, kPointerSizeLog2);
   3166   __ AllocateInNewSpace(ecx, edx, ecx, no_reg,
   3167                         &generic_stub_call, NO_ALLOCATION_FLAGS);
   3168 
   3169   // The JSObject is allocated; now initialize the fields and add the heap tag.
   3170   // ebx: initial map
   3171   // edx: JSObject (untagged)
   3172   __ mov(Operand(edx, JSObject::kMapOffset), ebx);
   3173   __ mov(ebx, factory()->empty_fixed_array());
   3174   __ mov(Operand(edx, JSObject::kPropertiesOffset), ebx);
   3175   __ mov(Operand(edx, JSObject::kElementsOffset), ebx);
   3176 
   3177   // Push the allocated object to the stack. This is the object that will be
   3178   // returned (after it is tagged).
   3179   __ push(edx);
   3180 
   3181   // eax: argc
   3182   // edx: JSObject (untagged)
   3183   // Load the address of the first in-object property into edx.
   3184   __ lea(edx, Operand(edx, JSObject::kHeaderSize));
   3185   // Calculate the location of the first argument. The stack contains the
   3186   // allocated object and the return address on top of the argc arguments.
   3187   __ lea(ecx, Operand(esp, eax, times_4, 1 * kPointerSize));
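           // ecx now points at the first argument; argument i is addressed below as
           // Operand(ecx, i * -kPointerSize), i.e. at ecx - i * kPointerSize.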
   3188 
   3189   // Use edi to hold undefined, which is used in several places below.
   3190   __ mov(edi, factory()->undefined_value());
   3191 
   3192   // eax: argc
   3193   // ecx: first argument
   3194   // edx: first in-object property of the JSObject
   3195   // edi: undefined
   3196   // Fill the initialized properties with a constant value or a passed argument
   3197   // depending on the this.x = ...; assignment in the function.
   3198   Handle<SharedFunctionInfo> shared(function->shared());
   3199   for (int i = 0; i < shared->this_property_assignments_count(); i++) {
   3200     if (shared->IsThisPropertyAssignmentArgument(i)) {
   3201       // Check whether the argument assigned to the property was actually
   3202       // passed. If it was not, the property is set to undefined; otherwise
   3203       // it is loaded from the stack.
   3204       int arg_number = shared->GetThisPropertyAssignmentArgument(i);
   3205       __ mov(ebx, edi);
   3206       __ cmp(eax, arg_number);
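               // eax holds argc; 'above' means argc > arg_number, i.e. the argument
               // was actually passed.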
   3207       if (CpuFeatures::IsSupported(CMOV)) {
   3208         CpuFeatures::Scope use_cmov(CMOV);
   3209         __ cmov(above, ebx, Operand(ecx, arg_number * -kPointerSize));
   3210       } else {
   3211         Label not_passed;
   3212         __ j(below_equal, &not_passed);
   3213         __ mov(ebx, Operand(ecx, arg_number * -kPointerSize));
   3214         __ bind(&not_passed);
   3215       }
   3216       // Store value in the property.
   3217       __ mov(Operand(edx, i * kPointerSize), ebx);
   3218     } else {
   3219       // Set the property to the constant value.
   3220       Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
   3221       __ mov(Operand(edx, i * kPointerSize), Immediate(constant));
   3222     }
   3223   }
   3224 
   3225   // Fill the unused in-object property fields with undefined.
   3226   ASSERT(function->has_initial_map());
   3227   for (int i = shared->this_property_assignments_count();
   3228        i < function->initial_map()->inobject_properties();
   3229        i++) {
   3230     __ mov(Operand(edx, i * kPointerSize), edi);
   3231   }
   3232 
   3233   // Move argc to ebx and retrieve and tag the JSObject to return.
   3234   __ mov(ebx, eax);
   3235   __ pop(eax);
   3236   __ or_(eax, Immediate(kHeapObjectTag));
   3237 
   3238   // Remove caller arguments and receiver from the stack and return.
   3239   __ pop(ecx);
   3240   __ lea(esp, Operand(esp, ebx, times_pointer_size, 1 * kPointerSize));
   3241   __ push(ecx);
   3242   Counters* counters = isolate()->counters();
   3243   __ IncrementCounter(counters->constructed_objects(), 1);
   3244   __ IncrementCounter(counters->constructed_objects_stub(), 1);
   3245   __ ret(0);
   3246 
   3247   // Jump to the generic stub in case the specialized code cannot handle the
   3248   // construction.
   3249   __ bind(&generic_stub_call);
   3250   Handle<Code> code = isolate()->builtins()->JSConstructStubGeneric();
   3251   __ jmp(code, RelocInfo::CODE_TARGET);
   3252 
   3253   // Return the generated code.
   3254   return GetCode();
   3255 }
   3256 
   3257 
   3258 #undef __
   3259 #define __ ACCESS_MASM(masm)
   3260 
   3261 
   3262 void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
   3263     MacroAssembler* masm) {
   3264   // ----------- S t a t e -------------
   3265   //  -- eax    : key
   3266   //  -- edx    : receiver
   3267   //  -- esp[0] : return address
   3268   // -----------------------------------
   3269   Label slow, miss_force_generic;
   3270 
   3271   // This stub is meant to be tail-jumped to; the receiver must already
   3272   // have been verified by the caller not to be a smi.
   3273   __ JumpIfNotSmi(eax, &miss_force_generic);
   3274   __ mov(ebx, eax);
   3275   __ SmiUntag(ebx);
   3276   __ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset));
   3277 
   3278   // Push receiver on the stack to free up a register for the dictionary
   3279   // probing.
   3280   __ push(edx);
   3281   __ LoadFromNumberDictionary(&slow,
   3282                               ecx,
   3283                               eax,
   3284                               ebx,
   3285                               edx,
   3286                               edi,
   3287                               eax);
   3288   // Pop receiver before returning.
   3289   __ pop(edx);
   3290   __ ret(0);
   3291 
   3292   __ bind(&slow);
   3293   __ pop(edx);
   3294 
   3295   // ----------- S t a t e -------------
   3296   //  -- eax    : key
   3298   //  -- edx    : receiver
   3299   //  -- esp[0] : return address
   3300   // -----------------------------------
   3301 
   3302   Handle<Code> slow_ic =
   3303       masm->isolate()->builtins()->KeyedLoadIC_Slow();
   3304   __ jmp(slow_ic, RelocInfo::CODE_TARGET);
   3305 
   3306   __ bind(&miss_force_generic);
   3307   // ----------- S t a t e -------------
   3308   //  -- eax    : key
   3310   //  -- edx    : receiver
   3311   //  -- esp[0] : return address
   3312   // -----------------------------------
   3313 
   3314   Handle<Code> miss_force_generic_ic =
   3315       masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
   3316   __ jmp(miss_force_generic_ic, RelocInfo::CODE_TARGET);
   3317 }
   3318 
   3319 
   3320 void KeyedLoadStubCompiler::GenerateLoadExternalArray(
   3321     MacroAssembler* masm,
   3322     ElementsKind elements_kind) {
   3323   // ----------- S t a t e -------------
   3324   //  -- eax    : key
   3325   //  -- edx    : receiver
   3326   //  -- esp[0] : return address
   3327   // -----------------------------------
   3328   Label miss_force_generic, failed_allocation, slow;
   3329 
   3330   // This stub is meant to be tail-jumped to; the receiver must already
   3331   // have been verified by the caller not to be a smi.
   3332 
   3333   // Check that the key is a smi.
   3334   __ JumpIfNotSmi(eax, &miss_force_generic);
   3335 
   3336   // Check that the index is in range.
   3337   __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
   3338   __ cmp(eax, FieldOperand(ebx, ExternalArray::kLengthOffset));
   3339   // Unsigned comparison catches both negative and too-large values.
   3340   __ j(above_equal, &miss_force_generic);
   3341   __ mov(ebx, FieldOperand(ebx, ExternalArray::kExternalPointerOffset));
   3342   // ebx: base pointer of external storage
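           // The key in eax is a smi, i.e. the index shifted left by one. The 8-bit
           // cases untag it first; the wider cases keep it tagged so that scaling by
           // times_1/times_2/times_4 yields byte offsets of 2, 4 and 8 per element.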
   3343   switch (elements_kind) {
   3344     case EXTERNAL_BYTE_ELEMENTS:
   3345       __ SmiUntag(eax);  // Untag the index.
   3346       __ movsx_b(eax, Operand(ebx, eax, times_1, 0));
   3347       break;
   3348     case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
   3349     case EXTERNAL_PIXEL_ELEMENTS:
   3350       __ SmiUntag(eax);  // Untag the index.
   3351       __ movzx_b(eax, Operand(ebx, eax, times_1, 0));
   3352       break;
   3353     case EXTERNAL_SHORT_ELEMENTS:
   3354       __ movsx_w(eax, Operand(ebx, eax, times_1, 0));
   3355       break;
   3356     case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
   3357       __ movzx_w(eax, Operand(ebx, eax, times_1, 0));
   3358       break;
   3359     case EXTERNAL_UNSIGNED_INT_ELEMENTS:
   3360     case EXTERNAL_INT_ELEMENTS:
   3361       __ mov(ecx, Operand(ebx, eax, times_2, 0));
   3362       break;
   3363     case EXTERNAL_FLOAT_ELEMENTS:
   3364       __ fld_s(Operand(ebx, eax, times_2, 0));
   3365       break;
   3366     case EXTERNAL_DOUBLE_ELEMENTS:
   3367       __ fld_d(Operand(ebx, eax, times_4, 0));
   3368       break;
   3369     default:
   3370       UNREACHABLE();
   3371       break;
   3372   }
   3373 
   3374   // For integer array types:
   3375   // ecx: value
   3376   // For floating-point array type:
   3377   // FP(0): value
   3378 
   3379   if (elements_kind == EXTERNAL_INT_ELEMENTS ||
   3380       elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) {
   3381     // For the Int and UnsignedInt array types, we need to see whether
   3382     // the value can be represented in a Smi. If not, we need to convert
   3383     // it to a HeapNumber.
   3384     Label box_int;
   3385     if (elements_kind == EXTERNAL_INT_ELEMENTS) {
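               // Subtracting 0xC0000000 (-2^30) leaves the sign flag clear exactly
               // when the value lies in the 31-bit smi range [-2^30, 2^30 - 1].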
   3386       __ cmp(ecx, 0xC0000000);
   3387       __ j(sign, &box_int);
   3388     } else {
   3389       ASSERT_EQ(EXTERNAL_UNSIGNED_INT_ELEMENTS, elements_kind);
   3390       // The test is different for unsigned int values. Since we need
   3391       // the value to be in the range of a positive smi, we can't
   3392       // handle either of the top two bits being set in the value.
   3393       __ test(ecx, Immediate(0xC0000000));
   3394       __ j(not_zero, &box_int);
   3395     }
   3396 
   3397     __ mov(eax, ecx);
   3398     __ SmiTag(eax);
   3399     __ ret(0);
   3400 
   3401     __ bind(&box_int);
   3402 
   3403     // Allocate a HeapNumber for the int and perform int-to-double
   3404     // conversion.
   3405     if (elements_kind == EXTERNAL_INT_ELEMENTS) {
   3406       __ push(ecx);
   3407       __ fild_s(Operand(esp, 0));
   3408       __ pop(ecx);
   3409     } else {
   3410       ASSERT_EQ(EXTERNAL_UNSIGNED_INT_ELEMENTS, elements_kind);
   3411       // Need to zero-extend the value.
   3412       // There's no fild variant for unsigned values, so zero-extend
   3413       // to a 64-bit int manually.
   3414       __ push(Immediate(0));
   3415       __ push(ecx);
   3416       __ fild_d(Operand(esp, 0));
   3417       __ pop(ecx);
   3418       __ pop(ecx);
   3419     }
   3420     // FP(0): value
   3421     __ AllocateHeapNumber(ecx, ebx, edi, &failed_allocation);
   3422     // Set the value.
   3423     __ mov(eax, ecx);
   3424     __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
   3425     __ ret(0);
   3426   } else if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
   3427              elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
   3428     // For the floating-point array type, we need to always allocate a
   3429     // HeapNumber.
   3430     __ AllocateHeapNumber(ecx, ebx, edi, &failed_allocation);
   3431     // Set the value.
   3432     __ mov(eax, ecx);
   3433     __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
   3434     __ ret(0);
   3435   } else {
   3436     __ SmiTag(eax);
   3437     __ ret(0);
   3438   }
   3439 
   3440   // If we fail allocation of the HeapNumber, we still have a value on
   3441   // top of the FPU stack. Remove it.
   3442   __ bind(&failed_allocation);
   3443   __ fstp(0);
   3444   // Fall through to slow case.
   3445 
   3446   // Slow case: Jump to runtime.
   3447   __ bind(&slow);
   3448   Counters* counters = masm->isolate()->counters();
   3449   __ IncrementCounter(counters->keyed_load_external_array_slow(), 1);
   3450 
   3451   // ----------- S t a t e -------------
   3452   //  -- eax    : key
   3453   //  -- edx    : receiver
   3454   //  -- esp[0] : return address
   3455   // -----------------------------------
   3456 
   3457   Handle<Code> ic = masm->isolate()->builtins()->KeyedLoadIC_Slow();
   3458   __ jmp(ic, RelocInfo::CODE_TARGET);
   3459 
   3460   // ----------- S t a t e -------------
   3461   //  -- eax    : key
   3462   //  -- edx    : receiver
   3463   //  -- esp[0] : return address
   3464   // -----------------------------------
   3465 
   3466   // Miss case: Jump to runtime.
   3467   __ bind(&miss_force_generic);
   3468   Handle<Code> miss_ic =
   3469       masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
   3470   __ jmp(miss_ic, RelocInfo::CODE_TARGET);
   3471 }
   3472 
   3473 
   3474 void KeyedStoreStubCompiler::GenerateStoreExternalArray(
   3475     MacroAssembler* masm,
   3476     ElementsKind elements_kind) {
   3477   // ----------- S t a t e -------------
   3478   //  -- eax    : value
   3479   //  -- ecx    : key
          //  -- edx    : receiver
   3480   //  -- esp[0] : return address
   3481   // -----------------------------------
   3482   Label miss_force_generic, slow, check_heap_number;
   3483 
   3484   // This stub is meant to be tail-jumped to; the receiver must already
   3485   // have been verified by the caller not to be a smi.
   3486 
   3487   // Check that the key is a smi.
   3488   __ JumpIfNotSmi(ecx, &miss_force_generic);
   3489 
   3490   // Check that the index is in range.
   3491   __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
   3492   __ cmp(ecx, FieldOperand(edi, ExternalArray::kLengthOffset));
   3493   // Unsigned comparison catches both negative and too-large values.
   3494   __ j(above_equal, &slow);
   3495 
   3496   // Handle both smis and HeapNumbers in the fast path. Go to the
   3497   // runtime for all other kinds of values.
   3498   // eax: value
   3499   // edx: receiver
   3500   // ecx: key
   3501   // edi: elements array
   3502   if (elements_kind == EXTERNAL_PIXEL_ELEMENTS) {
   3503     __ JumpIfNotSmi(eax, &slow);
   3504   } else {
   3505     __ JumpIfNotSmi(eax, &check_heap_number);
   3506   }
   3507 
   3508   // smi case
   3509   __ mov(ebx, eax);  // Preserve the value in eax as the return value.
   3510   __ SmiUntag(ebx);
   3511   __ mov(edi, FieldOperand(edi, ExternalArray::kExternalPointerOffset));
   3512   // edi: base pointer of external storage
   3513   switch (elements_kind) {
   3514     case EXTERNAL_PIXEL_ELEMENTS:
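               // Pixel (clamped byte) elements clamp the stored value to [0, 255]
               // rather than truncating it modulo 256.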
   3515       __ ClampUint8(ebx);
   3516       __ SmiUntag(ecx);
   3517       __ mov_b(Operand(edi, ecx, times_1, 0), ebx);
   3518       break;
   3519     case EXTERNAL_BYTE_ELEMENTS:
   3520     case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
   3521       __ SmiUntag(ecx);
   3522       __ mov_b(Operand(edi, ecx, times_1, 0), ebx);
   3523       break;
   3524     case EXTERNAL_SHORT_ELEMENTS:
   3525     case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
   3526       __ mov_w(Operand(edi, ecx, times_1, 0), ebx);
   3527       break;
   3528     case EXTERNAL_INT_ELEMENTS:
   3529     case EXTERNAL_UNSIGNED_INT_ELEMENTS:
   3530       __ mov(Operand(edi, ecx, times_2, 0), ebx);
   3531       break;
   3532     case EXTERNAL_FLOAT_ELEMENTS:
   3533     case EXTERNAL_DOUBLE_ELEMENTS:
   3534       // Need to perform int-to-float conversion.
   3535       __ push(ebx);
   3536       __ fild_s(Operand(esp, 0));
   3537       __ pop(ebx);
   3538       if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
   3539         __ fstp_s(Operand(edi, ecx, times_2, 0));
   3540       } else {  // elements_kind == EXTERNAL_DOUBLE_ELEMENTS.
   3541         __ fstp_d(Operand(edi, ecx, times_4, 0));
   3542       }
   3543       break;
   3544     default:
   3545       UNREACHABLE();
   3546       break;
   3547   }
   3548   __ ret(0);  // Return the original value.
   3549 
   3550   // TODO(danno): handle heap number -> pixel array conversion
   3551   if (elements_kind != EXTERNAL_PIXEL_ELEMENTS) {
   3552     __ bind(&check_heap_number);
   3553     // eax: value
   3554     // edx: receiver
   3555     // ecx: key
   3556     // edi: elements array
   3557     __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
   3558            Immediate(masm->isolate()->factory()->heap_number_map()));
   3559     __ j(not_equal, &slow);
   3560 
   3561     // The WebGL specification leaves the behavior of storing NaN and
   3562     // +/-Infinity into integer arrays basically undefined. For more
   3563     // reproducible behavior, convert these to zero.
   3564     __ mov(edi, FieldOperand(edi, ExternalArray::kExternalPointerOffset));
   3565     // edi: base pointer of external storage
   3566     if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
   3567       __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
   3568       __ fstp_s(Operand(edi, ecx, times_2, 0));
   3569       __ ret(0);
   3570     } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
   3571       __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
   3572       __ fstp_d(Operand(edi, ecx, times_4, 0));
   3573       __ ret(0);
   3574     } else {
   3575       // Perform float-to-int conversion with truncation (round-to-zero)
   3576       // behavior.
   3577 
   3578       // For the moment we make the slow call to the runtime on
   3579       // processors that don't support SSE2. The code in IntegerConvert
   3580       // (code-stubs-ia32.cc) is roughly what is needed here though the
   3581       // conversion failure case does not need to be handled.
   3582       if (CpuFeatures::IsSupported(SSE2)) {
   3583         if (elements_kind != EXTERNAL_INT_ELEMENTS &&
   3584             elements_kind != EXTERNAL_UNSIGNED_INT_ELEMENTS) {
   3585           ASSERT(CpuFeatures::IsSupported(SSE2));
   3586           CpuFeatures::Scope scope(SSE2);
   3587           __ cvttsd2si(ebx, FieldOperand(eax, HeapNumber::kValueOffset));
   3588           // ebx: untagged integer value
   3589           switch (elements_kind) {
   3590             case EXTERNAL_PIXEL_ELEMENTS:
   3591               __ ClampUint8(ebx);
   3592               // Fall through.
   3593             case EXTERNAL_BYTE_ELEMENTS:
   3594             case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
   3595               __ SmiUntag(ecx);
   3596               __ mov_b(Operand(edi, ecx, times_1, 0), ebx);
   3597               break;
   3598             case EXTERNAL_SHORT_ELEMENTS:
   3599             case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
   3600               __ mov_w(Operand(edi, ecx, times_1, 0), ebx);
   3601               break;
   3602             default:
   3603               UNREACHABLE();
   3604               break;
   3605           }
   3606         } else {
   3607           if (CpuFeatures::IsSupported(SSE3)) {
   3608             CpuFeatures::Scope scope(SSE3);
   3609             // fisttp stores values as signed integers. To represent the
   3610             // entire range of int and unsigned int arrays, store as a
   3611             // 64-bit int and discard the high 32 bits.
   3612             // If the value is NaN or +/-infinity, the result is 0x80000000,
   3613             // which is automatically zero when taken mod 2^n, n < 32.
   3614             __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
   3615             __ sub(esp, Immediate(2 * kPointerSize));
   3616             __ fisttp_d(Operand(esp, 0));
   3617             __ pop(ebx);
   3618             __ add(esp, Immediate(kPointerSize));
   3619           } else {
   3620             ASSERT(CpuFeatures::IsSupported(SSE2));
   3621             CpuFeatures::Scope scope(SSE2);
   3622             // We can easily implement the correct rounding behavior for the
   3623             // range [0, 2^31-1]. For the time being, to keep this code simple,
   3624             // make the slow runtime call for values outside this range.
   3625             // Note: we could do better for signed int arrays.
   3626             __ movd(xmm0, FieldOperand(eax, HeapNumber::kValueOffset));
   3627             // LoadPowerOf2 clobbers ebx, so save and restore it around the call.
   3628             __ push(ebx);
   3629             __ LoadPowerOf2(xmm1, ebx, 31);
   3630             __ pop(ebx);
   3631             __ ucomisd(xmm1, xmm0);
   3632             __ j(above_equal, &slow);
   3633             __ cvttsd2si(ebx, Operand(xmm0));
   3634           }
   3635           // ebx: untagged integer value
   3636           __ mov(Operand(edi, ecx, times_2, 0), ebx);
   3637         }
   3638         __ ret(0);  // Return original value.
   3639       }
   3640     }
   3641   }
   3642 
   3643   // Slow case: call runtime.
   3644   __ bind(&slow);
   3645   Counters* counters = masm->isolate()->counters();
   3646   __ IncrementCounter(counters->keyed_store_external_array_slow(), 1);
   3647 
   3648   // ----------- S t a t e -------------
   3649   //  -- eax    : value
   3650   //  -- ecx    : key
   3651   //  -- edx    : receiver
   3652   //  -- esp[0] : return address
   3653   // -----------------------------------
   3654 
   3655   Handle<Code> ic = masm->isolate()->builtins()->KeyedStoreIC_Slow();
   3656   __ jmp(ic, RelocInfo::CODE_TARGET);
   3657 
   3658   // ----------- S t a t e -------------
   3659   //  -- eax    : value
   3660   //  -- ecx    : key
   3661   //  -- edx    : receiver
   3662   //  -- esp[0] : return address
   3663   // -----------------------------------
   3664 
   3665   __ bind(&miss_force_generic);
   3666   Handle<Code> miss_ic =
   3667       masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
   3668   __ jmp(miss_ic, RelocInfo::CODE_TARGET);
   3669 }
   3670 
   3671 
   3672 void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
   3673   // ----------- S t a t e -------------
   3674   //  -- eax    : key
   3675   //  -- edx    : receiver
   3676   //  -- esp[0] : return address
   3677   // -----------------------------------
   3678   Label miss_force_generic;
   3679 
   3680   // This stub is meant to be tail-jumped to; the receiver must already
   3681   // have been verified by the caller not to be a smi.
   3682 
   3683   // Check that the key is a smi.
   3684   __ JumpIfNotSmi(eax, &miss_force_generic);
   3685 
   3686   // Get the elements array.
   3687   __ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset));
   3688   __ AssertFastElements(ecx);
   3689 
   3690   // Check that the key is within bounds.
   3691   __ cmp(eax, FieldOperand(ecx, FixedArray::kLengthOffset));
   3692   __ j(above_equal, &miss_force_generic);
   3693 
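           // The key in eax is a smi (the index shifted left by one), so scaling it
           // by times_2 produces a byte offset of index * kPointerSize.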
   3694   // Load the result and make sure it's not the hole.
   3695   __ mov(ebx, Operand(ecx, eax, times_2,
   3696                       FixedArray::kHeaderSize - kHeapObjectTag));
   3697   __ cmp(ebx, masm->isolate()->factory()->the_hole_value());
   3698   __ j(equal, &miss_force_generic);
   3699   __ mov(eax, ebx);
   3700   __ ret(0);
   3701 
   3702   __ bind(&miss_force_generic);
   3703   Handle<Code> miss_ic =
   3704       masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
   3705   __ jmp(miss_ic, RelocInfo::CODE_TARGET);
   3706 }
   3707 
   3708 
   3709 void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
   3710     MacroAssembler* masm) {
   3711   // ----------- S t a t e -------------
   3712   //  -- eax    : key
   3713   //  -- edx    : receiver
   3714   //  -- esp[0] : return address
   3715   // -----------------------------------
   3716   Label miss_force_generic, slow_allocate_heapnumber;
   3717 
   3718   // This stub is meant to be tail-jumped to; the receiver must already
   3719   // have been verified by the caller not to be a smi.
   3720 
   3721   // Check that the key is a smi.
   3722   __ JumpIfNotSmi(eax, &miss_force_generic);
   3723 
   3724   // Get the elements array.
   3725   __ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset));
   3726   __ AssertFastElements(ecx);
   3727 
   3728   // Check that the key is within bounds.
   3729   __ cmp(eax, FieldOperand(ecx, FixedDoubleArray::kLengthOffset));
   3730   __ j(above_equal, &miss_force_generic);
   3731 
   3732   // Check for the hole
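           // The hole in a FixedDoubleArray is stored as a NaN with a distinguished
           // bit pattern; comparing only its upper 32 bits (kHoleNanUpper32), at
           // header offset + 4, is sufficient. The smi key scaled by times_4 gives a
           // byte offset of index * 8, the size of a double.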
   3733   uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
   3734   __ cmp(FieldOperand(ecx, eax, times_4, offset), Immediate(kHoleNanUpper32));
   3735   __ j(equal, &miss_force_generic);
   3736 
   3737   // Always allocate a heap number for the result.
   3738   if (CpuFeatures::IsSupported(SSE2)) {
   3739     CpuFeatures::Scope use_sse2(SSE2);
   3740     __ movdbl(xmm0, FieldOperand(ecx, eax, times_4,
   3741                                  FixedDoubleArray::kHeaderSize));
   3742   } else {
   3743     __ fld_d(FieldOperand(ecx, eax, times_4, FixedDoubleArray::kHeaderSize));
   3744   }
   3745   __ AllocateHeapNumber(ecx, ebx, edi, &slow_allocate_heapnumber);
   3746   // Set the value.
   3747   if (CpuFeatures::IsSupported(SSE2)) {
   3748     CpuFeatures::Scope use_sse2(SSE2);
   3749     __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm0);
   3750   } else {
   3751     __ fstp_d(FieldOperand(ecx, HeapNumber::kValueOffset));
   3752   }
   3753   __ mov(eax, ecx);
   3754   __ ret(0);
   3755 
   3756   __ bind(&slow_allocate_heapnumber);
   3757   // A value was pushed on the floating point stack before the allocation; if
   3758   // the allocation fails, it needs to be removed.
   3759   if (!CpuFeatures::IsSupported(SSE2)) {
   3760     __ fstp(0);
   3761   }
   3762   Handle<Code> slow_ic =
   3763       masm->isolate()->builtins()->KeyedLoadIC_Slow();
   3764   __ jmp(slow_ic, RelocInfo::CODE_TARGET);
   3765 
   3766   __ bind(&miss_force_generic);
   3767   Handle<Code> miss_ic =
   3768       masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
   3769   __ jmp(miss_ic, RelocInfo::CODE_TARGET);
   3770 }
   3771 
   3772 
   3773 void KeyedStoreStubCompiler::GenerateStoreFastElement(
   3774     MacroAssembler* masm,
   3775     bool is_js_array,
   3776     ElementsKind elements_kind,
   3777     KeyedAccessGrowMode grow_mode) {
   3778   // ----------- S t a t e -------------
   3779   //  -- eax    : value
   3780   //  -- ecx    : key
   3781   //  -- edx    : receiver
   3782   //  -- esp[0] : return address
   3783   // -----------------------------------
   3784   Label miss_force_generic, grow, slow, transition_elements_kind;
   3785   Label check_capacity, prepare_slow, finish_store, commit_backing_store;
   3786 
   3787   // This stub is meant to be tail-jumped to; the receiver must already
   3788   // have been verified by the caller not to be a smi.
   3789 
   3790   // Check that the key is a smi.
   3791   __ JumpIfNotSmi(ecx, &miss_force_generic);
   3792 
   3793   if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
   3794     __ JumpIfNotSmi(eax, &transition_elements_kind);
   3795   }
   3796 
   3797   // Get the elements array and make sure it is a fast element array, not a copy-on-write ('cow') array.
   3798   __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
   3799   if (is_js_array) {
   3800     // Check that the key is within bounds.
   3801     __ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset));  // smis.
   3802     if (grow_mode == ALLOW_JSARRAY_GROWTH) {
   3803       __ j(above_equal, &grow);
   3804     } else {
   3805       __ j(above_equal, &miss_force_generic);
   3806     }
   3807   } else {
   3808     // Check that the key is within bounds.
   3809     __ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset));  // smis.
   3810     __ j(above_equal, &miss_force_generic);
   3811   }
   3812 
   3813   __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
   3814          Immediate(masm->isolate()->factory()->fixed_array_map()));
   3815   __ j(not_equal, &miss_force_generic);
   3816 
   3817   __ bind(&finish_store);
   3818   if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
   3819     // ecx is a smi, so use times_half_pointer_size instead of
   3820     // times_pointer_size.
   3821     __ mov(FieldOperand(edi,
   3822                         ecx,
   3823                         times_half_pointer_size,
   3824                         FixedArray::kHeaderSize), eax);
   3825   } else {
   3826     ASSERT(elements_kind == FAST_ELEMENTS);
   3827     // Do the store and update the write barrier.
   3828     // ecx is a smi, so use times_half_pointer_size instead of
   3829     // times_pointer_size.
   3830     __ lea(ecx, FieldOperand(edi,
   3831                              ecx,
   3832                              times_half_pointer_size,
   3833                              FixedArray::kHeaderSize));
   3834     __ mov(Operand(ecx, 0), eax);
   3835     // Make sure to preserve the value in register eax.
   3836     __ mov(ebx, eax);
   3837     __ RecordWrite(edi, ecx, ebx, kDontSaveFPRegs);
   3838   }
   3839 
   3840   // Done.
   3841   __ ret(0);
   3842 
   3843   // Handle store cache miss, replacing the ic with the generic stub.
   3844   __ bind(&miss_force_generic);
   3845   Handle<Code> ic_force_generic =
   3846       masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
   3847   __ jmp(ic_force_generic, RelocInfo::CODE_TARGET);
   3848 
   3849   // Handle transition to other elements kinds without using the generic stub.
   3850   __ bind(&transition_elements_kind);
   3851   Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
   3852   __ jmp(ic_miss, RelocInfo::CODE_TARGET);
   3853 
   3854   if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
   3855     // Handle transition requiring the array to grow.
   3856     __ bind(&grow);
   3857 
   3858     // Make sure the array is only growing by a single element; anything else
   3859     // must be handled by the runtime. Flags are already set by the previous
   3860     // compare.
   3861     __ j(not_equal, &miss_force_generic);
   3862 
   3863     // Check for the empty array, and preallocate a small backing store if
   3864     // possible.
   3865     __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
   3866     __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
   3867     __ j(not_equal, &check_capacity);
   3868 
   3869     int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
   3870     __ AllocateInNewSpace(size, edi, ebx, ecx, &prepare_slow, TAG_OBJECT);
   3871     // The key is known to be the array length (zero) and is not needed again
   3872 
   3873     // eax: value
   3874     // ecx: key
   3875     // edx: receiver
   3876     // edi: elements
   3877     // Initialize the new backing store: set its map and length.
   3878     __ mov(FieldOperand(edi, JSObject::kMapOffset),
   3879            Immediate(masm->isolate()->factory()->fixed_array_map()));
   3880     __ mov(FieldOperand(edi, FixedArray::kLengthOffset),
   3881            Immediate(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
   3882     __ mov(ebx, Immediate(masm->isolate()->factory()->the_hole_value()));
   3883     for (int i = 1; i < JSArray::kPreallocatedArrayElements; ++i) {
   3884       __ mov(FieldOperand(edi, FixedArray::SizeFor(i)), ebx);
   3885     }
   3886 
   3887     // Store the element at index zero.
   3888     __ mov(FieldOperand(edi, FixedArray::SizeFor(0)), eax);
   3889 
   3890     // Install the new backing store in the JSArray.
   3891     __ mov(FieldOperand(edx, JSObject::kElementsOffset), edi);
   3892     __ RecordWriteField(edx, JSObject::kElementsOffset, edi, ebx,
   3893                         kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
   3894 
   3895     // Increment the length of the array.
   3896     __ mov(FieldOperand(edx, JSArray::kLengthOffset),
   3897            Immediate(Smi::FromInt(1)));
   3898     __ ret(0);
   3899 
   3900     __ bind(&check_capacity);
   3901     __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
   3902            Immediate(masm->isolate()->factory()->fixed_cow_array_map()));
   3903     __ j(equal, &miss_force_generic);
   3904 
   3905     // eax: value
   3906     // ecx: key
   3907     // edx: receiver
   3908     // edi: elements
   3909     // Make sure that the backing store can hold additional elements.
   3910     __ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset));
   3911     __ j(above_equal, &slow);
   3912 
   3913     // Grow the array and finish the store.
   3914     __ add(FieldOperand(edx, JSArray::kLengthOffset),
   3915            Immediate(Smi::FromInt(1)));
   3916     __ jmp(&finish_store);
   3917 
   3918     __ bind(&prepare_slow);
   3919     // Restore the key, which is known to be the array length.
   3920     __ mov(ecx, Immediate(0));
   3921 
   3922     __ bind(&slow);
   3923     Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
   3924     __ jmp(ic_slow, RelocInfo::CODE_TARGET);
   3925   }
   3926 }
   3927 
   3928 
   3929 void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
   3930     MacroAssembler* masm,
   3931     bool is_js_array,
   3932     KeyedAccessGrowMode grow_mode) {
   3933   // ----------- S t a t e -------------
   3934   //  -- eax    : value
   3935   //  -- ecx    : key
   3936   //  -- edx    : receiver
   3937   //  -- esp[0] : return address
   3938   // -----------------------------------
   3939   Label miss_force_generic, transition_elements_kind, grow, slow;
   3940   Label check_capacity, prepare_slow, finish_store, commit_backing_store;
   3941 
   3942   // This stub is meant to be tail-jumped to; the receiver must already
   3943   // have been verified by the caller not to be a smi.
   3944 
   3945   // Check that the key is a smi.
   3946   __ JumpIfNotSmi(ecx, &miss_force_generic);
   3947 
   3948   // Get the elements array.
   3949   __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
   3950   __ AssertFastElements(edi);
   3951 
   3952   if (is_js_array) {
   3953     // Check that the key is within bounds.
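    // Both the key and the length are smis, so their tagged representations
    // can be compared directly; the unsigned above_equal check also rejects
    // negative keys, which compare as large unsigned values.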
    __ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset));  // smis.
    if (grow_mode == ALLOW_JSARRAY_GROWTH) {
      __ j(above_equal, &grow);
    } else {
      __ j(above_equal, &miss_force_generic);
    }
  } else {
    // Check that the key is within bounds.
    __ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset));  // smis.
    __ j(above_equal, &miss_force_generic);
  }

  __ bind(&finish_store);
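  // StoreNumberToDoubleElements converts the value in eax (a smi or a heap
  // number) to a double and writes it into the FixedDoubleArray in edi at the
  // smi key in ecx, using edx and xmm0 as scratch registers; values that are
  // not numbers branch to &transition_elements_kind.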
  __ StoreNumberToDoubleElements(eax, edi, ecx, edx, xmm0,
                                 &transition_elements_kind, true);
  __ ret(0);

  // Handle the store cache miss by replacing the IC with the generic stub.
  __ bind(&miss_force_generic);
  Handle<Code> ic_force_generic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ jmp(ic_force_generic, RelocInfo::CODE_TARGET);

  // Handle transition to other elements kinds without using the generic stub.
  __ bind(&transition_elements_kind);
  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
  __ jmp(ic_miss, RelocInfo::CODE_TARGET);

  if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
    // Handle transition requiring the array to grow.
    __ bind(&grow);

    // Make sure the array is only growing by a single element; anything else
    // must be handled by the runtime. The flags are still set from the bounds
    // check above.
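    // The bounds check above compared the key with the current array length,
    // so growing by exactly one element means key == length; any other
    // relation takes the branch to the force-generic miss handler.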
    __ j(not_equal, &miss_force_generic);

    // Transition on values that can't be stored in a FixedDoubleArray.
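    // Only smis and heap numbers can be converted to the unboxed doubles that
    // a FixedDoubleArray holds; any other value requires transitioning the
    // receiver to an object-valued elements kind first.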
    Label value_is_smi;
    __ JumpIfSmi(eax, &value_is_smi);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           Immediate(Handle<Map>(masm->isolate()->heap()->heap_number_map())));
    __ j(not_equal, &transition_elements_kind);
    __ bind(&value_is_smi);

    // Check for the empty array, and preallocate a small backing store if
    // possible.
    __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
    __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
    __ j(not_equal, &check_capacity);

    int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
    __ AllocateInNewSpace(size, edi, ebx, ecx, &prepare_slow, TAG_OBJECT);
    // Restore the key, which is known to be the array length.
    __ mov(ecx, Immediate(0));

    // eax: value
    // ecx: key
    // edx: receiver
    // edi: elements
    // Initialize the new FixedDoubleArray. Leave its elements uninitialized
    // for efficiency; they are guaranteed to be initialized before use.
    __ mov(FieldOperand(edi, JSObject::kMapOffset),
           Immediate(masm->isolate()->factory()->fixed_double_array_map()));
    __ mov(FieldOperand(edi, FixedDoubleArray::kLengthOffset),
           Immediate(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
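    // Leaving the payload uninitialized is safe with respect to GC because a
    // FixedDoubleArray holds unboxed doubles: the garbage collector never
    // interprets its contents as tagged pointers.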

    // Install the new backing store in the JSArray.
    __ mov(FieldOperand(edx, JSObject::kElementsOffset), edi);
    __ RecordWriteField(edx, JSObject::kElementsOffset, edi, ebx,
                        kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

    // Increment the length of the array.
    __ add(FieldOperand(edx, JSArray::kLengthOffset),
           Immediate(Smi::FromInt(1)));
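    // RecordWriteField clobbers its value and scratch registers (edi and ebx
    // here), so reload the elements pointer, which &finish_store expects in
    // edi.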
    __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
    __ jmp(&finish_store);

    __ bind(&check_capacity);
    // eax: value
    // ecx: key
    // edx: receiver
    // edi: elements
    // Make sure that the backing store can hold additional elements.
    __ cmp(ecx, FieldOperand(edi, FixedDoubleArray::kLengthOffset));
    __ j(above_equal, &slow);

    // Grow the array and finish the store.
    __ add(FieldOperand(edx, JSArray::kLengthOffset),
           Immediate(Smi::FromInt(1)));
    __ jmp(&finish_store);

    __ bind(&prepare_slow);
    // Restore the key, which is known to be the array length.
    __ mov(ecx, Immediate(0));

    __ bind(&slow);
    Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
    __ jmp(ic_slow, RelocInfo::CODE_TARGET);
  }
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32