// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_IA32

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


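// Conceptually, each stub cache table entry holds three pointers; a sketch
// of the layout the probing code below relies on (the actual definition
// lives in stub-cache.h):
//
//   struct Entry {
//     Name* key;    // the property name
//     Code* value;  // the handler code object
//     Map*  map;    // the receiver map
//   };
//
// On ia32 this is 3 * kPointerSize == 12 bytes, which GenerateProbe asserts
// via sizeof(Entry) == 12 and ProbeTable exploits with its multiply-by-3.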
static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register name,
                       Register receiver,
                       // The number of the cache entry, pointer-size scaled.
                       Register offset,
                       Register extra) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  Label miss;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
  __ lea(offset, Operand(offset, offset, times_2, 0));
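  // At this point offset holds entry_index * 12, i.e. entry_index *
  // sizeof(Entry): the incoming value was already entry_index * kPointerSize
  // (see GenerateProbe), and the lea above tripled it.  Entry i's fields can
  // therefore be addressed as {key,value,map}_offset + offset with times_1
  // scaling, as done below.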

  if (extra.is_valid()) {
    // Get the code entry from the cache.
    __ mov(extra, Operand::StaticArray(offset, times_1, value_offset));

    // Check that the key in the entry matches the name.
    __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
    __ j(not_equal, &miss);

    // Check the map matches.
    __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
    __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
    __ j(not_equal, &miss);

    // Check that the flags match what we're looking for.
    __ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
    __ and_(offset, ~Code::kFlagsNotUsedInLookup);
    __ cmp(offset, flags);
    __ j(not_equal, &miss);

#ifdef DEBUG
    if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
      __ jmp(&miss);
    } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
      __ jmp(&miss);
    }
#endif

    // Jump to the first instruction in the code stub.
    __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(extra);

    __ bind(&miss);
  } else {
    // Save the offset on the stack.
    __ push(offset);

    // Check that the key in the entry matches the name.
    __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
    __ j(not_equal, &miss);

    // Check the map matches.
    __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
    __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
    __ j(not_equal, &miss);

    // Restore offset register.
    __ mov(offset, Operand(esp, 0));

    // Get the code entry from the cache.
    __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));

    // Check that the flags match what we're looking for.
    __ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
    __ and_(offset, ~Code::kFlagsNotUsedInLookup);
    __ cmp(offset, flags);
    __ j(not_equal, &miss);

#ifdef DEBUG
    if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
      __ jmp(&miss);
    } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
      __ jmp(&miss);
    }
#endif

    // Restore offset and re-load code entry from cache.
    __ pop(offset);
    __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));

    // Jump to the first instruction in the code stub.
    __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(offset);

    // Pop at miss.
    __ bind(&miss);
    __ pop(offset);
  }
}
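
// ProbeTable emits one of two sequences: when a spare 'extra' register is
// available, the code entry is kept in it across the key/map/flags checks;
// otherwise the scaled offset is spilled to the stack and the code entry is
// reloaded after the checks.  Both sequences fall through at 'miss'.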


void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                                    Label* miss_label,
                                                    Register receiver,
                                                    Handle<Name> name,
                                                    Register scratch0,
                                                    Register scratch1) {
  ASSERT(name->IsUniqueName());
  ASSERT(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1);

  __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  __ test_b(FieldOperand(scratch0, Map::kBitFieldOffset),
            kInterceptorOrAccessCheckNeededMask);
  __ j(not_zero, miss_label);

  // Check that receiver is a JSObject.
  __ CmpInstanceType(scratch0, FIRST_SPEC_OBJECT_TYPE);
  __ j(below, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ mov(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));

  // Check that the properties array is a dictionary.
  __ cmp(FieldOperand(properties, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->hash_table_map()));
  __ j(not_equal, miss_label);

  Label done;
  NameDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   properties,
                                                   name,
                                                   scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1);
}


void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Label miss;

  // Assert that code is valid.  The multiplying code relies on the entry size
  // being 12.
  ASSERT(sizeof(Entry) == 12);

  // Assert the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Assert that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));

  // Assert scratch and extra registers are valid, and extra2/3 are unused.
  ASSERT(!scratch.is(no_reg));
  ASSERT(extra2.is(no_reg));
  ASSERT(extra3.is(no_reg));

  Register offset = scratch;
  scratch = no_reg;

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
  __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(offset, flags);
  // We mask out the last two bits because they are not part of the hash and
  // they are always 01 for maps.  The same mask is applied in the two 'and'
  // instructions below.
  __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
  // ProbeTable expects the offset to be pointer scaled, which it is, because
  // the heap object tag size is 2 and the pointer size log 2 is also 2.
  ASSERT(kHeapObjectTagSize == kPointerSizeLog2);
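
  // In C++ terms, the primary-table probe computed above is roughly (a
  // sketch of the assembly, not code that appears elsewhere in this file):
  //
  //   offset = (name->hash_field() + receiver->map()) ^ flags;
  //   offset &= (kPrimaryTableSize - 1) << kHeapObjectTagSize;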

  // Probe the primary table.
  ProbeTable(isolate(), masm, flags, kPrimary, name, receiver, offset, extra);

  // Primary miss: Compute hash for secondary probe.
  __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
  __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(offset, flags);
  __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
  __ sub(offset, name);
  __ add(offset, Immediate(flags));
  __ and_(offset, (kSecondaryTableSize - 1) << kHeapObjectTagSize);
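
  // Likewise, the secondary probe above is roughly:
  //
  //   offset = (primary_offset - name + flags)
  //            & ((kSecondaryTableSize - 1) << kHeapObjectTagSize);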

  // Probe the secondary table.
  ProbeTable(
      isolate(), masm, flags, kSecondary, name, receiver, offset, extra);

  // Cache miss: fall through and let the caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
}


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  __ LoadGlobalFunction(index, prototype);
  __ LoadGlobalFunctionInitialMap(prototype, prototype);
  // Load the prototype from the initial map.
  __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  // Check we're still in the same context.
  __ cmp(Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)),
         masm->isolate()->global_object());
  __ j(not_equal, miss);
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(masm->isolate()->native_context()->get(index)));
  // Load its initial map. The global functions all have initial maps.
  __ Set(prototype, Immediate(Handle<Map>(function->initial_map())));
  // Load the prototype from the initial map.
  __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, miss_label);

  // Load length directly from the JS array.
  __ mov(eax, FieldOperand(receiver, JSArray::kLengthOffset));
  __ ret(0);
}


// Generate code to check if an object is a string.  If the object is
// a string, the map's instance type is left in the scratch register.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, smi);

  // Check that the object is a string.
  __ mov(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ test(scratch, Immediate(kNotStringTag));
  __ j(not_zero, non_string_object);
}


void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch register.
  GenerateStringCheck(masm, receiver, scratch1, miss, &check_wrapper);

  // Load length from the string and convert to a smi.
  __ mov(eax, FieldOperand(receiver, String::kLengthOffset));
  __ ret(0);

  // Check if the object is a JSValue wrapper.
  __ bind(&check_wrapper);
  __ cmp(scratch1, JS_VALUE_TYPE);
  __ j(not_equal, miss);

  // Check if the wrapped value is a string and load the length
  // directly if it is.
  __ mov(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
  GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
  __ mov(eax, FieldOperand(scratch2, String::kLengthOffset));
  __ ret(0);
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(eax, scratch1);
  __ ret(0);
}


void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            bool inobject,
                                            int index,
                                            Representation representation) {
  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
  int offset = index * kPointerSize;
  if (!inobject) {
    // Calculate the offset into the properties array.
    offset = offset + FixedArray::kHeaderSize;
    __ mov(dst, FieldOperand(src, JSObject::kPropertiesOffset));
    src = dst;
  }
  __ mov(dst, FieldOperand(src, offset));
}


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
  STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
  STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
  STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ mov(scratch, Immediate(interceptor));
  __ push(scratch);
  __ push(receiver);
  __ push(holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj,
    IC::UtilityId id) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallExternalReference(
      ExternalReference(IC_Utility(id), masm->isolate()),
      StubCache::kInterceptorArgsLength);
}


// Number of pointers to be reserved on the stack for a fast API call.
static const int kFastApiCallArguments = FunctionCallbackArguments::kArgsLength;
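// The seven reserved slots line up with the FunctionCallbackArguments layout
// checked by the STATIC_ASSERTs in GenerateFastApiCall below: holder,
// isolate, return-value default, return value, call data, callee and
// context save.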


// Reserves space for the extra arguments to the API function in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiCall.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
  // ----------- S t a t e -------------
  //  -- esp[0] : return address
  //  -- esp[4] : last argument in the internal frame of the caller
  // -----------------------------------
  __ pop(scratch);
  for (int i = 0; i < kFastApiCallArguments; i++) {
    __ push(Immediate(Smi::FromInt(0)));
  }
  __ push(scratch);
}


// Undoes the effects of ReserveSpaceForFastApiCall.
static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
  // ----------- S t a t e -------------
  //  -- esp[0]  : return address.
  //  -- esp[4]  : last fast api call extra argument.
  //  -- ...
  //  -- esp[kFastApiCallArguments * 4] : first fast api call extra argument.
  //  -- esp[kFastApiCallArguments * 4 + 4] : last argument in the internal
  //                                          frame.
  // -----------------------------------
  __ pop(scratch);
  __ add(esp, Immediate(kPointerSize * kFastApiCallArguments));
  __ push(scratch);
}


static void GenerateFastApiCallBody(MacroAssembler* masm,
                                    const CallOptimization& optimization,
                                    int argc,
                                    bool restore_context);


// Generates a call to an API function.
static void GenerateFastApiCall(MacroAssembler* masm,
                                const CallOptimization& optimization,
                                int argc) {
  typedef FunctionCallbackArguments FCA;
  // Save the calling context.
  __ mov(Operand(esp, (1 + FCA::kContextSaveIndex) * kPointerSize), esi);

  // Get the function and set up the context.
  Handle<JSFunction> function = optimization.constant_function();
  __ LoadHeapObject(edi, function);
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Construct the FunctionCallbackInfo.
  __ mov(Operand(esp, (1 + FCA::kCalleeIndex) * kPointerSize), edi);
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data(api_call_info->data(), masm->isolate());
  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
    __ mov(ecx, api_call_info);
    __ mov(ebx, FieldOperand(ecx, CallHandlerInfo::kDataOffset));
    __ mov(Operand(esp, (1 + FCA::kDataIndex) * kPointerSize), ebx);
  } else {
    __ mov(Operand(esp, (1 + FCA::kDataIndex) * kPointerSize),
           Immediate(call_data));
  }
  __ mov(Operand(esp, (1 + FCA::kIsolateIndex) * kPointerSize),
         Immediate(reinterpret_cast<int>(masm->isolate())));
  __ mov(Operand(esp, (1 + FCA::kReturnValueOffset) * kPointerSize),
         masm->isolate()->factory()->undefined_value());
  __ mov(Operand(esp, (1 + FCA::kReturnValueDefaultValueIndex) * kPointerSize),
         masm->isolate()->factory()->undefined_value());

  // Prepare arguments.
  STATIC_ASSERT(kFastApiCallArguments == 7);
  __ lea(eax, Operand(esp, 1 * kPointerSize));

  GenerateFastApiCallBody(masm, optimization, argc, false);
}

// Generates a call to an API function. This version uses push() to generate
// smaller, faster code than the version above. It is an optimization that
// will be removed when API call ICs are generated in hydrogen.
static void GenerateFastApiCall(MacroAssembler* masm,
                                const CallOptimization& optimization,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Register scratch3,
                                int argc,
                                Register* values) {
  ASSERT(optimization.is_simple_api_call());

  // Remove the return address; it is pushed back below, after the arguments.
  __ pop(scratch1);

  // receiver
  __ push(receiver);

  // Write the arguments to the stack frame.
  for (int i = 0; i < argc; i++) {
    Register arg = values[argc - 1 - i];
    ASSERT(!receiver.is(arg));
    ASSERT(!scratch1.is(arg));
    ASSERT(!scratch2.is(arg));
    ASSERT(!scratch3.is(arg));
    __ push(arg);
  }

  typedef FunctionCallbackArguments FCA;

  STATIC_ASSERT(FCA::kHolderIndex == 0);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kCalleeIndex == 5);
  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
  STATIC_ASSERT(FCA::kArgsLength == 7);
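
  // The pushes below build the FunctionCallbackInfo frame top-down: once the
  // holder has been pushed, esp[i * kPointerSize] holds FCA slot i (the
  // holder at esp[0] through the saved context at esp[6 * kPointerSize]);
  // the return address is then pushed back on top.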

  // context save
  __ push(esi);

  // Get the function and set up the context.
  Handle<JSFunction> function = optimization.constant_function();
  __ LoadHeapObject(scratch2, function);
  __ mov(esi, FieldOperand(scratch2, JSFunction::kContextOffset));
  // callee
  __ push(scratch2);

  Isolate* isolate = masm->isolate();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data(api_call_info->data(), isolate);
  // Push data from ExecutableAccessorInfo.
  if (isolate->heap()->InNewSpace(*call_data)) {
    __ mov(scratch2, api_call_info);
    __ mov(scratch3, FieldOperand(scratch2, CallHandlerInfo::kDataOffset));
    __ push(scratch3);
  } else {
    __ push(Immediate(call_data));
  }
  // return value
  __ push(Immediate(isolate->factory()->undefined_value()));
  // return value default
  __ push(Immediate(isolate->factory()->undefined_value()));
  // isolate
  __ push(Immediate(reinterpret_cast<int>(isolate)));
  // holder
  __ push(receiver);

  // Store the receiver address for GenerateFastApiCallBody.
  ASSERT(!scratch1.is(eax));
  __ mov(eax, esp);

  // return address
  __ push(scratch1);

  GenerateFastApiCallBody(masm, optimization, argc, true);
}


static void GenerateFastApiCallBody(MacroAssembler* masm,
                                    const CallOptimization& optimization,
                                    int argc,
                                    bool restore_context) {
  // ----------- S t a t e -------------
  //  -- esp[0]              : return address
  //  -- esp[4] - esp[28]    : FunctionCallbackInfo, incl.
  //                         :  object passing the type check
  //                            (set by CheckPrototypes)
  //  -- esp[32]             : last argument
  //  -- ...
  //  -- esp[(argc + 7) * 4] : first argument
  //  -- esp[(argc + 8) * 4] : receiver
  //
  //  -- eax : receiver address
  // -----------------------------------
  typedef FunctionCallbackArguments FCA;

  // The API function gets a reference to the v8::Arguments. If the CPU
  // profiler is enabled, a wrapper function will be called instead, and we
  // need to pass the address of the callback as an additional parameter, so
  // always allocate space for it.
  const int kApiArgc = 1 + 1;

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;

  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();

  // Function address is a foreign pointer outside V8's heap.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  __ PrepareCallApiFunction(kApiArgc + kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ mov(ApiParameterOperand(2), eax);
  __ add(eax, Immediate((argc + kFastApiCallArguments - 1) * kPointerSize));
  // FunctionCallbackInfo::values_.
  __ mov(ApiParameterOperand(3), eax);
  // FunctionCallbackInfo::length_.
  __ Set(ApiParameterOperand(4), Immediate(argc));
  // FunctionCallbackInfo::is_construct_call_.
  __ Set(ApiParameterOperand(5), Immediate(0));

  // v8::InvocationCallback's argument.
  __ lea(eax, ApiParameterOperand(2));
  __ mov(ApiParameterOperand(0), eax);

  Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);

  Operand context_restore_operand(ebp,
                                  (2 + FCA::kContextSaveIndex) * kPointerSize);
  Operand return_value_operand(ebp,
                               (2 + FCA::kReturnValueOffset) * kPointerSize);
  __ CallApiFunctionAndReturn(function_address,
                              thunk_address,
                              ApiParameterOperand(1),
                              argc + kFastApiCallArguments + 1,
                              return_value_operand,
                              restore_context ?
                                  &context_restore_operand : NULL);
}


class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  CallInterceptorCompiler(CallStubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name,
                          ExtraICState extra_state)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name) {}

  void Compile(MacroAssembler* masm,
               Handle<JSObject> object,
               Handle<JSObject> holder,
               Handle<Name> name,
               LookupResult* lookup,
               Register receiver,
               Register scratch1,
               Register scratch2,
               Register scratch3,
               Label* miss) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, miss);

    CallOptimization optimization(lookup);
    if (optimization.is_constant_call()) {
      CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
                       holder, lookup, name, optimization, miss);
    } else {
      CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
                     name, holder, miss);
    }
  }

 private:
  void CompileCacheable(MacroAssembler* masm,
                        Handle<JSObject> object,
                        Register receiver,
                        Register scratch1,
                        Register scratch2,
                        Register scratch3,
                        Handle<JSObject> interceptor_holder,
                        LookupResult* lookup,
                        Handle<Name> name,
                        const CallOptimization& optimization,
                        Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());

    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 = optimization.GetPrototypeDepthOfExpectedType(
          object, interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 = optimization.GetPrototypeDepthOfExpectedType(
            interceptor_holder, Handle<JSObject>(lookup->holder()));
      }
      can_do_fast_api_call =
          depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
    }

    Counters* counters = masm->isolate()->counters();
    __ IncrementCounter(counters->call_const_interceptor(), 1);

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1);
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from receiver to interceptor's holder
    // haven't changed and thus we can invoke interceptor.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(
            IC::CurrentTypeOf(object, masm->isolate()), receiver,
            interceptor_holder, scratch1, scratch2, scratch3,
            name, depth1, miss);

    // Invoke an interceptor and if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder,
                        &regular_invoke);

    // Interceptor returned nothing for this property.  Try to use cached
    // constant function.

    // Check that the maps from interceptor's holder to constant function's
    // holder haven't changed and thus we can use cached constant function.
    if (*interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(
          IC::CurrentTypeOf(interceptor_holder, masm->isolate()), holder,
          handle(lookup->holder()), scratch1, scratch2, scratch3,
          name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder' for the API
      // (the object which is instanceof for the signature).  It's safe to
      // omit it here because, if present, it was already fetched by the
      // previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke function.
    if (can_do_fast_api_call) {
      GenerateFastApiCall(masm, optimization, arguments_.immediate());
    } else {
      Handle<JSFunction> fun = optimization.constant_function();
      stub_compiler_->GenerateJumpFunction(object, fun);
    }

    // Deferred code for the fast API call case: clean up the preallocated
    // space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm, scratch1);
      __ jmp(miss_label);
    }

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm, scratch1);
    }
  }

  void CompileRegular(MacroAssembler* masm,
                      Handle<JSObject> object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      Handle<Name> name,
                      Handle<JSObject> interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(
            IC::CurrentTypeOf(object, masm->isolate()), receiver,
            interceptor_holder, scratch1, scratch2, scratch3, name, miss_label);

    FrameScope scope(masm, StackFrame::INTERNAL);
    // Save the name_ register across the call.
    __ push(name_);

    CompileCallLoadPropertyWithInterceptor(
        masm, receiver, holder, name_, interceptor_holder,
        IC::kLoadPropertyWithInterceptorForCall);

    // Restore the name_ register.
    __ pop(name_);

    // Leave the internal frame.
  }

  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           Handle<JSObject> holder_obj,
                           Label* interceptor_succeeded) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ push(receiver);
      __ push(holder);
      __ push(name_);

      CompileCallLoadPropertyWithInterceptor(
          masm, receiver, holder, name_, holder_obj,
          IC::kLoadPropertyWithInterceptorOnly);

      __ pop(name_);
      __ pop(holder);
      __ pop(receiver);
      // Leave the internal frame.
    }

    __ cmp(eax, masm->isolate()->factory()->no_interceptor_result_sentinel());
    __ j(not_equal, interceptor_succeeded);
  }

  CallStubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
  Register name_;
};


void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                            Label* label,
                                            Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ mov(this->name(), Immediate(name));
  }
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
                                             Handle<JSGlobalObject> global,
                                             Handle<Name> name,
                                             Register scratch,
                                             Label* miss) {
  Handle<PropertyCell> cell =
      JSGlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  Handle<Oddball> the_hole = masm->isolate()->factory()->the_hole_value();
  if (Serializer::enabled()) {
    __ mov(scratch, Immediate(cell));
    __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset),
           Immediate(the_hole));
  } else {
    __ cmp(Operand::ForCell(cell), Immediate(the_hole));
  }
  __ j(not_equal, miss);
}


void StoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}


// The receiver_reg is preserved on jumps to miss_label, but may be destroyed
// if the store is successful.
void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                Handle<JSObject> object,
                                                LookupResult* lookup,
                                                Handle<Map> transition,
                                                Handle<Name> name,
                                                Register receiver_reg,
                                                Register storage_reg,
                                                Register value_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register unused,
                                                Label* miss_label,
                                                Label* slow) {
  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ CmpObject(value_reg, constant);
    __ j(not_equal, miss_label);
  } else if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    Label do_store, heap_number;
    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow);

    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(value_reg);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ Cvtsi2sd(xmm0, value_reg);
    } else {
      __ push(value_reg);
      __ fild_s(Operand(esp, 0));
      __ pop(value_reg);
    }
    __ SmiTag(value_reg);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
    } else {
      __ fld_d(FieldOperand(value_reg, HeapNumber::kValueOffset));
    }

    __ bind(&do_store);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0);
    } else {
      __ fstp_d(FieldOperand(storage_reg, HeapNumber::kValueOffset));
    }
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ pop(scratch1);  // Return address.
    __ push(receiver_reg);
    __ push(Immediate(transition));
    __ push(value_reg);
    __ push(scratch1);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  // Update the map of the object.
  __ mov(scratch1, Immediate(transition));
  __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  if (details.type() == CONSTANT) {
    ASSERT(value_reg.is(eax));
    __ ret(0);
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();
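
  // After this adjustment, a negative index denotes an in-object property
  // (stored at instance_size + index * kPointerSize from the object's start)
  // and a non-negative index a slot in the out-of-line properties array; the
  // two branches below handle exactly these cases.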

  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  // TODO(verwaest): Share this code as a code stub.
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ mov(FieldOperand(receiver_reg, offset), storage_reg);
    } else {
      __ mov(FieldOperand(receiver_reg, offset), value_reg);
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(receiver_reg,
                          offset,
                          storage_reg,
                          scratch1,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ mov(FieldOperand(scratch1, offset), storage_reg);
    } else {
      __ mov(FieldOperand(scratch1, offset), value_reg);
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(scratch1,
                          offset,
                          storage_reg,
                          receiver_reg,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register eax).
  ASSERT(value_reg.is(eax));
  __ ret(0);
}


// Both name_reg and receiver_reg are preserved on jumps to miss_label,
// but may be destroyed if the store is successful.
void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Register receiver_reg,
                                           Register name_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* miss_label) {
  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    // Load the double storage.
    if (index < 0) {
      int offset = object->map()->instance_size() + (index * kPointerSize);
      __ mov(scratch1, FieldOperand(receiver_reg, offset));
    } else {
      __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
      int offset = index * kPointerSize + FixedArray::kHeaderSize;
      __ mov(scratch1, FieldOperand(scratch1, offset));
    }

    // Store the value into the storage.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(value_reg);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ Cvtsi2sd(xmm0, value_reg);
    } else {
      __ push(value_reg);
      __ fild_s(Operand(esp, 0));
      __ pop(value_reg);
    }
    __ SmiTag(value_reg);
    __ jmp(&do_store);
    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
    } else {
      __ fld_d(FieldOperand(value_reg, HeapNumber::kValueOffset));
    }
    __ bind(&do_store);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0);
    } else {
      __ fstp_d(FieldOperand(scratch1, HeapNumber::kValueOffset));
    }
    // Return the value (register eax).
    ASSERT(value_reg.is(eax));
    __ ret(0);
    return;
  }

  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ mov(FieldOperand(receiver_reg, offset), value_reg);

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(receiver_reg,
                          offset,
                          name_reg,
                          scratch1,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ mov(FieldOperand(scratch1, offset), value_reg);

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(scratch1,
                          offset,
                          name_reg,
                          receiver_reg,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register eax).
  ASSERT(value_reg.is(eax));
  __ ret(0);
}


void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ jmp(code, RelocInfo::CODE_TARGET);
}


#undef __
#define __ ACCESS_MASM(masm())


Register StubCompiler::CheckPrototypes(Handle<Type> type,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<Name> name,
                                       int save_at_depth,
                                       Label* miss,
                                       PrototypeCheckType check) {
  Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));
  // Make sure that the type feedback oracle harvests the receiver map.
  // TODO(svenpanne) Remove this hack when all ICs are reworked.
  __ mov(scratch1, receiver_map);

  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  const int kHolderIndex = FunctionCallbackArguments::kHolderIndex + 1;
  if (save_at_depth == depth) {
    __ mov(Operand(esp, kHolderIndex * kPointerSize), reg);
  }

  Handle<JSObject> current = Handle<JSObject>::null();
  if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant());
  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder->map());
  // Traverse the prototype chain and check the maps in the chain for
  // fast and global objects, or do a negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap() &&
        !current_map->IsJSGlobalProxyMap()) {
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current.is_null() ||
             current->property_dictionary()->FindEntry(*name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else {
      bool in_new_space = heap()->InNewSpace(*prototype);
      if (depth != 1 || check == CHECK_ALL_MAPS) {
        __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
      }

      // Check access rights to the global object.  This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current_map->IsJSGlobalProxyMap()) {
        __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(
            masm(), Handle<JSGlobalObject>::cast(current), name,
            scratch2, miss);
      }

      if (in_new_space) {
        // Save the map in scratch1 for later.
        __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      }

      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (in_new_space) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code.  Load it from the map.
        __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ mov(reg, prototype);
      }
    }

    if (save_at_depth == depth) {
      __ mov(Operand(esp, kHolderIndex * kPointerSize), reg);
    }

    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (depth != 0 || check == CHECK_ALL_MAPS) {
    // Check the holder map.
    __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
  }

  // Perform security check for access to the global object.
  ASSERT(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
  }

  // Return the register containing the holder.
  return reg;
}


void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ jmp(&success);
    __ bind(miss);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}

   1323 void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
   1324   if (!miss->is_unused()) {
   1325     Label success;
   1326     __ jmp(&success);
   1327     GenerateRestoreName(masm(), miss, name);
   1328     TailCallBuiltin(masm(), MissBuiltin(kind()));
   1329     __ bind(&success);
   1330   }
   1331 }
   1332 
   1333 
   1334 Register LoadStubCompiler::CallbackHandlerFrontend(
   1335     Handle<Type> type,
   1336     Register object_reg,
   1337     Handle<JSObject> holder,
   1338     Handle<Name> name,
   1339     Handle<Object> callback) {
   1340   Label miss;
   1341 
   1342   Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);
   1343 
   1344   if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
   1345     ASSERT(!reg.is(scratch2()));
   1346     ASSERT(!reg.is(scratch3()));
   1347     Register dictionary = scratch1();
   1348     bool must_preserve_dictionary_reg = reg.is(dictionary);
   1349 
   1350     // Load the properties dictionary.
   1351     if (must_preserve_dictionary_reg) {
   1352       __ push(dictionary);
   1353     }
   1354     __ mov(dictionary, FieldOperand(reg, JSObject::kPropertiesOffset));
   1355 
   1356     // Probe the dictionary.
   1357     Label probe_done, pop_and_miss;
   1358     NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
   1359                                                      &pop_and_miss,
   1360                                                      &probe_done,
   1361                                                      dictionary,
   1362                                                      this->name(),
   1363                                                      scratch2(),
   1364                                                      scratch3());
   1365     __ bind(&pop_and_miss);
   1366     if (must_preserve_dictionary_reg) {
   1367       __ pop(dictionary);
   1368     }
   1369     __ jmp(&miss);
   1370     __ bind(&probe_done);
   1371 
   1372     // If probing finds an entry in the dictionary, scratch2 contains the
   1373     // index into the dictionary. Check that the value is the callback.
   1374     Register index = scratch2();
   1375     const int kElementsStartOffset =
   1376         NameDictionary::kHeaderSize +
   1377         NameDictionary::kElementsStartIndex * kPointerSize;
   1378     const int kValueOffset = kElementsStartOffset + kPointerSize;
   1379     __ mov(scratch3(),
   1380            Operand(dictionary, index, times_4, kValueOffset - kHeapObjectTag));
   1381     if (must_preserve_dictionary_reg) {
   1382       __ pop(dictionary);
   1383     }
   1384     __ cmp(scratch3(), callback);
   1385     __ j(not_equal, &miss);
   1386   }
   1387 
   1388   HandlerFrontendFooter(name, &miss);
   1389   return reg;
   1390 }
   1391 
   1392 
   1393 void LoadStubCompiler::GenerateLoadField(Register reg,
   1394                                          Handle<JSObject> holder,
   1395                                          PropertyIndex field,
   1396                                          Representation representation) {
   1397   if (!reg.is(receiver())) __ mov(receiver(), reg);
   1398   if (kind() == Code::LOAD_IC) {
   1399     LoadFieldStub stub(field.is_inobject(holder),
   1400                        field.translate(holder),
   1401                        representation);
   1402     GenerateTailCall(masm(), stub.GetCode(isolate()));
   1403   } else {
   1404     KeyedLoadFieldStub stub(field.is_inobject(holder),
   1405                             field.translate(holder),
   1406                             representation);
   1407     GenerateTailCall(masm(), stub.GetCode(isolate()));
   1408   }
   1409 }
   1410 
   1411 
   1412 void LoadStubCompiler::GenerateLoadCallback(
   1413     const CallOptimization& call_optimization) {
   1414   GenerateFastApiCall(
   1415       masm(), call_optimization, receiver(), scratch1(),
   1416       scratch2(), name(), 0, NULL);
   1417 }
   1418 
   1419 
   1420 void LoadStubCompiler::GenerateLoadCallback(
   1421     Register reg,
   1422     Handle<ExecutableAccessorInfo> callback) {
    1423   // Insert additional parameters into the stack frame above the return address.
   1424   ASSERT(!scratch3().is(reg));
   1425   __ pop(scratch3());  // Get return address to place it below.
   1426 
   1427   STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
   1428   STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
   1429   STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
   1430   STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
   1431   STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
   1432   STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
   1433   __ push(receiver());  // receiver
   1434   // Push data from ExecutableAccessorInfo.
   1435   if (isolate()->heap()->InNewSpace(callback->data())) {
   1436     ASSERT(!scratch2().is(reg));
   1437     __ mov(scratch2(), Immediate(callback));
   1438     __ push(FieldOperand(scratch2(), ExecutableAccessorInfo::kDataOffset));
   1439   } else {
   1440     __ push(Immediate(Handle<Object>(callback->data(), isolate())));
   1441   }
   1442   __ push(Immediate(isolate()->factory()->undefined_value()));  // ReturnValue
   1443   // ReturnValue default value
   1444   __ push(Immediate(isolate()->factory()->undefined_value()));
    1445   __ push(Immediate(reinterpret_cast<int>(isolate())));  // isolate
   1446   __ push(reg);  // holder
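           // The pushes above build the PropertyCallbackArguments block in reverse:
           // the receiver (kThisIndex == 5) goes in first and the holder
           // (kHolderIndex == 0) last, so the indices asserted above count upwards
           // from the stack pointer.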
   1447 
    1448   // Save a pointer to where we pushed the arguments. This will be passed
    1449   // as the const PropertyCallbackInfo& to the C++ callback.
   1450   __ push(esp);
   1451 
   1452   __ push(name());  // name
   1453   __ mov(ebx, esp);  // esp points to reference to name (handler).
   1454 
   1455   __ push(scratch3());  // Restore return address.
   1456 
    1457   // Stack space for the v8::Arguments::values_ array, plus the handler for
    1458   // the name and the pointer to the values (treated as a smi by the GC).
   1459   const int kStackSpace = PropertyCallbackArguments::kArgsLength + 2;
    1460   // Allocate space for the optional callback address parameter in case the
    1461   // CPU profiler is active.
   1462   const int kApiArgc = 2 + 1;
   1463 
   1464   Address getter_address = v8::ToCData<Address>(callback->getter());
   1465   __ PrepareCallApiFunction(kApiArgc);
   1466   __ mov(ApiParameterOperand(0), ebx);  // name.
   1467   __ add(ebx, Immediate(kPointerSize));
   1468   __ mov(ApiParameterOperand(1), ebx);  // arguments pointer.
   1469 
   1470   // Emitting a stub call may try to allocate (if the code is not
   1471   // already generated).  Do not allow the assembler to perform a
   1472   // garbage collection but instead return the allocation failure
   1473   // object.
   1474 
   1475   Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
   1476 
   1477   __ CallApiFunctionAndReturn(getter_address,
   1478                               thunk_address,
   1479                               ApiParameterOperand(2),
   1480                               kStackSpace,
   1481                               Operand(ebp, 7 * kPointerSize),
   1482                               NULL);
   1483 }
   1484 
   1485 
   1486 void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
   1487   // Return the constant value.
   1488   __ LoadObject(eax, value);
   1489   __ ret(0);
   1490 }
   1491 
   1492 
   1493 void LoadStubCompiler::GenerateLoadInterceptor(
   1494     Register holder_reg,
   1495     Handle<Object> object,
   1496     Handle<JSObject> interceptor_holder,
   1497     LookupResult* lookup,
   1498     Handle<Name> name) {
   1499   ASSERT(interceptor_holder->HasNamedInterceptor());
   1500   ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
   1501 
    1502   // So far the most popular follow-ups for interceptor loads are FIELD
    1503   // and CALLBACKS, so inline only those; other cases may be added
    1504   // later.
   1505   bool compile_followup_inline = false;
   1506   if (lookup->IsFound() && lookup->IsCacheable()) {
   1507     if (lookup->IsField()) {
   1508       compile_followup_inline = true;
   1509     } else if (lookup->type() == CALLBACKS &&
   1510                lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
   1511       ExecutableAccessorInfo* callback =
   1512           ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
   1513       compile_followup_inline = callback->getter() != NULL &&
   1514           callback->IsCompatibleReceiver(*object);
   1515     }
   1516   }
   1517 
   1518   if (compile_followup_inline) {
   1519     // Compile the interceptor call, followed by inline code to load the
   1520     // property from further up the prototype chain if the call fails.
   1521     // Check that the maps haven't changed.
   1522     ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
   1523 
   1524     // Preserve the receiver register explicitly whenever it is different from
   1525     // the holder and it is needed should the interceptor return without any
   1526     // result. The CALLBACKS case needs the receiver to be passed into C++ code,
   1527     // the FIELD case might cause a miss during the prototype check.
    1528     bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    1529     bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
    1530         (lookup->type() == CALLBACKS || must_perform_prototype_check);
   1531 
   1532     // Save necessary data before invoking an interceptor.
   1533     // Requires a frame to make GC aware of pushed pointers.
   1534     {
   1535       FrameScope frame_scope(masm(), StackFrame::INTERNAL);
   1536 
   1537       if (must_preserve_receiver_reg) {
   1538         __ push(receiver());
   1539       }
   1540       __ push(holder_reg);
   1541       __ push(this->name());
   1542 
    1543       // Invoke an interceptor.  Note: map checks from the receiver to the
    1544       // interceptor's holder have already been compiled (see the caller of
    1545       // this method).
   1546       CompileCallLoadPropertyWithInterceptor(
   1547           masm(), receiver(), holder_reg, this->name(), interceptor_holder,
   1548           IC::kLoadPropertyWithInterceptorOnly);
   1549 
   1550       // Check if interceptor provided a value for property.  If it's
   1551       // the case, return immediately.
   1552       Label interceptor_failed;
   1553       __ cmp(eax, factory()->no_interceptor_result_sentinel());
   1554       __ j(equal, &interceptor_failed);
   1555       frame_scope.GenerateLeaveFrame();
   1556       __ ret(0);
   1557 
   1558       // Clobber registers when generating debug-code to provoke errors.
   1559       __ bind(&interceptor_failed);
   1560       if (FLAG_debug_code) {
   1561         __ mov(receiver(), Immediate(BitCast<int32_t>(kZapValue)));
   1562         __ mov(holder_reg, Immediate(BitCast<int32_t>(kZapValue)));
   1563         __ mov(this->name(), Immediate(BitCast<int32_t>(kZapValue)));
   1564       }
   1565 
   1566       __ pop(this->name());
   1567       __ pop(holder_reg);
   1568       if (must_preserve_receiver_reg) {
   1569         __ pop(receiver());
   1570       }
   1571 
   1572       // Leave the internal frame.
   1573     }
   1574 
   1575     GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
   1576   } else {  // !compile_followup_inline
   1577     // Call the runtime system to load the interceptor.
   1578     // Check that the maps haven't changed.
   1579     __ pop(scratch2());  // save old return address
   1580     PushInterceptorArguments(masm(), receiver(), holder_reg,
   1581                              this->name(), interceptor_holder);
   1582     __ push(scratch2());  // restore old return address
   1583 
   1584     ExternalReference ref =
   1585         ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
   1586                           isolate());
   1587     __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
   1588   }
   1589 }
   1590 
   1591 
   1592 void CallStubCompiler::GenerateNameCheck(Handle<Name> name, Label* miss) {
   1593   if (kind_ == Code::KEYED_CALL_IC) {
   1594     __ cmp(ecx, Immediate(name));
   1595     __ j(not_equal, miss);
   1596   }
   1597 }
   1598 
   1599 
   1600 void CallStubCompiler::GenerateFunctionCheck(Register function,
   1601                                              Register scratch,
   1602                                              Label* miss) {
   1603   __ JumpIfSmi(function, miss);
   1604   __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
   1605   __ j(not_equal, miss);
   1606 }
   1607 
   1608 
   1609 void CallStubCompiler::GenerateLoadFunctionFromCell(
   1610     Handle<Cell> cell,
   1611     Handle<JSFunction> function,
   1612     Label* miss) {
   1613   // Get the value from the cell.
   1614   if (Serializer::enabled()) {
   1615     __ mov(edi, Immediate(cell));
   1616     __ mov(edi, FieldOperand(edi, Cell::kValueOffset));
   1617   } else {
   1618     __ mov(edi, Operand::ForCell(cell));
   1619   }
   1620 
   1621   // Check that the cell contains the same function.
   1622   if (isolate()->heap()->InNewSpace(*function)) {
   1623     // We can't embed a pointer to a function in new space so we have
   1624     // to verify that the shared function info is unchanged. This has
   1625     // the nice side effect that multiple closures based on the same
   1626     // function can all use this call IC. Before we load through the
   1627     // function, we have to verify that it still is a function.
   1628     GenerateFunctionCheck(edi, ebx, miss);
   1629 
   1630     // Check the shared function info. Make sure it hasn't changed.
   1631     __ cmp(FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset),
   1632            Immediate(Handle<SharedFunctionInfo>(function->shared())));
   1633   } else {
   1634     __ cmp(edi, Immediate(function));
   1635   }
   1636   __ j(not_equal, miss);
   1637 }
   1638 
   1639 
   1640 void CallStubCompiler::GenerateMissBranch() {
   1641   Handle<Code> code =
   1642       isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
   1643                                                kind_,
   1644                                                extra_state());
   1645   __ jmp(code, RelocInfo::CODE_TARGET);
   1646 }
   1647 
   1648 
   1649 Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
   1650                                                 Handle<JSObject> holder,
   1651                                                 PropertyIndex index,
   1652                                                 Handle<Name> name) {
   1653   Label miss;
   1654 
   1655   Register reg = HandlerFrontendHeader(
   1656       object, holder, name, RECEIVER_MAP_CHECK, &miss);
   1657 
   1658   GenerateFastPropertyLoad(
   1659       masm(), edi, reg, index.is_inobject(holder),
   1660       index.translate(holder), Representation::Tagged());
   1661   GenerateJumpFunction(object, edi, &miss);
   1662 
   1663   HandlerFrontendFooter(&miss);
   1664 
   1665   // Return the generated code.
   1666   return GetCode(Code::FAST, name);
   1667 }
   1668 
   1669 
   1670 Handle<Code> CallStubCompiler::CompileArrayCodeCall(
   1671     Handle<Object> object,
   1672     Handle<JSObject> holder,
   1673     Handle<Cell> cell,
   1674     Handle<JSFunction> function,
   1675     Handle<String> name,
   1676     Code::StubType type) {
   1677   Label miss;
   1678 
   1679   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
   1680   if (!cell.is_null()) {
   1681     ASSERT(cell->value() == *function);
   1682     GenerateLoadFunctionFromCell(cell, function, &miss);
   1683   }
   1684 
   1685   Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
   1686   site->SetElementsKind(GetInitialFastElementsKind());
   1687   Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
   1688   const int argc = arguments().immediate();
   1689   __ mov(eax, Immediate(argc));
   1690   __ mov(ebx, site_feedback_cell);
   1691   __ mov(edi, function);
   1692 
   1693   ArrayConstructorStub stub(isolate());
   1694   __ TailCallStub(&stub);
   1695 
   1696   HandlerFrontendFooter(&miss);
   1697 
   1698   // Return the generated code.
   1699   return GetCode(type, name);
   1700 }
   1701 
   1702 
   1703 Handle<Code> CallStubCompiler::CompileArrayPushCall(
   1704     Handle<Object> object,
   1705     Handle<JSObject> holder,
   1706     Handle<Cell> cell,
   1707     Handle<JSFunction> function,
   1708     Handle<String> name,
   1709     Code::StubType type) {
    1710   // If the object is not an array, or is observed or sealed, bail out to the
    1711   // regular call.
   1712   if (!object->IsJSArray() ||
   1713       !cell.is_null() ||
   1714       Handle<JSArray>::cast(object)->map()->is_observed() ||
   1715       !Handle<JSArray>::cast(object)->map()->is_extensible()) {
   1716     return Handle<Code>::null();
   1717   }
   1718 
   1719   Label miss;
   1720 
   1721   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
   1722 
   1723   const int argc = arguments().immediate();
   1724   if (argc == 0) {
    1725     // No-op; just return the length.
   1726     __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
   1727     __ ret((argc + 1) * kPointerSize);
   1728   } else {
   1729     Label call_builtin;
   1730 
   1731     if (argc == 1) {  // Otherwise fall through to call builtin.
   1732       Label attempt_to_grow_elements, with_write_barrier, check_double;
   1733 
   1734       // Get the elements array of the object.
   1735       __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
   1736 
   1737       // Check that the elements are in fast mode and writable.
   1738       __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
   1739              Immediate(factory()->fixed_array_map()));
   1740       __ j(not_equal, &check_double);
   1741 
   1742       // Get the array's length into eax and calculate new length.
   1743       __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
   1744       STATIC_ASSERT(kSmiTagSize == 1);
   1745       STATIC_ASSERT(kSmiTag == 0);
   1746       __ add(eax, Immediate(Smi::FromInt(argc)));
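               // Both the length and Smi::FromInt(argc) are tagged smis (the value
               // shifted left by one, with a zero tag bit), so an ordinary integer
               // add produces the correctly tagged sum.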
   1747 
   1748       // Get the elements' length into ecx.
   1749       __ mov(ecx, FieldOperand(edi, FixedArray::kLengthOffset));
   1750 
   1751       // Check if we could survive without allocation.
   1752       __ cmp(eax, ecx);
   1753       __ j(greater, &attempt_to_grow_elements);
   1754 
   1755       // Check if value is a smi.
   1756       __ mov(ecx, Operand(esp, argc * kPointerSize));
   1757       __ JumpIfNotSmi(ecx, &with_write_barrier);
   1758 
   1759       // Save new length.
   1760       __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
   1761 
   1762       // Store the value.
   1763       __ mov(FieldOperand(edi,
   1764                           eax,
   1765                           times_half_pointer_size,
   1766                           FixedArray::kHeaderSize - argc * kPointerSize),
   1767              ecx);
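               // eax holds the new length as a smi (new_length << 1), so scaling by
               // times_half_pointer_size yields new_length * kPointerSize; the
               // displacement of -argc * kPointerSize then addresses the slot of
               // the element just appended.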
   1768 
   1769       __ ret((argc + 1) * kPointerSize);
   1770 
   1771       __ bind(&check_double);
    1772 
   1774       // Check that the elements are in double mode.
   1775       __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
   1776              Immediate(factory()->fixed_double_array_map()));
   1777       __ j(not_equal, &call_builtin);
   1778 
   1779       // Get the array's length into eax and calculate new length.
   1780       __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
   1781       STATIC_ASSERT(kSmiTagSize == 1);
   1782       STATIC_ASSERT(kSmiTag == 0);
   1783       __ add(eax, Immediate(Smi::FromInt(argc)));
   1784 
   1785       // Get the elements' length into ecx.
   1786       __ mov(ecx, FieldOperand(edi, FixedArray::kLengthOffset));
   1787 
   1788       // Check if we could survive without allocation.
   1789       __ cmp(eax, ecx);
   1790       __ j(greater, &call_builtin);
   1791 
   1792       __ mov(ecx, Operand(esp, argc * kPointerSize));
   1793       __ StoreNumberToDoubleElements(
   1794           ecx, edi, eax, ecx, xmm0, &call_builtin, true, argc * kDoubleSize);
   1795 
   1796       // Save new length.
   1797       __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
   1798       __ ret((argc + 1) * kPointerSize);
   1799 
   1800       __ bind(&with_write_barrier);
   1801 
   1802       __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
   1803 
   1804       if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
   1805         Label fast_object, not_fast_object;
   1806         __ CheckFastObjectElements(ebx, &not_fast_object, Label::kNear);
   1807         __ jmp(&fast_object);
   1808         // In case of fast smi-only, convert to fast object, otherwise bail out.
   1809         __ bind(&not_fast_object);
   1810         __ CheckFastSmiElements(ebx, &call_builtin);
   1811         __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
   1812                Immediate(factory()->heap_number_map()));
   1813         __ j(equal, &call_builtin);
   1814         // edi: elements array
   1815         // edx: receiver
   1816         // ebx: map
   1817         Label try_holey_map;
   1818         __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
   1819                                                FAST_ELEMENTS,
   1820                                                ebx,
   1821                                                edi,
   1822                                                &try_holey_map);
   1823 
   1824         ElementsTransitionGenerator::
   1825             GenerateMapChangeElementsTransition(masm(),
   1826                                                 DONT_TRACK_ALLOCATION_SITE,
   1827                                                 NULL);
   1828         // Restore edi.
   1829         __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
   1830         __ jmp(&fast_object);
   1831 
   1832         __ bind(&try_holey_map);
   1833         __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
   1834                                                FAST_HOLEY_ELEMENTS,
   1835                                                ebx,
   1836                                                edi,
   1837                                                &call_builtin);
   1838         ElementsTransitionGenerator::
   1839             GenerateMapChangeElementsTransition(masm(),
   1840                                                 DONT_TRACK_ALLOCATION_SITE,
   1841                                                 NULL);
   1842         // Restore edi.
   1843         __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
   1844         __ bind(&fast_object);
   1845       } else {
   1846         __ CheckFastObjectElements(ebx, &call_builtin);
   1847       }
   1848 
   1849       // Save new length.
   1850       __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
   1851 
   1852       // Store the value.
   1853       __ lea(edx, FieldOperand(edi,
   1854                                eax, times_half_pointer_size,
   1855                                FixedArray::kHeaderSize - argc * kPointerSize));
   1856       __ mov(Operand(edx, 0), ecx);
   1857 
   1858       __ RecordWrite(edi, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
   1859                      OMIT_SMI_CHECK);
   1860 
   1861       __ ret((argc + 1) * kPointerSize);
   1862 
   1863       __ bind(&attempt_to_grow_elements);
   1864       if (!FLAG_inline_new) {
   1865         __ jmp(&call_builtin);
   1866       }
   1867 
   1868       __ mov(ebx, Operand(esp, argc * kPointerSize));
   1869       // Growing elements that are SMI-only requires special handling in case
   1870       // the new element is non-Smi. For now, delegate to the builtin.
   1871       Label no_fast_elements_check;
   1872       __ JumpIfSmi(ebx, &no_fast_elements_check);
   1873       __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
   1874       __ CheckFastObjectElements(ecx, &call_builtin, Label::kFar);
   1875       __ bind(&no_fast_elements_check);
   1876 
   1877       // We could be lucky and the elements array could be at the top of
   1878       // new-space.  In this case we can just grow it in place by moving the
   1879       // allocation pointer up.
   1880 
   1881       ExternalReference new_space_allocation_top =
   1882           ExternalReference::new_space_allocation_top_address(isolate());
   1883       ExternalReference new_space_allocation_limit =
   1884           ExternalReference::new_space_allocation_limit_address(isolate());
   1885 
   1886       const int kAllocationDelta = 4;
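               // Growing by four slots at a time amortizes the work: the next three
               // pushes find spare capacity and take the fast path above without
               // touching the allocation top again.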
   1887       // Load top.
   1888       __ mov(ecx, Operand::StaticVariable(new_space_allocation_top));
   1889 
   1890       // Check if it's the end of elements.
   1891       __ lea(edx, FieldOperand(edi,
   1892                                eax, times_half_pointer_size,
   1893                                FixedArray::kHeaderSize - argc * kPointerSize));
   1894       __ cmp(edx, ecx);
   1895       __ j(not_equal, &call_builtin);
   1896       __ add(ecx, Immediate(kAllocationDelta * kPointerSize));
   1897       __ cmp(ecx, Operand::StaticVariable(new_space_allocation_limit));
   1898       __ j(above, &call_builtin);
   1899 
    1900       // We fit, so grow the elements array in place.
   1901       __ mov(Operand::StaticVariable(new_space_allocation_top), ecx);
   1902 
   1903       // Push the argument...
   1904       __ mov(Operand(edx, 0), ebx);
   1905       // ... and fill the rest with holes.
   1906       for (int i = 1; i < kAllocationDelta; i++) {
   1907         __ mov(Operand(edx, i * kPointerSize),
   1908                Immediate(factory()->the_hole_value()));
   1909       }
   1910 
   1911       // We know the elements array is in new space so we don't need the
   1912       // remembered set, but we just pushed a value onto it so we may have to
   1913       // tell the incremental marker to rescan the object that we just grew.  We
   1914       // don't need to worry about the holes because they are in old space and
   1915       // already marked black.
   1916       __ RecordWrite(edi, edx, ebx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
   1917 
    1918       // Restore the receiver to edx; the finish sequence assumes it is there.
   1919       __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
   1920 
    1921       // Increment the elements array's length; the JSArray length is set below.
   1922       __ add(FieldOperand(edi, FixedArray::kLengthOffset),
   1923              Immediate(Smi::FromInt(kAllocationDelta)));
   1924 
    1925       // NOTE: This only happens in new-space, where we don't care about the
    1926       // black-byte-count on pages. Otherwise we would also have to update
    1927       // that count when the object is black.
   1928 
   1929       __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
   1930 
   1931       __ ret((argc + 1) * kPointerSize);
   1932     }
   1933 
   1934     __ bind(&call_builtin);
   1935     __ TailCallExternalReference(
   1936         ExternalReference(Builtins::c_ArrayPush, isolate()),
   1937         argc + 1,
   1938         1);
   1939   }
   1940 
   1941   HandlerFrontendFooter(&miss);
   1942 
   1943   // Return the generated code.
   1944   return GetCode(type, name);
   1945 }
   1946 
   1947 
   1948 Handle<Code> CallStubCompiler::CompileArrayPopCall(
   1949     Handle<Object> object,
   1950     Handle<JSObject> holder,
   1951     Handle<Cell> cell,
   1952     Handle<JSFunction> function,
   1953     Handle<String> name,
   1954     Code::StubType type) {
    1955   // If the object is not an array, or is observed or sealed, bail out to the
    1956   // regular call.
   1957   if (!object->IsJSArray() ||
   1958       !cell.is_null() ||
   1959       Handle<JSArray>::cast(object)->map()->is_observed() ||
   1960       !Handle<JSArray>::cast(object)->map()->is_extensible()) {
   1961     return Handle<Code>::null();
   1962   }
   1963 
   1964   Label miss, return_undefined, call_builtin;
   1965 
   1966   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
   1967 
   1968   // Get the elements array of the object.
   1969   __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset));
   1970 
   1971   // Check that the elements are in fast mode and writable.
   1972   __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
   1973          Immediate(factory()->fixed_array_map()));
   1974   __ j(not_equal, &call_builtin);
   1975 
   1976   // Get the array's length into ecx and calculate new length.
   1977   __ mov(ecx, FieldOperand(edx, JSArray::kLengthOffset));
   1978   __ sub(ecx, Immediate(Smi::FromInt(1)));
   1979   __ j(negative, &return_undefined);
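           // A negative result means the array was empty; pop() on an empty array
           // returns undefined and leaves the elements untouched.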
   1980 
   1981   // Get the last element.
   1982   STATIC_ASSERT(kSmiTagSize == 1);
   1983   STATIC_ASSERT(kSmiTag == 0);
   1984   __ mov(eax, FieldOperand(ebx,
   1985                            ecx, times_half_pointer_size,
   1986                            FixedArray::kHeaderSize));
   1987   __ cmp(eax, Immediate(factory()->the_hole_value()));
   1988   __ j(equal, &call_builtin);
   1989 
   1990   // Set the array's length.
   1991   __ mov(FieldOperand(edx, JSArray::kLengthOffset), ecx);
   1992 
   1993   // Fill with the hole.
   1994   __ mov(FieldOperand(ebx,
   1995                       ecx, times_half_pointer_size,
   1996                       FixedArray::kHeaderSize),
   1997          Immediate(factory()->the_hole_value()));
   1998   const int argc = arguments().immediate();
   1999   __ ret((argc + 1) * kPointerSize);
   2000 
   2001   __ bind(&return_undefined);
   2002   __ mov(eax, Immediate(factory()->undefined_value()));
   2003   __ ret((argc + 1) * kPointerSize);
   2004 
   2005   __ bind(&call_builtin);
   2006   __ TailCallExternalReference(
   2007       ExternalReference(Builtins::c_ArrayPop, isolate()),
   2008       argc + 1,
   2009       1);
   2010 
   2011   HandlerFrontendFooter(&miss);
   2012 
   2013   // Return the generated code.
   2014   return GetCode(type, name);
   2015 }
   2016 
   2017 
   2018 Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
   2019     Handle<Object> object,
   2020     Handle<JSObject> holder,
   2021     Handle<Cell> cell,
   2022     Handle<JSFunction> function,
   2023     Handle<String> name,
   2024     Code::StubType type) {
    2025   // If the object is not a string, bail out to the regular call.
   2026   if (!object->IsString() || !cell.is_null()) {
   2027     return Handle<Code>::null();
   2028   }
   2029 
   2030   const int argc = arguments().immediate();
   2031 
   2032   Label miss;
   2033   Label name_miss;
   2034   Label index_out_of_range;
   2035   Label* index_out_of_range_label = &index_out_of_range;
   2036 
   2037   if (kind_ == Code::CALL_IC &&
   2038       (CallICBase::StringStubState::decode(extra_state()) ==
   2039        DEFAULT_STRING_STUB)) {
   2040     index_out_of_range_label = &miss;
   2041   }
   2042 
   2043   HandlerFrontendHeader(object, holder, name, STRING_CHECK, &name_miss);
   2044 
   2045   Register receiver = ebx;
   2046   Register index = edi;
   2047   Register result = eax;
   2048   __ mov(receiver, Operand(esp, (argc + 1) * kPointerSize));
   2049   if (argc > 0) {
   2050     __ mov(index, Operand(esp, (argc - 0) * kPointerSize));
   2051   } else {
   2052     __ Set(index, Immediate(factory()->undefined_value()));
   2053   }
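           // At the JavaScript level, 'abc'.charCodeAt() behaves like
           // 'abc'.charCodeAt(0), and an out-of-range index yields NaN, which is
           // why the out-of-range path below returns nan_value().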
   2054 
   2055   StringCharCodeAtGenerator generator(receiver,
   2056                                       index,
   2057                                       result,
   2058                                       &miss,  // When not a string.
   2059                                       &miss,  // When not a number.
   2060                                       index_out_of_range_label,
   2061                                       STRING_INDEX_IS_NUMBER);
   2062   generator.GenerateFast(masm());
   2063   __ ret((argc + 1) * kPointerSize);
   2064 
   2065   StubRuntimeCallHelper call_helper;
   2066   generator.GenerateSlow(masm(), call_helper);
   2067 
   2068   if (index_out_of_range.is_linked()) {
   2069     __ bind(&index_out_of_range);
   2070     __ Set(eax, Immediate(factory()->nan_value()));
   2071     __ ret((argc + 1) * kPointerSize);
   2072   }
   2073 
   2074   __ bind(&miss);
   2075   // Restore function name in ecx.
   2076   __ Set(ecx, Immediate(name));
   2077   HandlerFrontendFooter(&name_miss);
   2078 
   2079   // Return the generated code.
   2080   return GetCode(type, name);
   2081 }
   2082 
   2083 
   2084 Handle<Code> CallStubCompiler::CompileStringCharAtCall(
   2085     Handle<Object> object,
   2086     Handle<JSObject> holder,
   2087     Handle<Cell> cell,
   2088     Handle<JSFunction> function,
   2089     Handle<String> name,
   2090     Code::StubType type) {
    2091   // If the object is not a string, bail out to the regular call.
   2092   if (!object->IsString() || !cell.is_null()) {
   2093     return Handle<Code>::null();
   2094   }
   2095 
   2096   const int argc = arguments().immediate();
   2097 
   2098   Label miss;
   2099   Label name_miss;
   2100   Label index_out_of_range;
   2101   Label* index_out_of_range_label = &index_out_of_range;
   2102 
   2103   if (kind_ == Code::CALL_IC &&
   2104       (CallICBase::StringStubState::decode(extra_state()) ==
   2105        DEFAULT_STRING_STUB)) {
   2106     index_out_of_range_label = &miss;
   2107   }
   2108 
   2109   HandlerFrontendHeader(object, holder, name, STRING_CHECK, &name_miss);
   2110 
   2111   Register receiver = eax;
   2112   Register index = edi;
   2113   Register scratch = edx;
   2114   Register result = eax;
   2115   __ mov(receiver, Operand(esp, (argc + 1) * kPointerSize));
   2116   if (argc > 0) {
   2117     __ mov(index, Operand(esp, (argc - 0) * kPointerSize));
   2118   } else {
   2119     __ Set(index, Immediate(factory()->undefined_value()));
   2120   }
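           // At the JavaScript level, an out-of-range index makes charAt() return
           // the empty string, which is why the out-of-range path below returns
           // empty_string().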
   2121 
   2122   StringCharAtGenerator generator(receiver,
   2123                                   index,
   2124                                   scratch,
   2125                                   result,
   2126                                   &miss,  // When not a string.
   2127                                   &miss,  // When not a number.
   2128                                   index_out_of_range_label,
   2129                                   STRING_INDEX_IS_NUMBER);
   2130   generator.GenerateFast(masm());
   2131   __ ret((argc + 1) * kPointerSize);
   2132 
   2133   StubRuntimeCallHelper call_helper;
   2134   generator.GenerateSlow(masm(), call_helper);
   2135 
   2136   if (index_out_of_range.is_linked()) {
   2137     __ bind(&index_out_of_range);
   2138     __ Set(eax, Immediate(factory()->empty_string()));
   2139     __ ret((argc + 1) * kPointerSize);
   2140   }
   2141 
   2142   __ bind(&miss);
   2143   // Restore function name in ecx.
   2144   __ Set(ecx, Immediate(name));
   2145   HandlerFrontendFooter(&name_miss);
   2146 
   2147   // Return the generated code.
   2148   return GetCode(type, name);
   2149 }
   2150 
   2151 
   2152 Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
   2153     Handle<Object> object,
   2154     Handle<JSObject> holder,
   2155     Handle<Cell> cell,
   2156     Handle<JSFunction> function,
   2157     Handle<String> name,
   2158     Code::StubType type) {
   2159   const int argc = arguments().immediate();
   2160 
   2161   // If the object is not a JSObject or we got an unexpected number of
   2162   // arguments, bail out to the regular call.
   2163   if (!object->IsJSObject() || argc != 1) {
   2164     return Handle<Code>::null();
   2165   }
   2166 
   2167   Label miss;
   2168 
   2169   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
   2170   if (!cell.is_null()) {
   2171     ASSERT(cell->value() == *function);
   2172     GenerateLoadFunctionFromCell(cell, function, &miss);
   2173   }
   2174 
   2175   // Load the char code argument.
   2176   Register code = ebx;
   2177   __ mov(code, Operand(esp, 1 * kPointerSize));
   2178 
   2179   // Check the code is a smi.
   2180   Label slow;
   2181   STATIC_ASSERT(kSmiTag == 0);
   2182   __ JumpIfNotSmi(code, &slow);
   2183 
   2184   // Convert the smi code to uint16.
   2185   __ and_(code, Immediate(Smi::FromInt(0xffff)));
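           // Smi::FromInt(0xffff) is the tagged constant 0x1fffe, so the 'and'
           // keeps the low 16 bits of the untagged value (String.fromCharCode
           // truncates its argument to uint16) while leaving the smi tag clear.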
   2186 
   2187   StringCharFromCodeGenerator generator(code, eax);
   2188   generator.GenerateFast(masm());
   2189   __ ret(2 * kPointerSize);
   2190 
   2191   StubRuntimeCallHelper call_helper;
   2192   generator.GenerateSlow(masm(), call_helper);
   2193 
   2194   __ bind(&slow);
   2195   // We do not have to patch the receiver because the function makes no use of
   2196   // it.
   2197   GenerateJumpFunctionIgnoreReceiver(function);
   2198 
   2199   HandlerFrontendFooter(&miss);
   2200 
   2201   // Return the generated code.
   2202   return GetCode(type, name);
   2203 }
   2204 
   2205 
   2206 Handle<Code> CallStubCompiler::CompileMathFloorCall(
   2207     Handle<Object> object,
   2208     Handle<JSObject> holder,
   2209     Handle<Cell> cell,
   2210     Handle<JSFunction> function,
   2211     Handle<String> name,
   2212     Code::StubType type) {
   2213   if (!CpuFeatures::IsSupported(SSE2)) {
   2214     return Handle<Code>::null();
   2215   }
   2216 
   2217   CpuFeatureScope use_sse2(masm(), SSE2);
   2218 
   2219   const int argc = arguments().immediate();
   2220 
   2221   // If the object is not a JSObject or we got an unexpected number of
   2222   // arguments, bail out to the regular call.
   2223   if (!object->IsJSObject() || argc != 1) {
   2224     return Handle<Code>::null();
   2225   }
   2226 
   2227   Label miss;
   2228 
   2229   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
   2230   if (!cell.is_null()) {
   2231     ASSERT(cell->value() == *function);
   2232     GenerateLoadFunctionFromCell(cell, function, &miss);
   2233   }
   2234 
   2235   // Load the (only) argument into eax.
   2236   __ mov(eax, Operand(esp, 1 * kPointerSize));
   2237 
   2238   // Check if the argument is a smi.
   2239   Label smi;
   2240   STATIC_ASSERT(kSmiTag == 0);
   2241   __ JumpIfSmi(eax, &smi);
   2242 
   2243   // Check if the argument is a heap number and load its value into xmm0.
   2244   Label slow;
   2245   __ CheckMap(eax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK);
   2246   __ movsd(xmm0, FieldOperand(eax, HeapNumber::kValueOffset));
   2247 
   2248   // Check if the argument is strictly positive. Note this also
   2249   // discards NaN.
   2250   __ xorpd(xmm1, xmm1);
   2251   __ ucomisd(xmm0, xmm1);
   2252   __ j(below_equal, &slow);
   2253 
   2254   // Do a truncating conversion.
   2255   __ cvttsd2si(eax, Operand(xmm0));
   2256 
   2257   // Check if the result fits into a smi. Note this also checks for
   2258   // 0x80000000 which signals a failed conversion.
   2259   Label wont_fit_into_smi;
   2260   __ test(eax, Immediate(0xc0000000));
   2261   __ j(not_zero, &wont_fit_into_smi);
   2262 
   2263   // Smi tag and return.
   2264   __ SmiTag(eax);
   2265   __ bind(&smi);
   2266   __ ret(2 * kPointerSize);
   2267 
   2268   // Check if the argument is < 2^kMantissaBits.
   2269   Label already_round;
   2270   __ bind(&wont_fit_into_smi);
   2271   __ LoadPowerOf2(xmm1, ebx, HeapNumber::kMantissaBits);
   2272   __ ucomisd(xmm0, xmm1);
   2273   __ j(above_equal, &already_round);
   2274 
   2275   // Save a copy of the argument.
   2276   __ movaps(xmm2, xmm0);
   2277 
   2278   // Compute (argument + 2^kMantissaBits) - 2^kMantissaBits.
   2279   __ addsd(xmm0, xmm1);
   2280   __ subsd(xmm0, xmm1);
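           // Adding and then subtracting 2^52 pushes the fraction bits out of the
           // double's mantissa, leaving the argument rounded to the nearest
           // integer in xmm0. For example, 2.7 becomes 3.0 here and is corrected
           // to 2.0 below, while 2.2 becomes 2.0 and needs no correction.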
   2281 
   2282   // Compare the argument and the tentative result to get the right mask:
   2283   //   if xmm2 < xmm0:
   2284   //     xmm2 = 1...1
   2285   //   else:
   2286   //     xmm2 = 0...0
   2287   __ cmpltsd(xmm2, xmm0);
   2288 
   2289   // Subtract 1 if the argument was less than the tentative result.
   2290   __ LoadPowerOf2(xmm1, ebx, 0);
   2291   __ andpd(xmm1, xmm2);
   2292   __ subsd(xmm0, xmm1);
   2293 
   2294   // Return a new heap number.
   2295   __ AllocateHeapNumber(eax, ebx, edx, &slow);
   2296   __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
   2297   __ ret(2 * kPointerSize);
   2298 
   2299   // Return the argument (when it's an already round heap number).
   2300   __ bind(&already_round);
   2301   __ mov(eax, Operand(esp, 1 * kPointerSize));
   2302   __ ret(2 * kPointerSize);
   2303 
   2304   __ bind(&slow);
   2305   // We do not have to patch the receiver because the function makes no use of
   2306   // it.
   2307   GenerateJumpFunctionIgnoreReceiver(function);
   2308 
   2309   HandlerFrontendFooter(&miss);
   2310 
   2311   // Return the generated code.
   2312   return GetCode(type, name);
   2313 }
   2314 
   2315 
   2316 Handle<Code> CallStubCompiler::CompileMathAbsCall(
   2317     Handle<Object> object,
   2318     Handle<JSObject> holder,
   2319     Handle<Cell> cell,
   2320     Handle<JSFunction> function,
   2321     Handle<String> name,
   2322     Code::StubType type) {
   2323   const int argc = arguments().immediate();
   2324 
   2325   // If the object is not a JSObject or we got an unexpected number of
   2326   // arguments, bail out to the regular call.
   2327   if (!object->IsJSObject() || argc != 1) {
   2328     return Handle<Code>::null();
   2329   }
   2330 
   2331   Label miss;
   2332 
   2333   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
   2334   if (!cell.is_null()) {
   2335     ASSERT(cell->value() == *function);
   2336     GenerateLoadFunctionFromCell(cell, function, &miss);
   2337   }
   2338 
   2339   // Load the (only) argument into eax.
   2340   __ mov(eax, Operand(esp, 1 * kPointerSize));
   2341 
   2342   // Check if the argument is a smi.
   2343   Label not_smi;
   2344   STATIC_ASSERT(kSmiTag == 0);
   2345   __ JumpIfNotSmi(eax, &not_smi);
   2346 
    2347   // Branchless abs implementation; see
    2348   // http://graphics.stanford.edu/~seander/bithacks.html#IntegerAbs
   2349   // Set ebx to 1...1 (== -1) if the argument is negative, or to 0...0
   2350   // otherwise.
   2351   __ mov(ebx, eax);
   2352   __ sar(ebx, kBitsPerInt - 1);
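           // Classic two's-complement trick: with mask = x >> 31, |x| equals
           // (x ^ mask) - mask. It works directly on the tagged smi word, since
           // negating the word also negates the smi it encodes.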
   2353 
   2354   // Do bitwise not or do nothing depending on ebx.
   2355   __ xor_(eax, ebx);
   2356 
   2357   // Add 1 or do nothing depending on ebx.
   2358   __ sub(eax, ebx);
   2359 
   2360   // If the result is still negative, go to the slow case.
   2361   // This only happens for the most negative smi.
   2362   Label slow;
   2363   __ j(negative, &slow);
   2364 
   2365   // Smi case done.
   2366   __ ret(2 * kPointerSize);
   2367 
   2368   // Check if the argument is a heap number and load its exponent and
   2369   // sign into ebx.
   2370   __ bind(&not_smi);
   2371   __ CheckMap(eax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK);
   2372   __ mov(ebx, FieldOperand(eax, HeapNumber::kExponentOffset));
   2373 
   2374   // Check the sign of the argument. If the argument is positive,
   2375   // just return it.
   2376   Label negative_sign;
   2377   __ test(ebx, Immediate(HeapNumber::kSignMask));
   2378   __ j(not_zero, &negative_sign);
   2379   __ ret(2 * kPointerSize);
   2380 
   2381   // If the argument is negative, clear the sign, and return a new
   2382   // number.
   2383   __ bind(&negative_sign);
   2384   __ and_(ebx, ~HeapNumber::kSignMask);
   2385   __ mov(ecx, FieldOperand(eax, HeapNumber::kMantissaOffset));
   2386   __ AllocateHeapNumber(eax, edi, edx, &slow);
   2387   __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ebx);
   2388   __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
   2389   __ ret(2 * kPointerSize);
   2390 
   2391   __ bind(&slow);
   2392   // We do not have to patch the receiver because the function makes no use of
   2393   // it.
   2394   GenerateJumpFunctionIgnoreReceiver(function);
   2395 
   2396   HandlerFrontendFooter(&miss);
   2397 
   2398   // Return the generated code.
   2399   return GetCode(type, name);
   2400 }
   2401 
   2402 
   2403 Handle<Code> CallStubCompiler::CompileFastApiCall(
   2404     const CallOptimization& optimization,
   2405     Handle<Object> object,
   2406     Handle<JSObject> holder,
   2407     Handle<Cell> cell,
   2408     Handle<JSFunction> function,
   2409     Handle<String> name) {
   2410   ASSERT(optimization.is_simple_api_call());
    2411   // Bail out if the object is a global object, as we don't want to repatch
    2412   // it to the global receiver.
   2413   if (object->IsGlobalObject()) return Handle<Code>::null();
   2414   if (!cell.is_null()) return Handle<Code>::null();
   2415   if (!object->IsJSObject()) return Handle<Code>::null();
   2416   int depth = optimization.GetPrototypeDepthOfExpectedType(
   2417       Handle<JSObject>::cast(object), holder);
   2418   if (depth == kInvalidProtoDepth) return Handle<Code>::null();
   2419 
   2420   Label miss, miss_before_stack_reserved;
   2421 
   2422   GenerateNameCheck(name, &miss_before_stack_reserved);
   2423 
   2424   // Get the receiver from the stack.
   2425   const int argc = arguments().immediate();
   2426   __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
   2427 
   2428   // Check that the receiver isn't a smi.
   2429   __ JumpIfSmi(edx, &miss_before_stack_reserved);
   2430 
   2431   Counters* counters = isolate()->counters();
   2432   __ IncrementCounter(counters->call_const(), 1);
   2433   __ IncrementCounter(counters->call_const_fast_api(), 1);
   2434 
   2435   // Allocate space for v8::Arguments implicit values. Must be initialized
   2436   // before calling any runtime function.
   2437   __ sub(esp, Immediate(kFastApiCallArguments * kPointerSize));
   2438 
    2439   // Check that the maps haven't changed, and find the holder as a side effect.
   2440   CheckPrototypes(IC::CurrentTypeOf(object, isolate()), edx, holder,
   2441                   ebx, eax, edi, name, depth, &miss);
   2442 
    2443   // Move the return address to the top of the stack.
   2444   __ mov(eax, Operand(esp, kFastApiCallArguments * kPointerSize));
   2445   __ mov(Operand(esp, 0 * kPointerSize), eax);
   2446 
    2447   // esp[2 * kPointerSize] is uninitialized, esp[3 * kPointerSize] contains
    2448   // a duplicate of the return address and will be overwritten.
   2449   GenerateFastApiCall(masm(), optimization, argc);
   2450 
   2451   __ bind(&miss);
   2452   __ add(esp, Immediate(kFastApiCallArguments * kPointerSize));
   2453 
   2454   HandlerFrontendFooter(&miss_before_stack_reserved);
   2455 
   2456   // Return the generated code.
   2457   return GetCode(function);
   2458 }
   2459 
   2460 
   2461 void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
   2462   Label success;
   2463   // Check that the object is a boolean.
   2464   __ cmp(object, factory()->true_value());
   2465   __ j(equal, &success);
   2466   __ cmp(object, factory()->false_value());
   2467   __ j(not_equal, miss);
   2468   __ bind(&success);
   2469 }
   2470 
   2471 
   2472 void CallStubCompiler::PatchGlobalProxy(Handle<Object> object) {
   2473   if (object->IsGlobalObject()) {
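             // Calls on the global object must see the global receiver (the global
             // proxy) as the receiver, so overwrite the receiver slot on the stack.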
   2474     const int argc = arguments().immediate();
   2475     const int receiver_offset = (argc + 1) * kPointerSize;
   2476     __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
   2477     __ mov(Operand(esp, receiver_offset), edx);
   2478   }
   2479 }
   2480 
   2481 
   2482 Register CallStubCompiler::HandlerFrontendHeader(Handle<Object> object,
   2483                                                  Handle<JSObject> holder,
   2484                                                  Handle<Name> name,
   2485                                                  CheckType check,
   2486                                                  Label* miss) {
   2487   GenerateNameCheck(name, miss);
   2488 
   2489   Register reg = edx;
   2490 
   2491   const int argc = arguments().immediate();
   2492   const int receiver_offset = (argc + 1) * kPointerSize;
   2493   __ mov(reg, Operand(esp, receiver_offset));
   2494 
   2495   // Check that the receiver isn't a smi.
   2496   if (check != NUMBER_CHECK) {
   2497     __ JumpIfSmi(reg, miss);
   2498   }
   2499 
    2500   // Make sure that it's okay not to patch the on-stack receiver unless
    2501   // we're doing a receiver map check.
   2502   ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
   2503   switch (check) {
   2504     case RECEIVER_MAP_CHECK:
   2505       __ IncrementCounter(isolate()->counters()->call_const(), 1);
   2506 
   2507       // Check that the maps haven't changed.
   2508       reg = CheckPrototypes(IC::CurrentTypeOf(object, isolate()), reg, holder,
   2509                             ebx, eax, edi, name, miss);
   2510 
   2511       break;
   2512 
   2513     case STRING_CHECK: {
   2514       // Check that the object is a string.
   2515       __ CmpObjectType(reg, FIRST_NONSTRING_TYPE, eax);
   2516       __ j(above_equal, miss);
   2517       // Check that the maps starting from the prototype haven't changed.
   2518       GenerateDirectLoadGlobalFunctionPrototype(
   2519           masm(), Context::STRING_FUNCTION_INDEX, eax, miss);
   2520       break;
   2521     }
   2522     case SYMBOL_CHECK: {
   2523       // Check that the object is a symbol.
   2524       __ CmpObjectType(reg, SYMBOL_TYPE, eax);
   2525       __ j(not_equal, miss);
   2526       // Check that the maps starting from the prototype haven't changed.
   2527       GenerateDirectLoadGlobalFunctionPrototype(
   2528           masm(), Context::SYMBOL_FUNCTION_INDEX, eax, miss);
   2529       break;
   2530     }
   2531     case NUMBER_CHECK: {
   2532       Label fast;
   2533       // Check that the object is a smi or a heap number.
   2534       __ JumpIfSmi(reg, &fast);
   2535       __ CmpObjectType(reg, HEAP_NUMBER_TYPE, eax);
   2536       __ j(not_equal, miss);
   2537       __ bind(&fast);
   2538       // Check that the maps starting from the prototype haven't changed.
   2539       GenerateDirectLoadGlobalFunctionPrototype(
   2540           masm(), Context::NUMBER_FUNCTION_INDEX, eax, miss);
   2541       break;
   2542     }
   2543     case BOOLEAN_CHECK: {
   2544       GenerateBooleanCheck(reg, miss);
   2545       // Check that the maps starting from the prototype haven't changed.
   2546       GenerateDirectLoadGlobalFunctionPrototype(
   2547           masm(), Context::BOOLEAN_FUNCTION_INDEX, eax, miss);
   2548       break;
   2549     }
   2550   }
   2551 
   2552   if (check != RECEIVER_MAP_CHECK) {
   2553     Handle<Object> prototype(object->GetPrototype(isolate()), isolate());
   2554     reg = CheckPrototypes(
   2555         IC::CurrentTypeOf(prototype, isolate()),
   2556         eax, holder, ebx, edx, edi, name, miss);
   2557   }
   2558 
   2559   return reg;
   2560 }
   2561 
   2562 
   2563 void CallStubCompiler::GenerateJumpFunction(Handle<Object> object,
   2564                                             Register function,
   2565                                             Label* miss) {
   2566   // Check that the function really is a function.
   2567   GenerateFunctionCheck(function, ebx, miss);
   2568 
   2569   if (!function.is(edi)) __ mov(edi, function);
   2570   PatchGlobalProxy(object);
   2571 
   2572   // Invoke the function.
   2573   __ InvokeFunction(edi, arguments(), JUMP_FUNCTION,
   2574                     NullCallWrapper(), call_kind());
   2575 }
   2576 
   2577 
   2578 Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
   2579                                                       Handle<JSObject> holder,
   2580                                                       Handle<Name> name) {
   2581   Label miss;
   2582 
   2583   GenerateNameCheck(name, &miss);
   2584 
   2585   // Get the number of arguments.
   2586   const int argc = arguments().immediate();
   2587 
   2588   LookupResult lookup(isolate());
   2589   LookupPostInterceptor(holder, name, &lookup);
   2590 
   2591   // Get the receiver from the stack.
   2592   __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
   2593 
   2594   CallInterceptorCompiler compiler(this, arguments(), ecx, extra_state());
   2595   compiler.Compile(masm(), object, holder, name, &lookup, edx, ebx, edi, eax,
   2596                    &miss);
   2597 
  // Restore the receiver; the interceptor path may have clobbered edx.
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  GenerateJumpFunction(object, eax, &miss);

  HandlerFrontendFooter(&miss);

  // Return the generated code.
  return GetCode(Code::FAST, name);
}


Handle<Code> CallStubCompiler::CompileCallGlobal(
    Handle<JSObject> object,
    Handle<GlobalObject> holder,
    Handle<PropertyCell> cell,
    Handle<JSFunction> function,
    Handle<Name> name) {
  if (HasCustomCallGenerator(function)) {
    Handle<Code> code = CompileCustomCall(
        object, holder, cell, function, Handle<String>::cast(name),
        Code::NORMAL);
    // A null handle means bail out to the regular compiler code below.
    if (!code.is_null()) return code;
  }

  Label miss;
  HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
  // Potentially loads a closure that matches the shared function info of the
  // function, rather than the function itself.
  GenerateLoadFunctionFromCell(cell, function, &miss);
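  // GenerateLoadFunctionFromCell leaves the matching closure in edi, which
  // is passed along to the jump below.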
  GenerateJumpFunction(object, edi, function);

  HandlerFrontendFooter(&miss);

  // Return the generated code.
  return GetCode(Code::NORMAL, name);
}


Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  HandlerFrontend(IC::CurrentTypeOf(object, isolate()),
                  receiver(), holder, name);

  __ pop(scratch1());  // remove the return address
  __ push(receiver());
  __ Push(callback);
  __ Push(name);
  __ push(value());
  __ push(scratch1());  // restore return address
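  // Stack layout for the runtime call, from the top down:
  //  -- esp[0]  : return address
  //  -- esp[4]  : value
  //  -- esp[8]  : name
  //  -- esp[12] : callback
  //  -- esp[16] : receiver
  // The tail call below pops these four arguments and returns one value.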

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 4, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    const CallOptimization& call_optimization) {
  HandlerFrontend(IC::CurrentTypeOf(object, isolate()),
                  receiver(), holder, name);

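  // The CallOptimization describes a simple API setter, so the store can be
  // compiled as a direct call into the API function, passing the single
  // value argument, instead of going through the runtime.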
  Register values[] = { value() };
  GenerateFastApiCall(
      masm(), call_optimization, receiver(), scratch1(),
      scratch2(), this->name(), 1, values);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


#undef __
#define __ ACCESS_MASM(masm)


void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save value register, so we can restore it later.
    __ push(eax);

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      __ push(edx);
      __ push(eax);
      ParameterCount actual(1);
      ParameterCount expected(setter);
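      // InvokeFunction goes through the arguments adaptor if the setter's
      // formal parameter count differs from the one argument passed here.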
      __ InvokeFunction(setter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(eax);

    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  __ pop(scratch1());  // remove the return address
  __ push(receiver());
  __ push(this->name());
  __ push(value());
  __ push(scratch1());  // restore return address
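  // Stack layout for the runtime call, from the top down:
  //  -- esp[0]  : return address
  //  -- esp[4]  : value
  //  -- esp[8]  : name
  //  -- esp[12] : receiver
  // The tail call below pops these three arguments and returns one value.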

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;
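  // A smi receiver has no map and so cannot match any of the maps below.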
  __ JumpIfSmi(receiver(), &miss, Label::kNear);
  __ mov(scratch1(), FieldOperand(receiver(), HeapObject::kMapOffset));
  for (int i = 0; i < receiver_maps->length(); ++i) {
    __ cmp(scratch1(), receiver_maps->at(i));
    if (transitioned_maps->at(i).is_null()) {
      __ j(equal, handler_stubs->at(i));
    } else {
      Label next_map;
      __ j(not_equal, &next_map, Label::kNear);
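      // An elements-transition store: hand the target map to the handler
      // stub in the transition_map() register before jumping to it.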
      __ mov(transition_map(), Immediate(transitioned_maps->at(i)));
      __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
      __ bind(&next_map);
    }
  }
  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetICCode(
      kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
}


Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<Type> type,
                                                      Handle<JSObject> last,
                                                      Handle<Name> name) {
  NonexistentHandlerFrontend(type, last, name);

  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ mov(eax, isolate()->factory()->undefined_value());
  __ ret(0);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Register* LoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
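  // ia32 has no spare fourth scratch register, hence no_reg in the last slot.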
  static Register registers[] = { edx, ecx, ebx, eax, edi, no_reg };
  return registers;
}


Register* KeyedLoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { edx, ecx, ebx, eax, edi, no_reg };
  return registers;
}


Register* StoreStubCompiler::registers() {
  // receiver, name, value, scratch1, scratch2, scratch3.
  static Register registers[] = { edx, ecx, eax, ebx, edi, no_reg };
  return registers;
}


Register* KeyedStoreStubCompiler::registers() {
  // receiver, name, value, scratch1, scratch2, scratch3.
  static Register registers[] = { edx, ecx, eax, ebx, edi, no_reg };
  return registers;
}


void KeyedLoadStubCompiler::GenerateNameCheck(Handle<Name> name,
                                              Register name_reg,
                                              Label* miss) {
  __ cmp(name_reg, Immediate(name));
  __ j(not_equal, miss);
}


void KeyedStoreStubCompiler::GenerateNameCheck(Handle<Name> name,
                                               Register name_reg,
                                               Label* miss) {
  __ cmp(name_reg, Immediate(name));
  __ j(not_equal, miss);
}


#undef __
#define __ ACCESS_MASM(masm)


void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      __ push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<Type> type,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label miss;

  HandlerFrontendHeader(type, receiver(), global, name, &miss);
  // Get the value from the cell.
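  // When generating code for the snapshot, the cell's address cannot be
  // embedded as a raw immediate, so load the cell as an embedded object and
  // then read its value field.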
  if (Serializer::enabled()) {
    __ mov(eax, Immediate(cell));
    __ mov(eax, FieldOperand(eax, PropertyCell::kValueOffset));
  } else {
    __ mov(eax, Operand::ForCell(cell));
  }

  // Check for a deleted property if the property can actually be deleted.
  if (!is_dont_delete) {
    __ cmp(eax, factory()->the_hole_value());
    __ j(equal, &miss);
  } else if (FLAG_debug_code) {
    __ cmp(eax, factory()->the_hole_value());
    __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
  }

  HandlerFrontendFooter(name, &miss);

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1);
  // The code above already loads the result into the return register.
  __ ret(0);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}


Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    TypeHandleList* types,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY) {
    GenerateNameCheck(name, this->name(), &miss);
  }

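  // A smi receiver has no map to compare against; it can only be handled by
  // a Number handler, so send it to number_case (or miss if there is none).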
  Label number_case;
  Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
  __ JumpIfSmi(receiver(), smi_target);

  Register map_reg = scratch1();
  __ mov(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
  int receiver_count = types->length();
  int number_of_handled_maps = 0;
  for (int current = 0; current < receiver_count; ++current) {
    Handle<Type> type = types->at(current);
    Handle<Map> map = IC::TypeToMap(*type, isolate());
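    // Deprecated maps are skipped; objects still carrying one will take the
    // miss path and be migrated to an up-to-date map by the runtime.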
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      __ cmp(map_reg, map);
      if (type->Is(Type::Number())) {
        ASSERT(!number_case.is_unused());
        __ bind(&number_case);
      }
      __ j(equal, handlers->at(current));
    }
  }
  ASSERT(number_of_handled_maps != 0);

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  InlineCacheState state =
      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
  return GetICCode(kind(), type, name, state);
}


#undef __
#define __ ACCESS_MASM(masm)


void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label slow, miss;

  // This stub is meant to be tail-jumped to; the receiver must already
  // have been verified by the caller not to be a smi.
  __ JumpIfNotSmi(ecx, &miss);
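  // Untag the key to get the integer index used to probe the dictionary.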
  __ mov(ebx, ecx);
  __ SmiUntag(ebx);
  __ mov(eax, FieldOperand(edx, JSObject::kElementsOffset));

  // Push receiver on the stack to free up a register for the dictionary
  // probing.
  __ push(edx);
  __ LoadFromNumberDictionary(&slow, eax, ecx, ebx, edx, edi, eax);
  // Pop receiver before returning.
  __ pop(edx);
  __ ret(0);

  __ bind(&slow);
  __ pop(edx);

  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  __ bind(&miss);
  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32