// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_IA32

#include "bootstrapper.h"
#include "codegen.h"
#include "cpu-profiler.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      has_frame_(false) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
    Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
    mov(destination, value);
    return;
  }
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(destination, Immediate(index));
  mov(destination, Operand::StaticArray(destination,
                                        times_pointer_size,
                                        roots_array_start));
}


void MacroAssembler::StoreRoot(Register source,
                               Register scratch,
                               Heap::RootListIndex index) {
  ASSERT(Heap::RootCanBeWrittenAfterInitialization(index));
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
      source);
}


void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  cmp(with, Operand::StaticArray(scratch,
                                 times_pointer_size,
                                 roots_array_start));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  ASSERT(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  ASSERT(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}


void MacroAssembler::InNewSpace(
    Register object,
    Register scratch,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == equal || cc == not_equal);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Check that we can use a test_b.
  ASSERT(MemoryChunk::IN_FROM_SPACE < 8);
  ASSERT(MemoryChunk::IN_TO_SPACE < 8);
  int mask = (1 << MemoryChunk::IN_FROM_SPACE)
           | (1 << MemoryChunk::IN_TO_SPACE);
  // If non-zero, the page belongs to new-space.
  test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
         static_cast<uint8_t>(mask));
  j(cc, condition_met, condition_met_distance);
}
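
// A hedged C sketch of the check above: pages are aligned to
// Page::kPageAlignmentMask + 1 bytes, so masking any object address yields
// the page header, whose flags word records which space the page belongs
// to (field name here is illustrative; the real layout is in spaces.h):
//
//   MemoryChunk* page = (MemoryChunk*)(object & ~Page::kPageAlignmentMask);
//   bool in_new_space = (page->flags &
//       ((1 << IN_FROM_SPACE) | (1 << IN_TO_SPACE))) != 0;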


void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr,
    Register scratch,
    SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Check for end of buffer, and call the stub if the buffer overflowed.
  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow =
      StoreBufferOverflowStub(save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
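
// A hedged C sketch of the fast path above (the helper name is
// illustrative; the real layout is defined by StoreBuffer):
//
//   *store_buffer_top++ = addr;  // append the slot address
//   if ((uintptr_t)store_buffer_top & StoreBuffer::kStoreBufferOverflowBit) {
//     StoreBufferOverflow();     // buffer is full, flush via the stub
//   }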


void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister scratch_reg,
                                        Register result_reg) {
  Label done;
  Label conv_failure;
  pxor(scratch_reg, scratch_reg);
  cvtsd2si(result_reg, input_reg);
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  cmp(result_reg, Immediate(0x80000000));
  j(equal, &conv_failure, Label::kNear);
  mov(result_reg, Immediate(0));
  setcc(above, result_reg);
  sub(result_reg, Immediate(1));
  and_(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  Set(result_reg, Immediate(0));
  ucomisd(input_reg, scratch_reg);
  j(below, &done, Label::kNear);
  Set(result_reg, Immediate(255));
  bind(&done);
}


void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
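
// Branch-light saturation, in C (a sketch): anything already in [0, 255]
// falls through at the first test; otherwise the sign picks the bound.
//
//   if (reg & 0xFFFFFF00) reg = (reg < 0) ? 0 : 255;
//
// setcc(negative) leaves 1 for negative values, which dec_b turns into
// 0x00; positive out-of-range values leave 0, which dec_b turns into 0xFF.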


static double kUint32Bias =
    static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;


void MacroAssembler::LoadUint32(XMMRegister dst,
                                Register src,
                                XMMRegister scratch) {
  Label done;
  cmp(src, Immediate(0));
  movdbl(scratch,
         Operand(reinterpret_cast<int32_t>(&kUint32Bias), RelocInfo::NONE32));
  cvtsi2sd(dst, src);
  j(not_sign, &done, Label::kNear);
  addsd(dst, scratch);
  bind(&done);
}
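
// The bias trick in C (a sketch): cvtsi2sd treats src as signed, so values
// with the top bit set come out exactly 2^32 too small and are corrected by
// adding kUint32Bias (2^32):
//
//   double d = (double)(int32_t)src;
//   if ((int32_t)src < 0) d += 4294967296.0;  // kUint32Bias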


void MacroAssembler::RecordWriteArray(Register object,
                                      Register value,
                                      Register index,
                                      SaveFPRegsMode save_fp,
                                      RememberedSetAction remembered_set_action,
                                      SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    ASSERT_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(index, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    Label ok;
    test_b(dst, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(dst, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWriteForMap(
    Register object,
    Handle<Map> map,
    Register scratch1,
    Register scratch2,
    SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  AssertNotSmi(object);

  if (!FLAG_incremental_marking) {
    return;
  }

  // A single check of the interesting-pages flag on the map's page suffices,
  // since the flag is only set during incremental collection, and then the
  // flag on the source object's page is guaranteed to be set as well.  This
  // optimization relies on the fact that maps can never be in new space.
  ASSERT(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  // Delay the initialization of |address| and |value| for the stub until it
  // is known that they will be needed. Up until this point their values are
  // not needed, since they are embedded in the operands of instructions that
  // need them.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));
  mov(value, Immediate(map));
  RecordWriteStub stub(object, value, address, OMIT_REMEMBERED_SET, save_fp);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value,
                                 SaveFPRegsMode fp_mode,
                                 RememberedSetAction remembered_set_action,
                                 SmiCheck smi_check) {
  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  CheckPageFlag(value,
                value,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(1);
  call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
}
#endif


void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, dst);  // Shorter than mov.
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
  static const int kMaxImmediateBits = 17;
  if (!RelocInfo::IsNone(x.rmode_)) return false;
  return !is_intn(x.x_, kMaxImmediateBits);
}


void MacroAssembler::SafeSet(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Set(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Set(dst, x);
  }
}


void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}
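
// SafeSet and SafePush are a JIT-spraying mitigation (a sketch of the idea;
// the cookie comes from jit_cookie()): a large, attacker-supplied constant
// is never emitted verbatim into the code stream.
//
//   emit(x ^ cookie);  // code memory only ever holds the masked value
//   xor(dst, cookie);  // the real value is recreated at run time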


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(above, fail, distance);
}


void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch1,
    XMMRegister scratch2,
    Label* fail,
    bool specialize_for_processor,
    int elements_offset) {
  Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, canonicalize NaN.
  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
  cmp(FieldOperand(maybe_number, offset),
      Immediate(kNaNOrInfinityLowerBoundUpper32));
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  ExternalReference canonical_nan_reference =
      ExternalReference::address_of_canonical_non_hole_nan();
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatureScope use_sse2(this, SSE2);
    movdbl(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
    bind(&have_double_value);
    movdbl(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize - elements_offset),
           scratch2);
  } else {
    fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
    bind(&have_double_value);
    fstp_d(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize - elements_offset));
  }
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
  // it's an Infinity, and the non-NaN code path applies.
  j(greater, &is_nan, Label::kNear);
  cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan);
  bind(&is_nan);
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatureScope use_sse2(this, SSE2);
    movdbl(scratch2, Operand::StaticVariable(canonical_nan_reference));
  } else {
    fld_d(Operand::StaticVariable(canonical_nan_reference));
  }
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  // Preserve original value.
  mov(scratch1, maybe_number);
  SmiUntag(scratch1);
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatureScope fscope(this, SSE2);
    cvtsi2sd(scratch2, scratch1);
    movdbl(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize - elements_offset),
           scratch2);
  } else {
    push(scratch1);
    fild_s(Operand(esp, 0));
    pop(scratch1);
    fstp_d(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize - elements_offset));
  }
  bind(&done);
}


void MacroAssembler::CompareMap(Register obj,
                                Handle<Map> map,
                                Label* early_success) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  Label success;
  CompareMap(obj, map, &success);
  j(not_equal, fail);
  bind(&success);
}


void MacroAssembler::DispatchMap(Register obj,
                                 Register unused,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(equal, success);

  bind(&fail);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, static_cast<uint8_t>(LAST_NAME_TYPE));
  return below_equal;
}


void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}


void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  cmp(scratch,
      LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  j(above, fail);
}


void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    fstp(0);
  } else {
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}


void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandNotANumber);
    bind(&ok);
  }
}


void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, kOperandIsNotASmi);
  }
}


void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, kOperandIsNotAString);
  }
}


void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}


void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmi);
  }
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  leave();
}


void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset  == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  ExternalReference context_address(Isolate::kContextAddress,
                                    isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}
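
// The resulting exit frame, relative to ebp (matching the asserts above):
//
//   ebp + 2 * kPointerSize   caller SP
//   ebp + 1 * kPointerSize   caller PC (return address)
//   ebp + 0                  caller FP (saved ebp)
//   ebp - 1 * kPointerSize   saved entry sp (patched in the epilogue)
//   ebp - 2 * kPointerSize   code object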


void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    CpuFeatureScope scope(this, SSE2);
    int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
    sub(esp, Immediate(space));
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}


void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}


void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    CpuFeatureScope scope(this, SSE2);
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(ecx);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::LeaveApiExitFrame() {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // We will build up the handler from the bottom by pushing on the stack.
  // First push the frame pointer and context.
  if (kind == StackHandler::JS_ENTRY) {
    // The frame pointer does not point to a JS frame so we save NULL for
    // ebp. We expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(0));  // NULL frame pointer.
    push(Immediate(Smi::FromInt(0)));  // No context.
  } else {
    push(ebp);
    push(esi);
  }
  // Push the state and the code object.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  push(Immediate(state));
  Push(CodeObject());

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));
  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}
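
// The finished handler, from esp upward (matching the STATIC_ASSERTs above):
//
//   [esp + 0 * kPointerSize]  next handler (previous Isolate::kHandlerAddress)
//   [esp + 1 * kPointerSize]  code object
//   [esp + 2 * kPointerSize]  state (kind and handler index)
//   [esp + 3 * kPointerSize]  context (esi, or Smi 0 for JS_ENTRY)
//   [esp + 4 * kPointerSize]  frame pointer (ebp, or NULL for JS_ENTRY)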


void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it.  The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // eax = exception, edi = code object, edx = state.
  mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
  shr(edx, StackHandler::kKindWidth);
  mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
  SmiUntag(edx);
  lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
  jmp(edi);
}


void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));
  // Restore the next handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Restore the context and frame pointer.
  pop(esi);  // Context.
  pop(ebp);  // Frame pointer.

  // If the handler is a JS frame, restore the context to the frame.
  // (kind == ENTRY) == (ebp == 0) == (esi == 0), so we could test either
  // ebp or esi.
  Label skip;
  test(esi, esi);
  j(zero, &skip, Label::kNear);
  mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  bind(&skip);

  JumpToHandlerEntry();
}


void MacroAssembler::ThrowUncatchable(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the top ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
  test(Operand(esp, StackHandlerConstants::kStateOffset),
       Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to next handler past the top ENTRY handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Clear the context pointer and frame pointer (0 was saved in the handler).
  pop(esi);
  pop(ebp);

  JumpToHandlerEntry();
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch1));
  ASSERT(!holder_reg.is(scratch2));
  ASSERT(!scratch1.is(scratch2));

  // Load current lexical context from the stack frame.
  mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch1, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  int offset =
      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, offset));
  mov(scratch1, FieldOperand(scratch1, GlobalObject::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Read the first word and compare to native_context_map.
    cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch2,
      FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    cmp(scratch2, isolate()->factory()->null_value());
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map().
    cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, token_offset));
  cmp(scratch1, FieldOperand(scratch2, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}


// Compute the hash code from the untagged key.  This must be kept in sync
// with ComputeIntegerHash in utils.h.
//
// Note: r0 will contain hash code
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (Serializer::enabled()) {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
}
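
// For reference, a sketch of the C version this must stay in sync with
// (ComputeIntegerHash in utils.h), assembled from the step comments above:
//
//   uint32_t hash = key ^ seed;
//   hash = ~hash + (hash << 15);
//   hash = hash ^ (hash >> 12);
//   hash = hash + (hash << 2);
//   hash = hash ^ (hash >> 4);
//   hash = hash * 2057;
//   hash = hash ^ (hash >> 16);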


void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key      - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask.
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    ASSERT(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kProbes - 1)) {
      j(equal, &done);
    } else {
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a normal property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  ASSERT_EQ(NORMAL, 0);
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
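
// The unrolled probe loop above, as a C sketch (kEntrySize == 3;
// probe_offset is illustrative and follows the "(hash + i + i * i) & mask"
// comment, with GetProbeOffset supplying the per-step increment):
//
//   for (int i = 0; i < kProbes; i++) {
//     uint32_t index = (hash + probe_offset(i)) & capacity_mask;
//     if (keys[index * kEntrySize] == key) goto found;
//   }
//   goto miss;  // all four probes missed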


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(allocation_top));
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(allocation_top));
  } else {
    mov(scratch, Immediate(allocation_top));
    mov(result, Operand(scratch, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}


void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  Register top_reg = result_end.is_valid() ? result_end : result;
  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  j(carry, gc_required);
  cmp(top_reg, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch, flags);

  // Tag result if requested.
  bool tag_result = (flags & TAG_OBJECT) != 0;
  if (top_reg.is(result)) {
    if (tag_result) {
      sub(result, Immediate(object_size - kHeapObjectTag));
    } else {
      sub(result, Immediate(object_size));
    }
  } else if (tag_result) {
    ASSERT(kHeapObjectTag == 1);
    inc(result);
  }
}
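
// The fast path above is bump-pointer allocation, roughly (a sketch; top
// and limit live in the isolate and are reached via ExternalReference):
//
//   result = top;
//   if (top + object_size > limit) goto gc_required;  // overflow also bails
//   top += object_size;
//   if (flags & TAG_OBJECT) result += kHeapObjectTag;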
   1357 
   1358 
   1359 void MacroAssembler::Allocate(int header_size,
   1360                               ScaleFactor element_size,
   1361                               Register element_count,
   1362                               RegisterValueType element_count_type,
   1363                               Register result,
   1364                               Register result_end,
   1365                               Register scratch,
   1366                               Label* gc_required,
   1367                               AllocationFlags flags) {
   1368   ASSERT((flags & SIZE_IN_WORDS) == 0);
   1369   if (!FLAG_inline_new) {
   1370     if (emit_debug_code()) {
   1371       // Trash the registers to simulate an allocation failure.
   1372       mov(result, Immediate(0x7091));
   1373       mov(result_end, Immediate(0x7191));
   1374       if (scratch.is_valid()) {
   1375         mov(scratch, Immediate(0x7291));
   1376       }
   1377       // Register element_count is not modified by the function.
   1378     }
   1379     jmp(gc_required);
   1380     return;
   1381   }
   1382   ASSERT(!result.is(result_end));
   1383 
   1384   // Load address of new object into result.
   1385   LoadAllocationTopHelper(result, scratch, flags);
   1386 
   1387   ExternalReference allocation_limit =
   1388       AllocationUtils::GetAllocationLimitReference(isolate(), flags);
   1389 
   1390   // Align the next allocation. Storing the filler map without checking top is
   1391   // safe in new-space because the limit of the heap is aligned there.
   1392   if ((flags & DOUBLE_ALIGNMENT) != 0) {
   1393     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
   1394     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
   1395     Label aligned;
   1396     test(result, Immediate(kDoubleAlignmentMask));
   1397     j(zero, &aligned, Label::kNear);
   1398     if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
   1399       cmp(result, Operand::StaticVariable(allocation_limit));
   1400       j(above_equal, gc_required);
   1401     }
   1402     mov(Operand(result, 0),
   1403         Immediate(isolate()->factory()->one_pointer_filler_map()));
   1404     add(result, Immediate(kDoubleSize / 2));
   1405     bind(&aligned);
   1406   }
   1407 
   1408   // Calculate new top and bail out if space is exhausted.
   1409   // We assume that element_count*element_size + header_size does not
   1410   // overflow.
   1411   if (element_count_type == REGISTER_VALUE_IS_SMI) {
   1412     STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
   1413     STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
   1414     STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
   1415     ASSERT(element_size >= times_2);
   1416     ASSERT(kSmiTagSize == 1);
   1417     element_size = static_cast<ScaleFactor>(element_size - 1);
   1418   } else {
   1419     ASSERT(element_count_type == REGISTER_VALUE_IS_INT32);
   1420   }
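           // A smi count already holds the index doubled (n is encoded as
           // n << kSmiTagSize), so dropping the scale factor by one power of
           // two makes the lea below still compute
           // element_count * element_size + header_size.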
   1421   lea(result_end, Operand(element_count, element_size, header_size));
   1422   add(result_end, result);
   1423   j(carry, gc_required);
   1424   cmp(result_end, Operand::StaticVariable(allocation_limit));
   1425   j(above, gc_required);
   1426 
   1427   if ((flags & TAG_OBJECT) != 0) {
   1428     ASSERT(kHeapObjectTag == 1);
   1429     inc(result);
   1430   }
   1431 
   1432   // Update allocation top.
   1433   UpdateAllocationTopHelper(result_end, scratch, flags);
   1434 }
   1435 
   1436 
   1437 void MacroAssembler::Allocate(Register object_size,
   1438                               Register result,
   1439                               Register result_end,
   1440                               Register scratch,
   1441                               Label* gc_required,
   1442                               AllocationFlags flags) {
   1443   ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
   1444   if (!FLAG_inline_new) {
   1445     if (emit_debug_code()) {
   1446       // Trash the registers to simulate an allocation failure.
   1447       mov(result, Immediate(0x7091));
   1448       mov(result_end, Immediate(0x7191));
   1449       if (scratch.is_valid()) {
   1450         mov(scratch, Immediate(0x7291));
   1451       }
   1452       // object_size is left unchanged by this function.
   1453     }
   1454     jmp(gc_required);
   1455     return;
   1456   }
   1457   ASSERT(!result.is(result_end));
   1458 
   1459   // Load address of new object into result.
   1460   LoadAllocationTopHelper(result, scratch, flags);
   1461 
   1462   ExternalReference allocation_limit =
   1463       AllocationUtils::GetAllocationLimitReference(isolate(), flags);
   1464 
   1465   // Align the next allocation. Storing the filler map without checking top is
   1466   // safe in new-space because the limit of the heap is aligned there.
   1467   if ((flags & DOUBLE_ALIGNMENT) != 0) {
   1468     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
   1469     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
   1470     Label aligned;
   1471     test(result, Immediate(kDoubleAlignmentMask));
   1472     j(zero, &aligned, Label::kNear);
   1473     if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
   1474       cmp(result, Operand::StaticVariable(allocation_limit));
   1475       j(above_equal, gc_required);
   1476     }
   1477     mov(Operand(result, 0),
   1478         Immediate(isolate()->factory()->one_pointer_filler_map()));
   1479     add(result, Immediate(kDoubleSize / 2));
   1480     bind(&aligned);
   1481   }
   1482 
   1483   // Calculate new top and bail out if space is exhausted.
   1484   if (!object_size.is(result_end)) {
   1485     mov(result_end, object_size);
   1486   }
   1487   add(result_end, result);
   1488   j(carry, gc_required);
   1489   cmp(result_end, Operand::StaticVariable(allocation_limit));
   1490   j(above, gc_required);
   1491 
   1492   // Tag result if requested.
   1493   if ((flags & TAG_OBJECT) != 0) {
   1494     ASSERT(kHeapObjectTag == 1);
   1495     inc(result);
   1496   }
   1497 
   1498   // Update allocation top.
   1499   UpdateAllocationTopHelper(result_end, scratch, flags);
   1500 }
   1501 
   1502 
   1503 void MacroAssembler::UndoAllocationInNewSpace(Register object) {
   1504   ExternalReference new_space_allocation_top =
   1505       ExternalReference::new_space_allocation_top_address(isolate());
   1506 
   1507   // Make sure the object has no tag before resetting top.
   1508   and_(object, Immediate(~kHeapObjectTagMask));
   1509 #ifdef DEBUG
   1510   cmp(object, Operand::StaticVariable(new_space_allocation_top));
   1511   Check(below, kUndoAllocationOfNonAllocatedMemory);
   1512 #endif
   1513   mov(Operand::StaticVariable(new_space_allocation_top), object);
   1514 }
   1515 
   1516 
   1517 void MacroAssembler::AllocateHeapNumber(Register result,
   1518                                         Register scratch1,
   1519                                         Register scratch2,
   1520                                         Label* gc_required) {
   1521   // Allocate heap number in new space.
   1522   Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
   1523            TAG_OBJECT);
   1524 
   1525   // Set the map.
   1526   mov(FieldOperand(result, HeapObject::kMapOffset),
   1527       Immediate(isolate()->factory()->heap_number_map()));
   1528 }
   1529 
   1530 
   1531 void MacroAssembler::AllocateTwoByteString(Register result,
   1532                                            Register length,
   1533                                            Register scratch1,
   1534                                            Register scratch2,
   1535                                            Register scratch3,
   1536                                            Label* gc_required) {
   1537   // Calculate the number of bytes needed for the characters in the string while
   1538   // observing object alignment.
   1539   ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
   1540   ASSERT(kShortSize == 2);
   1541   // scratch1 = length * 2 + kObjectAlignmentMask.
   1542   lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
   1543   and_(scratch1, Immediate(~kObjectAlignmentMask));
   1544 
   1545   // Allocate two byte string in new space.
   1546   Allocate(SeqTwoByteString::kHeaderSize,
   1547            times_1,
   1548            scratch1,
   1549            REGISTER_VALUE_IS_INT32,
   1550            result,
   1551            scratch2,
   1552            scratch3,
   1553            gc_required,
   1554            TAG_OBJECT);
   1555 
   1556   // Set the map, length and hash field.
   1557   mov(FieldOperand(result, HeapObject::kMapOffset),
   1558       Immediate(isolate()->factory()->string_map()));
   1559   mov(scratch1, length);
   1560   SmiTag(scratch1);
   1561   mov(FieldOperand(result, String::kLengthOffset), scratch1);
   1562   mov(FieldOperand(result, String::kHashFieldOffset),
   1563       Immediate(String::kEmptyHashField));
   1564 }
   1565 
   1566 
   1567 void MacroAssembler::AllocateAsciiString(Register result,
   1568                                          Register length,
   1569                                          Register scratch1,
   1570                                          Register scratch2,
   1571                                          Register scratch3,
   1572                                          Label* gc_required) {
   1573   // Calculate the number of bytes needed for the characters in the string while
   1574   // observing object alignment.
   1575   ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
   1576   mov(scratch1, length);
   1577   ASSERT(kCharSize == 1);
   1578   add(scratch1, Immediate(kObjectAlignmentMask));
   1579   and_(scratch1, Immediate(~kObjectAlignmentMask));
   1580 
   1581   // Allocate ASCII string in new space.
   1582   Allocate(SeqOneByteString::kHeaderSize,
   1583            times_1,
   1584            scratch1,
   1585            REGISTER_VALUE_IS_INT32,
   1586            result,
   1587            scratch2,
   1588            scratch3,
   1589            gc_required,
   1590            TAG_OBJECT);
   1591 
   1592   // Set the map, length and hash field.
   1593   mov(FieldOperand(result, HeapObject::kMapOffset),
   1594       Immediate(isolate()->factory()->ascii_string_map()));
   1595   mov(scratch1, length);
   1596   SmiTag(scratch1);
   1597   mov(FieldOperand(result, String::kLengthOffset), scratch1);
   1598   mov(FieldOperand(result, String::kHashFieldOffset),
   1599       Immediate(String::kEmptyHashField));
   1600 }
   1601 
   1602 
   1603 void MacroAssembler::AllocateAsciiString(Register result,
   1604                                          int length,
   1605                                          Register scratch1,
   1606                                          Register scratch2,
   1607                                          Label* gc_required) {
   1608   ASSERT(length > 0);
   1609 
   1610   // Allocate ASCII string in new space.
   1611   Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
   1612            gc_required, TAG_OBJECT);
   1613 
   1614   // Set the map, length and hash field.
   1615   mov(FieldOperand(result, HeapObject::kMapOffset),
   1616       Immediate(isolate()->factory()->ascii_string_map()));
   1617   mov(FieldOperand(result, String::kLengthOffset),
   1618       Immediate(Smi::FromInt(length)));
   1619   mov(FieldOperand(result, String::kHashFieldOffset),
   1620       Immediate(String::kEmptyHashField));
   1621 }
   1622 
   1623 
   1624 void MacroAssembler::AllocateTwoByteConsString(Register result,
    1625                                                Register scratch1,
    1626                                                Register scratch2,
    1627                                                Label* gc_required) {
    1628   // Allocate cons string in new space.
   1629   Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
   1630            TAG_OBJECT);
   1631 
   1632   // Set the map. The other fields are left uninitialized.
   1633   mov(FieldOperand(result, HeapObject::kMapOffset),
   1634       Immediate(isolate()->factory()->cons_string_map()));
   1635 }
   1636 
   1637 
   1638 void MacroAssembler::AllocateAsciiConsString(Register result,
   1639                                              Register scratch1,
   1640                                              Register scratch2,
   1641                                              Label* gc_required) {
   1642   Label allocate_new_space, install_map;
   1643   AllocationFlags flags = TAG_OBJECT;
   1644 
   1645   ExternalReference high_promotion_mode = ExternalReference::
   1646       new_space_high_promotion_mode_active_address(isolate());
   1647 
   1648   test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
   1649   j(zero, &allocate_new_space);
   1650 
   1651   Allocate(ConsString::kSize,
   1652            result,
   1653            scratch1,
   1654            scratch2,
   1655            gc_required,
   1656            static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
   1657   jmp(&install_map);
   1658 
   1659   bind(&allocate_new_space);
   1660   Allocate(ConsString::kSize,
   1661            result,
   1662            scratch1,
   1663            scratch2,
   1664            gc_required,
   1665            flags);
   1666 
   1667   bind(&install_map);
   1668   // Set the map. The other fields are left uninitialized.
   1669   mov(FieldOperand(result, HeapObject::kMapOffset),
   1670       Immediate(isolate()->factory()->cons_ascii_string_map()));
   1671 }
   1672 
   1673 
   1674 void MacroAssembler::AllocateTwoByteSlicedString(Register result,
   1675                                           Register scratch1,
    1676                                                  Register scratch1,
    1677                                                  Register scratch2,
    1678                                                  Label* gc_required) {
   1679   Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
   1680            TAG_OBJECT);
   1681 
   1682   // Set the map. The other fields are left uninitialized.
   1683   mov(FieldOperand(result, HeapObject::kMapOffset),
   1684       Immediate(isolate()->factory()->sliced_string_map()));
   1685 }
   1686 
   1687 
   1688 void MacroAssembler::AllocateAsciiSlicedString(Register result,
   1689                                                Register scratch1,
   1690                                                Register scratch2,
   1691                                                Label* gc_required) {
    1692   // Allocate sliced string in new space.
   1693   Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
   1694            TAG_OBJECT);
   1695 
   1696   // Set the map. The other fields are left uninitialized.
   1697   mov(FieldOperand(result, HeapObject::kMapOffset),
   1698       Immediate(isolate()->factory()->sliced_ascii_string_map()));
   1699 }
   1700 
   1701 
   1702 // Copy memory, byte-by-byte, from source to destination.  Not optimized for
   1703 // long or aligned copies.  The contents of scratch and length are destroyed.
    1704 // Destination is incremented by length; source is as well, except that on
         // the long path it can fall short by the final length & 3 odd bytes.
   1705 // Many variants of movsb, loop unrolling, word moves, and indexed operands
   1706 // have been tried here already, and this is fastest.
   1707 // A simpler loop is faster on small copies, but 30% slower on large ones.
    1708 // The cld() instruction must have been emitted, to clear the direction flag,
   1709 // before calling this function.
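         // Illustrative run with length == 11: the first pair of moves copies
         // bytes [7..10] to the destination, rep_movs() then copies
         // 11 >> 2 == 2 whole words (bytes [0..7]), and destination is advanced
         // by the remaining 11 & 3 == 3 bytes, i.e. by length in total.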
   1710 void MacroAssembler::CopyBytes(Register source,
   1711                                Register destination,
   1712                                Register length,
   1713                                Register scratch) {
   1714   Label loop, done, short_string, short_loop;
   1715   // Experimentation shows that the short string loop is faster if length < 10.
   1716   cmp(length, Immediate(10));
   1717   j(less_equal, &short_string);
   1718 
   1719   ASSERT(source.is(esi));
   1720   ASSERT(destination.is(edi));
   1721   ASSERT(length.is(ecx));
   1722 
   1723   // Because source is 4-byte aligned in our uses of this function,
   1724   // we keep source aligned for the rep_movs call by copying the odd bytes
   1725   // at the end of the ranges.
   1726   mov(scratch, Operand(source, length, times_1, -4));
   1727   mov(Operand(destination, length, times_1, -4), scratch);
   1728   mov(scratch, ecx);
   1729   shr(ecx, 2);
   1730   rep_movs();
   1731   and_(scratch, Immediate(0x3));
   1732   add(destination, scratch);
   1733   jmp(&done);
   1734 
   1735   bind(&short_string);
   1736   test(length, length);
   1737   j(zero, &done);
   1738 
   1739   bind(&short_loop);
   1740   mov_b(scratch, Operand(source, 0));
   1741   mov_b(Operand(destination, 0), scratch);
   1742   inc(source);
   1743   inc(destination);
   1744   dec(length);
   1745   j(not_zero, &short_loop);
   1746 
   1747   bind(&done);
   1748 }
   1749 
   1750 
   1751 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
   1752                                                 Register end_offset,
   1753                                                 Register filler) {
   1754   Label loop, entry;
   1755   jmp(&entry);
   1756   bind(&loop);
   1757   mov(Operand(start_offset, 0), filler);
   1758   add(start_offset, Immediate(kPointerSize));
   1759   bind(&entry);
   1760   cmp(start_offset, end_offset);
   1761   j(less, &loop);
   1762 }
   1763 
   1764 
   1765 void MacroAssembler::BooleanBitTest(Register object,
   1766                                     int field_offset,
   1767                                     int bit_index) {
   1768   bit_index += kSmiTagSize + kSmiShiftSize;
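           // On ia32 kSmiTagSize == 1 and kSmiShiftSize == 0, so bit i of the
           // untagged smi value lives at bit i + 1 of the in-object word.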
   1769   ASSERT(IsPowerOf2(kBitsPerByte));
   1770   int byte_index = bit_index / kBitsPerByte;
   1771   int byte_bit_index = bit_index & (kBitsPerByte - 1);
   1772   test_b(FieldOperand(object, field_offset + byte_index),
   1773          static_cast<byte>(1 << byte_bit_index));
   1774 }
   1775 
   1776 
   1777 
   1778 void MacroAssembler::NegativeZeroTest(Register result,
   1779                                       Register op,
   1780                                       Label* then_label) {
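           // Used, e.g., after an integer multiplication: a zero result with a
           // negative operand means the exact product was -0, which a tagged
           // integer cannot represent, so control goes to then_label.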
   1781   Label ok;
   1782   test(result, result);
   1783   j(not_zero, &ok);
   1784   test(op, op);
   1785   j(sign, then_label);
   1786   bind(&ok);
   1787 }
   1788 
   1789 
   1790 void MacroAssembler::NegativeZeroTest(Register result,
   1791                                       Register op1,
   1792                                       Register op2,
   1793                                       Register scratch,
   1794                                       Label* then_label) {
   1795   Label ok;
   1796   test(result, result);
   1797   j(not_zero, &ok);
   1798   mov(scratch, op1);
   1799   or_(scratch, op2);
   1800   j(sign, then_label);
   1801   bind(&ok);
   1802 }
   1803 
   1804 
   1805 void MacroAssembler::TryGetFunctionPrototype(Register function,
   1806                                              Register result,
   1807                                              Register scratch,
   1808                                              Label* miss,
   1809                                              bool miss_on_bound_function) {
   1810   // Check that the receiver isn't a smi.
   1811   JumpIfSmi(function, miss);
   1812 
   1813   // Check that the function really is a function.
   1814   CmpObjectType(function, JS_FUNCTION_TYPE, result);
   1815   j(not_equal, miss);
   1816 
   1817   if (miss_on_bound_function) {
   1818     // If a bound function, go to miss label.
   1819     mov(scratch,
   1820         FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
   1821     BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
   1822                    SharedFunctionInfo::kBoundFunction);
   1823     j(not_zero, miss);
   1824   }
   1825 
   1826   // Make sure that the function has an instance prototype.
   1827   Label non_instance;
   1828   movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
   1829   test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
   1830   j(not_zero, &non_instance);
   1831 
   1832   // Get the prototype or initial map from the function.
   1833   mov(result,
   1834       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
   1835 
   1836   // If the prototype or initial map is the hole, don't return it and
   1837   // simply miss the cache instead. This will allow us to allocate a
   1838   // prototype object on-demand in the runtime system.
   1839   cmp(result, Immediate(isolate()->factory()->the_hole_value()));
   1840   j(equal, miss);
   1841 
   1842   // If the function does not have an initial map, we're done.
   1843   Label done;
   1844   CmpObjectType(result, MAP_TYPE, scratch);
   1845   j(not_equal, &done);
   1846 
   1847   // Get the prototype from the initial map.
   1848   mov(result, FieldOperand(result, Map::kPrototypeOffset));
   1849   jmp(&done);
   1850 
   1851   // Non-instance prototype: Fetch prototype from constructor field
   1852   // in initial map.
   1853   bind(&non_instance);
   1854   mov(result, FieldOperand(result, Map::kConstructorOffset));
   1855 
   1856   // All done.
   1857   bind(&done);
   1858 }
   1859 
   1860 
   1861 void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
   1862   ASSERT(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
   1863   call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id);
   1864 }
   1865 
   1866 
   1867 void MacroAssembler::TailCallStub(CodeStub* stub) {
   1868   ASSERT(allow_stub_calls_ ||
   1869          stub->CompilingCallsToThisStubIsGCSafe(isolate()));
   1870   jmp(stub->GetCode(isolate()), RelocInfo::CODE_TARGET);
   1871 }
   1872 
   1873 
   1874 void MacroAssembler::StubReturn(int argc) {
   1875   ASSERT(argc >= 1 && generating_stub());
   1876   ret((argc - 1) * kPointerSize);
   1877 }
   1878 
   1879 
   1880 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
   1881   if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
   1882   return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe(isolate());
   1883 }
   1884 
   1885 
   1886 void MacroAssembler::IllegalOperation(int num_arguments) {
   1887   if (num_arguments > 0) {
   1888     add(esp, Immediate(num_arguments * kPointerSize));
   1889   }
   1890   mov(eax, Immediate(isolate()->factory()->undefined_value()));
   1891 }
   1892 
   1893 
   1894 void MacroAssembler::IndexFromHash(Register hash, Register index) {
   1895   // The assert checks that the constants for the maximum number of digits
   1896   // for an array index cached in the hash field and the number of bits
    1897   // reserved for it do not conflict.
   1898   ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
   1899          (1 << String::kArrayIndexValueBits));
    1900   // We want the smi-tagged index in the index register.
    1901   // kArrayIndexValueMask has zeros in the low kHashShift bits.
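           // Shifting right by kHashShift - kSmiTagSize then moves the index
           // from its hash-field position straight into smi representation,
           // with no separate tagging step.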
   1902   and_(hash, String::kArrayIndexValueMask);
   1903   STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
   1904   if (String::kHashShift > kSmiTagSize) {
   1905     shr(hash, String::kHashShift - kSmiTagSize);
   1906   }
   1907   if (!index.is(hash)) {
   1908     mov(index, hash);
   1909   }
   1910 }
   1911 
   1912 
   1913 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
   1914   CallRuntime(Runtime::FunctionForId(id), num_arguments);
   1915 }
   1916 
   1917 
   1918 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
   1919   const Runtime::Function* function = Runtime::FunctionForId(id);
   1920   Set(eax, Immediate(function->nargs));
   1921   mov(ebx, Immediate(ExternalReference(function, isolate())));
   1922   CEntryStub ces(1, CpuFeatures::IsSupported(SSE2) ? kSaveFPRegs
   1923                                                    : kDontSaveFPRegs);
   1924   CallStub(&ces);
   1925 }
   1926 
   1927 
   1928 void MacroAssembler::CallRuntime(const Runtime::Function* f,
   1929                                  int num_arguments) {
   1930   // If the expected number of arguments of the runtime function is
   1931   // constant, we check that the actual number of arguments match the
   1932   // expectation.
   1933   if (f->nargs >= 0 && f->nargs != num_arguments) {
   1934     IllegalOperation(num_arguments);
   1935     return;
   1936   }
   1937 
   1938   // TODO(1236192): Most runtime routines don't need the number of
   1939   // arguments passed in because it is constant. At some point we
   1940   // should remove this need and make the runtime routine entry code
   1941   // smarter.
   1942   Set(eax, Immediate(num_arguments));
   1943   mov(ebx, Immediate(ExternalReference(f, isolate())));
   1944   CEntryStub ces(1);
   1945   CallStub(&ces);
   1946 }
   1947 
   1948 
   1949 void MacroAssembler::CallExternalReference(ExternalReference ref,
   1950                                            int num_arguments) {
   1951   mov(eax, Immediate(num_arguments));
   1952   mov(ebx, Immediate(ref));
   1953 
   1954   CEntryStub stub(1);
   1955   CallStub(&stub);
   1956 }
   1957 
   1958 
   1959 void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
   1960                                                int num_arguments,
   1961                                                int result_size) {
   1962   // TODO(1236192): Most runtime routines don't need the number of
   1963   // arguments passed in because it is constant. At some point we
   1964   // should remove this need and make the runtime routine entry code
   1965   // smarter.
   1966   Set(eax, Immediate(num_arguments));
   1967   JumpToExternalReference(ext);
   1968 }
   1969 
   1970 
   1971 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
   1972                                      int num_arguments,
   1973                                      int result_size) {
   1974   TailCallExternalReference(ExternalReference(fid, isolate()),
   1975                             num_arguments,
   1976                             result_size);
   1977 }
   1978 
   1979 
    1980 // If true, a Handle<T> returned by value from a function with cdecl calling
    1981 // convention will be returned directly, as the value of its location_ field,
    1982 // in register eax.
    1983 // If false, it is returned as a pointer to a memory region preallocated by
    1984 // the caller. A pointer to this region must be passed to the function as an
    1985 // implicit first argument.
   1986 #if defined(USING_BSD_ABI) || defined(__MINGW32__) || defined(__CYGWIN__)
   1987 static const bool kReturnHandlesDirectly = true;
   1988 #else
   1989 static const bool kReturnHandlesDirectly = false;
   1990 #endif
   1991 
   1992 
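         // Illustrative: with an indirect handle return (kReturnHandlesDirectly
         // is false and returns_handle is true), stack slot 0 holds the pointer
         // to the output slot, so argument i lives one slot higher and
         // ApiParameterOperand(i, true) resolves to
         // Operand(esp, (i + 1) * kPointerSize).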
   1993 Operand ApiParameterOperand(int index, bool returns_handle) {
    1994   int offset = (index + (kReturnHandlesDirectly || !returns_handle ? 0 : 1));
   1995   return Operand(esp, offset * kPointerSize);
   1996 }
   1997 
   1998 
   1999 void MacroAssembler::PrepareCallApiFunction(int argc, bool returns_handle) {
   2000   if (kReturnHandlesDirectly || !returns_handle) {
   2001     EnterApiExitFrame(argc);
   2002     // When handles are returned directly we don't have to allocate extra
    2003     // space for an out parameter or pass one.
   2004     if (emit_debug_code()) {
   2005       mov(esi, Immediate(BitCast<int32_t>(kZapValue)));
   2006     }
   2007   } else {
   2008     // We allocate two additional slots: return value and pointer to it.
   2009     EnterApiExitFrame(argc + 2);
   2010 
   2011     // The argument slots are filled as follows:
   2012     //
   2013     //   n + 1: output slot
   2014     //   n: arg n
   2015     //   ...
   2016     //   1: arg1
   2017     //   0: pointer to the output slot
   2018 
   2019     lea(esi, Operand(esp, (argc + 1) * kPointerSize));
   2020     mov(Operand(esp, 0 * kPointerSize), esi);
   2021     if (emit_debug_code()) {
   2022       mov(Operand(esi, 0), Immediate(0));
   2023     }
   2024   }
   2025 }
   2026 
   2027 
   2028 void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
   2029                                               Address thunk_address,
   2030                                               Operand thunk_last_arg,
   2031                                               int stack_space,
   2032                                               bool returns_handle,
   2033                                               int return_value_offset) {
   2034   ExternalReference next_address =
   2035       ExternalReference::handle_scope_next_address(isolate());
   2036   ExternalReference limit_address =
   2037       ExternalReference::handle_scope_limit_address(isolate());
   2038   ExternalReference level_address =
   2039       ExternalReference::handle_scope_level_address(isolate());
   2040 
   2041   // Allocate HandleScope in callee-save registers.
   2042   mov(ebx, Operand::StaticVariable(next_address));
   2043   mov(edi, Operand::StaticVariable(limit_address));
   2044   add(Operand::StaticVariable(level_address), Immediate(1));
   2045 
   2046   if (FLAG_log_timer_events) {
   2047     FrameScope frame(this, StackFrame::MANUAL);
   2048     PushSafepointRegisters();
   2049     PrepareCallCFunction(1, eax);
   2050     mov(Operand(esp, 0),
   2051         Immediate(ExternalReference::isolate_address(isolate())));
   2052     CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
   2053     PopSafepointRegisters();
   2054   }
   2055 
   2056 
   2057   Label profiler_disabled;
   2058   Label end_profiler_check;
   2059   bool* is_profiling_flag =
   2060       isolate()->cpu_profiler()->is_profiling_address();
   2061   STATIC_ASSERT(sizeof(*is_profiling_flag) == 1);
   2062   mov(eax, Immediate(reinterpret_cast<Address>(is_profiling_flag)));
   2063   cmpb(Operand(eax, 0), 0);
   2064   j(zero, &profiler_disabled);
   2065 
   2066   // Additional parameter is the address of the actual getter function.
   2067   mov(thunk_last_arg, Immediate(function_address));
   2068   // Call the api function.
   2069   call(thunk_address, RelocInfo::RUNTIME_ENTRY);
   2070   jmp(&end_profiler_check);
   2071 
   2072   bind(&profiler_disabled);
   2073   // Call the api function.
   2074   call(function_address, RelocInfo::RUNTIME_ENTRY);
   2075   bind(&end_profiler_check);
   2076 
   2077   if (FLAG_log_timer_events) {
   2078     FrameScope frame(this, StackFrame::MANUAL);
   2079     PushSafepointRegisters();
   2080     PrepareCallCFunction(1, eax);
   2081     mov(Operand(esp, 0),
   2082         Immediate(ExternalReference::isolate_address(isolate())));
   2083     CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
   2084     PopSafepointRegisters();
   2085   }
   2086 
   2087   Label prologue;
   2088   if (returns_handle) {
   2089     if (!kReturnHandlesDirectly) {
   2090       // PrepareCallApiFunction saved pointer to the output slot into
   2091       // callee-save register esi.
   2092       mov(eax, Operand(esi, 0));
   2093     }
   2094     Label empty_handle;
   2095     // Check if the result handle holds 0.
   2096     test(eax, eax);
   2097     j(zero, &empty_handle);
   2098     // It was non-zero.  Dereference to get the result value.
   2099     mov(eax, Operand(eax, 0));
   2100     jmp(&prologue);
   2101     bind(&empty_handle);
   2102   }
    2103   // Load the value from ReturnValue.
   2104   mov(eax, Operand(ebp, return_value_offset * kPointerSize));
   2105 
   2106   Label promote_scheduled_exception;
   2107   Label delete_allocated_handles;
   2108   Label leave_exit_frame;
   2109 
   2110   bind(&prologue);
   2111   // No more valid handles (the result handle was the last one). Restore
   2112   // previous handle scope.
   2113   mov(Operand::StaticVariable(next_address), ebx);
   2114   sub(Operand::StaticVariable(level_address), Immediate(1));
   2115   Assert(above_equal, kInvalidHandleScopeLevel);
   2116   cmp(edi, Operand::StaticVariable(limit_address));
   2117   j(not_equal, &delete_allocated_handles);
   2118   bind(&leave_exit_frame);
   2119 
   2120   // Check if the function scheduled an exception.
   2121   ExternalReference scheduled_exception_address =
   2122       ExternalReference::scheduled_exception_address(isolate());
   2123   cmp(Operand::StaticVariable(scheduled_exception_address),
   2124       Immediate(isolate()->factory()->the_hole_value()));
   2125   j(not_equal, &promote_scheduled_exception);
   2126 
   2127 #if ENABLE_EXTRA_CHECKS
   2128   // Check if the function returned a valid JavaScript value.
   2129   Label ok;
   2130   Register return_value = eax;
   2131   Register map = ecx;
   2132 
   2133   JumpIfSmi(return_value, &ok, Label::kNear);
   2134   mov(map, FieldOperand(return_value, HeapObject::kMapOffset));
   2135 
   2136   CmpInstanceType(map, FIRST_NONSTRING_TYPE);
   2137   j(below, &ok, Label::kNear);
   2138 
   2139   CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
   2140   j(above_equal, &ok, Label::kNear);
   2141 
   2142   cmp(map, isolate()->factory()->heap_number_map());
   2143   j(equal, &ok, Label::kNear);
   2144 
   2145   cmp(return_value, isolate()->factory()->undefined_value());
   2146   j(equal, &ok, Label::kNear);
   2147 
   2148   cmp(return_value, isolate()->factory()->true_value());
   2149   j(equal, &ok, Label::kNear);
   2150 
   2151   cmp(return_value, isolate()->factory()->false_value());
   2152   j(equal, &ok, Label::kNear);
   2153 
   2154   cmp(return_value, isolate()->factory()->null_value());
   2155   j(equal, &ok, Label::kNear);
   2156 
   2157   Abort(kAPICallReturnedInvalidObject);
   2158 
   2159   bind(&ok);
   2160 #endif
   2161 
   2162   LeaveApiExitFrame();
   2163   ret(stack_space * kPointerSize);
   2164 
   2165   bind(&promote_scheduled_exception);
   2166   TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
   2167 
   2168   // HandleScope limit has changed. Delete allocated extensions.
   2169   ExternalReference delete_extensions =
   2170       ExternalReference::delete_handle_scope_extensions(isolate());
   2171   bind(&delete_allocated_handles);
   2172   mov(Operand::StaticVariable(limit_address), edi);
   2173   mov(edi, eax);
   2174   mov(Operand(esp, 0),
   2175       Immediate(ExternalReference::isolate_address(isolate())));
   2176   mov(eax, Immediate(delete_extensions));
   2177   call(eax);
   2178   mov(eax, edi);
   2179   jmp(&leave_exit_frame);
   2180 }
   2181 
   2182 
   2183 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
   2184   // Set the entry point and jump to the C entry runtime stub.
   2185   mov(ebx, Immediate(ext));
   2186   CEntryStub ces(1);
   2187   jmp(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
   2188 }
   2189 
   2190 
   2191 void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
   2192   // This macro takes the dst register to make the code more readable
   2193   // at the call sites. However, the dst register has to be ecx to
    2194   // follow the calling convention, which requires the call kind to be
   2195   // in ecx.
   2196   ASSERT(dst.is(ecx));
   2197   if (call_kind == CALL_AS_FUNCTION) {
   2198     // Set to some non-zero smi by updating the least significant
   2199     // byte.
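             // With kSmiTagSize == 1 this writes the byte 2, the smi encoding
             // of the integer 1.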
   2200     mov_b(dst, 1 << kSmiTagSize);
   2201   } else {
   2202     // Set to smi zero by clearing the register.
   2203     xor_(dst, dst);
   2204   }
   2205 }
   2206 
   2207 
   2208 void MacroAssembler::InvokePrologue(const ParameterCount& expected,
   2209                                     const ParameterCount& actual,
   2210                                     Handle<Code> code_constant,
   2211                                     const Operand& code_operand,
   2212                                     Label* done,
   2213                                     bool* definitely_mismatches,
   2214                                     InvokeFlag flag,
   2215                                     Label::Distance done_near,
   2216                                     const CallWrapper& call_wrapper,
   2217                                     CallKind call_kind) {
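           // Register conventions assumed here and by the arguments adaptor:
           // eax holds the actual argument count, ebx the expected count, and
           // edx the code to invoke.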
   2218   bool definitely_matches = false;
   2219   *definitely_mismatches = false;
   2220   Label invoke;
   2221   if (expected.is_immediate()) {
   2222     ASSERT(actual.is_immediate());
   2223     if (expected.immediate() == actual.immediate()) {
   2224       definitely_matches = true;
   2225     } else {
   2226       mov(eax, actual.immediate());
   2227       const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
   2228       if (expected.immediate() == sentinel) {
   2229         // Don't worry about adapting arguments for builtins that
    2230         // don't want that done. Skip adaptation code by making it look
   2231         // like we have a match between expected and actual number of
   2232         // arguments.
   2233         definitely_matches = true;
   2234       } else {
   2235         *definitely_mismatches = true;
   2236         mov(ebx, expected.immediate());
   2237       }
   2238     }
   2239   } else {
   2240     if (actual.is_immediate()) {
   2241       // Expected is in register, actual is immediate. This is the
   2242       // case when we invoke function values without going through the
   2243       // IC mechanism.
   2244       cmp(expected.reg(), actual.immediate());
   2245       j(equal, &invoke);
   2246       ASSERT(expected.reg().is(ebx));
   2247       mov(eax, actual.immediate());
   2248     } else if (!expected.reg().is(actual.reg())) {
   2249       // Both expected and actual are in (different) registers. This
   2250       // is the case when we invoke functions using call and apply.
   2251       cmp(expected.reg(), actual.reg());
   2252       j(equal, &invoke);
   2253       ASSERT(actual.reg().is(eax));
   2254       ASSERT(expected.reg().is(ebx));
   2255     }
   2256   }
   2257 
   2258   if (!definitely_matches) {
   2259     Handle<Code> adaptor =
   2260         isolate()->builtins()->ArgumentsAdaptorTrampoline();
   2261     if (!code_constant.is_null()) {
   2262       mov(edx, Immediate(code_constant));
   2263       add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
   2264     } else if (!code_operand.is_reg(edx)) {
   2265       mov(edx, code_operand);
   2266     }
   2267 
   2268     if (flag == CALL_FUNCTION) {
   2269       call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
   2270       SetCallKind(ecx, call_kind);
   2271       call(adaptor, RelocInfo::CODE_TARGET);
   2272       call_wrapper.AfterCall();
   2273       if (!*definitely_mismatches) {
   2274         jmp(done, done_near);
   2275       }
   2276     } else {
   2277       SetCallKind(ecx, call_kind);
   2278       jmp(adaptor, RelocInfo::CODE_TARGET);
   2279     }
   2280     bind(&invoke);
   2281   }
   2282 }
   2283 
   2284 
   2285 void MacroAssembler::InvokeCode(const Operand& code,
   2286                                 const ParameterCount& expected,
   2287                                 const ParameterCount& actual,
   2288                                 InvokeFlag flag,
   2289                                 const CallWrapper& call_wrapper,
   2290                                 CallKind call_kind) {
   2291   // You can't call a function without a valid frame.
   2292   ASSERT(flag == JUMP_FUNCTION || has_frame());
   2293 
   2294   Label done;
   2295   bool definitely_mismatches = false;
   2296   InvokePrologue(expected, actual, Handle<Code>::null(), code,
   2297                  &done, &definitely_mismatches, flag, Label::kNear,
   2298                  call_wrapper, call_kind);
   2299   if (!definitely_mismatches) {
   2300     if (flag == CALL_FUNCTION) {
   2301       call_wrapper.BeforeCall(CallSize(code));
   2302       SetCallKind(ecx, call_kind);
   2303       call(code);
   2304       call_wrapper.AfterCall();
   2305     } else {
   2306       ASSERT(flag == JUMP_FUNCTION);
   2307       SetCallKind(ecx, call_kind);
   2308       jmp(code);
   2309     }
   2310     bind(&done);
   2311   }
   2312 }
   2313 
   2314 
   2315 void MacroAssembler::InvokeCode(Handle<Code> code,
   2316                                 const ParameterCount& expected,
   2317                                 const ParameterCount& actual,
   2318                                 RelocInfo::Mode rmode,
   2319                                 InvokeFlag flag,
   2320                                 const CallWrapper& call_wrapper,
   2321                                 CallKind call_kind) {
   2322   // You can't call a function without a valid frame.
   2323   ASSERT(flag == JUMP_FUNCTION || has_frame());
   2324 
   2325   Label done;
   2326   Operand dummy(eax, 0);
   2327   bool definitely_mismatches = false;
   2328   InvokePrologue(expected, actual, code, dummy, &done, &definitely_mismatches,
   2329                  flag, Label::kNear, call_wrapper, call_kind);
   2330   if (!definitely_mismatches) {
   2331     if (flag == CALL_FUNCTION) {
   2332       call_wrapper.BeforeCall(CallSize(code, rmode));
   2333       SetCallKind(ecx, call_kind);
   2334       call(code, rmode);
   2335       call_wrapper.AfterCall();
   2336     } else {
   2337       ASSERT(flag == JUMP_FUNCTION);
   2338       SetCallKind(ecx, call_kind);
   2339       jmp(code, rmode);
   2340     }
   2341     bind(&done);
   2342   }
   2343 }
   2344 
   2345 
   2346 void MacroAssembler::InvokeFunction(Register fun,
   2347                                     const ParameterCount& actual,
   2348                                     InvokeFlag flag,
   2349                                     const CallWrapper& call_wrapper,
   2350                                     CallKind call_kind) {
   2351   // You can't call a function without a valid frame.
   2352   ASSERT(flag == JUMP_FUNCTION || has_frame());
   2353 
   2354   ASSERT(fun.is(edi));
   2355   mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
   2356   mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
   2357   mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
   2358   SmiUntag(ebx);
   2359 
   2360   ParameterCount expected(ebx);
   2361   InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
   2362              expected, actual, flag, call_wrapper, call_kind);
   2363 }
   2364 
   2365 
   2366 void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
   2367                                     const ParameterCount& expected,
   2368                                     const ParameterCount& actual,
   2369                                     InvokeFlag flag,
   2370                                     const CallWrapper& call_wrapper,
   2371                                     CallKind call_kind) {
   2372   // You can't call a function without a valid frame.
   2373   ASSERT(flag == JUMP_FUNCTION || has_frame());
   2374 
   2375   // Get the function and setup the context.
   2376   LoadHeapObject(edi, function);
   2377   mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
   2378 
   2379   // We call indirectly through the code field in the function to
   2380   // allow recompilation to take effect without changing any of the
   2381   // call sites.
   2382   InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
   2383              expected, actual, flag, call_wrapper, call_kind);
   2384 }
   2385 
   2386 
   2387 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
   2388                                    InvokeFlag flag,
   2389                                    const CallWrapper& call_wrapper) {
   2390   // You can't call a builtin without a valid frame.
   2391   ASSERT(flag == JUMP_FUNCTION || has_frame());
   2392 
   2393   // Rely on the assertion to check that the number of provided
    2394   // arguments matches the expected number of arguments. Fake a
   2395   // parameter count to avoid emitting code to do the check.
   2396   ParameterCount expected(0);
   2397   GetBuiltinFunction(edi, id);
   2398   InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
   2399              expected, expected, flag, call_wrapper, CALL_AS_METHOD);
   2400 }
   2401 
   2402 
   2403 void MacroAssembler::GetBuiltinFunction(Register target,
   2404                                         Builtins::JavaScript id) {
   2405   // Load the JavaScript builtin function from the builtins object.
   2406   mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   2407   mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
   2408   mov(target, FieldOperand(target,
   2409                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
   2410 }
   2411 
   2412 
   2413 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
   2414   ASSERT(!target.is(edi));
   2415   // Load the JavaScript builtin function from the builtins object.
   2416   GetBuiltinFunction(edi, id);
   2417   // Load the code entry point from the function into the target register.
   2418   mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
   2419 }
   2420 
   2421 
   2422 void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
   2423   if (context_chain_length > 0) {
   2424     // Move up the chain of contexts to the context containing the slot.
   2425     mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
   2426     for (int i = 1; i < context_chain_length; i++) {
   2427       mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
   2428     }
   2429   } else {
   2430     // Slot is in the current function context.  Move it into the
   2431     // destination register in case we store into it (the write barrier
   2432     // cannot be allowed to destroy the context in esi).
   2433     mov(dst, esi);
   2434   }
   2435 
   2436   // We should not have found a with context by walking the context chain
   2437   // (i.e., the static scope chain and runtime context chain do not agree).
   2438   // A variable occurring in such a scope should have slot type LOOKUP and
   2439   // not CONTEXT.
   2440   if (emit_debug_code()) {
   2441     cmp(FieldOperand(dst, HeapObject::kMapOffset),
   2442         isolate()->factory()->with_context_map());
   2443     Check(not_equal, kVariableResolvedToWithContext);
   2444   }
   2445 }
   2446 
   2447 
   2448 void MacroAssembler::LoadTransitionedArrayMapConditional(
   2449     ElementsKind expected_kind,
   2450     ElementsKind transitioned_kind,
   2451     Register map_in_out,
   2452     Register scratch,
   2453     Label* no_map_match) {
   2454   // Load the global or builtins object from the current context.
   2455   mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   2456   mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
   2457 
   2458   // Check that the function's map is the same as the expected cached map.
   2459   mov(scratch, Operand(scratch,
   2460                        Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
   2461 
   2462   size_t offset = expected_kind * kPointerSize +
   2463       FixedArrayBase::kHeaderSize;
   2464   cmp(map_in_out, FieldOperand(scratch, offset));
   2465   j(not_equal, no_map_match);
   2466 
   2467   // Use the transitioned cached map.
   2468   offset = transitioned_kind * kPointerSize +
   2469       FixedArrayBase::kHeaderSize;
   2470   mov(map_in_out, FieldOperand(scratch, offset));
   2471 }
   2472 
   2473 
   2474 void MacroAssembler::LoadInitialArrayMap(
   2475     Register function_in, Register scratch,
   2476     Register map_out, bool can_have_holes) {
   2477   ASSERT(!function_in.is(map_out));
   2478   Label done;
   2479   mov(map_out, FieldOperand(function_in,
   2480                             JSFunction::kPrototypeOrInitialMapOffset));
   2481   if (!FLAG_smi_only_arrays) {
   2482     ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
   2483     LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
   2484                                         kind,
   2485                                         map_out,
   2486                                         scratch,
   2487                                         &done);
   2488   } else if (can_have_holes) {
   2489     LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
   2490                                         FAST_HOLEY_SMI_ELEMENTS,
   2491                                         map_out,
   2492                                         scratch,
   2493                                         &done);
   2494   }
   2495   bind(&done);
   2496 }
   2497 
   2498 
   2499 void MacroAssembler::LoadGlobalContext(Register global_context) {
   2500   // Load the global or builtins object from the current context.
   2501   mov(global_context,
   2502       Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   2503   // Load the native context from the global or builtins object.
   2504   mov(global_context,
   2505       FieldOperand(global_context, GlobalObject::kNativeContextOffset));
   2506 }
   2507 
   2508 
   2509 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
   2510   // Load the global or builtins object from the current context.
   2511   mov(function,
   2512       Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   2513   // Load the native context from the global or builtins object.
   2514   mov(function,
   2515       FieldOperand(function, GlobalObject::kNativeContextOffset));
   2516   // Load the function from the native context.
   2517   mov(function, Operand(function, Context::SlotOffset(index)));
   2518 }
   2519 
   2520 
   2521 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
   2522                                                   Register map) {
   2523   // Load the initial map.  The global functions all have initial maps.
   2524   mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
   2525   if (emit_debug_code()) {
   2526     Label ok, fail;
   2527     CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
   2528     jmp(&ok);
   2529     bind(&fail);
   2530     Abort(kGlobalFunctionsMustHaveInitialMap);
   2531     bind(&ok);
   2532   }
   2533 }
   2534 
   2535 
   2536 // Store the value in register src in the safepoint register stack
   2537 // slot for register dst.
   2538 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
   2539   mov(SafepointRegisterSlot(dst), src);
   2540 }
   2541 
   2542 
   2543 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
   2544   mov(SafepointRegisterSlot(dst), src);
   2545 }
   2546 
   2547 
   2548 void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
   2549   mov(dst, SafepointRegisterSlot(src));
   2550 }
   2551 
   2552 
   2553 Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
   2554   return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
   2555 }
   2556 
   2557 
   2558 int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
   2559   // The registers are pushed starting with the lowest encoding,
   2560   // which means that lowest encodings are furthest away from
   2561   // the stack pointer.
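           // For example, the register with encoding 0 ends up in slot
           // kNumSafepointRegisters - 1, the slot farthest from esp.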
   2562   ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
   2563   return kNumSafepointRegisters - reg_code - 1;
   2564 }
   2565 
   2566 
   2567 void MacroAssembler::LoadHeapObject(Register result,
   2568                                     Handle<HeapObject> object) {
   2569   AllowDeferredHandleDereference embedding_raw_address;
   2570   if (isolate()->heap()->InNewSpace(*object)) {
   2571     Handle<Cell> cell = isolate()->factory()->NewCell(object);
   2572     mov(result, Operand::ForCell(cell));
   2573   } else {
   2574     mov(result, object);
   2575   }
   2576 }
   2577 
   2578 
   2579 void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
   2580   AllowDeferredHandleDereference using_raw_address;
   2581   if (isolate()->heap()->InNewSpace(*object)) {
   2582     Handle<Cell> cell = isolate()->factory()->NewCell(object);
   2583     cmp(reg, Operand::ForCell(cell));
   2584   } else {
   2585     cmp(reg, object);
   2586   }
   2587 }
   2588 
   2589 
   2590 void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
   2591   AllowDeferredHandleDereference using_raw_address;
   2592   if (isolate()->heap()->InNewSpace(*object)) {
   2593     Handle<Cell> cell = isolate()->factory()->NewCell(object);
   2594     push(Operand::ForCell(cell));
   2595   } else {
   2596     Push(object);
   2597   }
   2598 }
   2599 
   2600 
   2601 void MacroAssembler::Ret() {
   2602   ret(0);
   2603 }
   2604 
   2605 
   2606 void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
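           // The ret instruction encodes at most a 16-bit immediate, so larger
           // adjustments pop the return address, bump esp, and push it back.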
   2607   if (is_uint16(bytes_dropped)) {
   2608     ret(bytes_dropped);
   2609   } else {
   2610     pop(scratch);
   2611     add(esp, Immediate(bytes_dropped));
   2612     push(scratch);
   2613     ret(0);
   2614   }
   2615 }
   2616 
   2617 
   2618 void MacroAssembler::VerifyX87StackDepth(uint32_t depth) {
   2619   // Make sure the floating point stack is either empty or has depth items.
   2620   ASSERT(depth <= 7);
   2621 
   2622   // The top-of-stack (tos) is 7 if there is one item pushed.
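           // The TOP field occupies bits 11..13 of the FPU status word, hence
           // the 0x3800 mask and the shift by 11 below; TOP counts down from 0,
           // so a single push leaves it at 7.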
   2623   int tos = (8 - depth) % 8;
   2624   const int kTopMask = 0x3800;
   2625   push(eax);
   2626   fwait();
   2627   fnstsw_ax();
   2628   and_(eax, kTopMask);
   2629   shr(eax, 11);
   2630   cmp(eax, Immediate(tos));
   2631   Check(equal, kUnexpectedFPUStackDepthAfterInstruction);
   2632   fnclex();
   2633   pop(eax);
   2634 }
   2635 
   2636 
   2637 void MacroAssembler::Drop(int stack_elements) {
   2638   if (stack_elements > 0) {
   2639     add(esp, Immediate(stack_elements * kPointerSize));
   2640   }
   2641 }
   2642 
   2643 
   2644 void MacroAssembler::Move(Register dst, Register src) {
   2645   if (!dst.is(src)) {
   2646     mov(dst, src);
   2647   }
   2648 }
   2649 
   2650 
   2651 void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
   2652   if (FLAG_native_code_counters && counter->Enabled()) {
   2653     mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
   2654   }
   2655 }
   2656 
   2657 
   2658 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
   2659   ASSERT(value > 0);
   2660   if (FLAG_native_code_counters && counter->Enabled()) {
   2661     Operand operand = Operand::StaticVariable(ExternalReference(counter));
   2662     if (value == 1) {
   2663       inc(operand);
   2664     } else {
   2665       add(operand, Immediate(value));
   2666     }
   2667   }
   2668 }
   2669 
   2670 
   2671 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
   2672   ASSERT(value > 0);
   2673   if (FLAG_native_code_counters && counter->Enabled()) {
   2674     Operand operand = Operand::StaticVariable(ExternalReference(counter));
   2675     if (value == 1) {
   2676       dec(operand);
   2677     } else {
   2678       sub(operand, Immediate(value));
   2679     }
   2680   }
   2681 }
   2682 
   2683 
   2684 void MacroAssembler::IncrementCounter(Condition cc,
   2685                                       StatsCounter* counter,
   2686                                       int value) {
   2687   ASSERT(value > 0);
   2688   if (FLAG_native_code_counters && counter->Enabled()) {
   2689     Label skip;
   2690     j(NegateCondition(cc), &skip);
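             // Preserve EFLAGS across the counter update so callers can still
             // branch on the original condition afterwards.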
   2691     pushfd();
   2692     IncrementCounter(counter, value);
   2693     popfd();
   2694     bind(&skip);
   2695   }
   2696 }
   2697 
   2698 
   2699 void MacroAssembler::DecrementCounter(Condition cc,
   2700                                       StatsCounter* counter,
   2701                                       int value) {
   2702   ASSERT(value > 0);
   2703   if (FLAG_native_code_counters && counter->Enabled()) {
   2704     Label skip;
   2705     j(NegateCondition(cc), &skip);
   2706     pushfd();
   2707     DecrementCounter(counter, value);
   2708     popfd();
   2709     bind(&skip);
   2710   }
   2711 }
   2712 
   2713 
   2714 void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
   2715   if (emit_debug_code()) Check(cc, reason);
   2716 }
   2717 
   2718 
   2719 void MacroAssembler::AssertFastElements(Register elements) {
   2720   if (emit_debug_code()) {
   2721     Factory* factory = isolate()->factory();
   2722     Label ok;
   2723     cmp(FieldOperand(elements, HeapObject::kMapOffset),
   2724         Immediate(factory->fixed_array_map()));
   2725     j(equal, &ok);
   2726     cmp(FieldOperand(elements, HeapObject::kMapOffset),
   2727         Immediate(factory->fixed_double_array_map()));
   2728     j(equal, &ok);
   2729     cmp(FieldOperand(elements, HeapObject::kMapOffset),
   2730         Immediate(factory->fixed_cow_array_map()));
   2731     j(equal, &ok);
   2732     Abort(kJSObjectWithFastElementsMapHasSlowElements);
   2733     bind(&ok);
   2734   }
   2735 }
   2736 
   2737 
   2738 void MacroAssembler::Check(Condition cc, BailoutReason reason) {
   2739   Label L;
   2740   j(cc, &L);
   2741   Abort(reason);
   2742   // will not return here
   2743   bind(&L);
   2744 }


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::Abort(BailoutReason reason) {
  // We want to pass the msg string like a smi to avoid GC problems; however,
  // msg is not guaranteed to be properly aligned. Instead, we pass an aligned
  // pointer that is a proper v8 smi, together with the alignment difference
  // from the real pointer, also encoded as a smi.
  const char* msg = GetBailoutReason(reason);
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 2);
  } else {
    CallRuntime(Runtime::kAbort, 2);
  }
  // Control will not return here.
  int3();
}
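
// A worked trace of the smi-packing trick in Abort() above, assuming the
// usual ia32 smi encoding (kSmiTagMask == 1, kSmiTag == 0, so any even word
// is a valid smi); the address is illustrative only:
//   p1 = 0x08049ab3                    // raw, possibly unaligned msg pointer
//   p0 = (p1 & ~1) + 0 == 0x08049ab2   // even, hence GC-safe as a smi
//   p1 - p0 == 1                       // passed boxed as Smi::FromInt(1)
// The runtime side recovers the original pointer as p0 + (p1 - p0).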


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, scratch);
  psllq(dst, HeapNumber::kMantissaBits);
}
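
// A worked trace of LoadPowerOf2 for power == 3, using the IEEE-754 values
// HeapNumber::kExponentBias == 1023 and HeapNumber::kMantissaBits == 52:
//   scratch = 3 + 1023 = 1026 = 0x402
//   after psllq by 52: dst = 0x4020000000000000, the bit pattern of 8.0
// With a zero mantissa, the double's value is exactly 2^(exponent - bias).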


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
  j(not_equal, failure);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ASCII strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
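
// A worked trace of the interleaved check above, assuming the instance-type
// constant values of this era (kIsNotStringMask 0x80,
// kStringRepresentationMask 0x03, kStringEncodingMask 0x04), so that
// kFlatAsciiStringMask == 0x87 and kFlatAsciiStringTag == 0x04:
//   scratch1 = type1 & 0x87,  scratch2 = type2 & 0x87
//   lea computes scratch1 + scratch2 * 8 == scratch1 | (scratch2 << 3),
//   since 0x87 and 0x87 << 3 share no bits (the ASSERT_EQ above).
//   Both strings are flat ASCII iff that value == 0x04 | (0x04 << 3) == 0x24,
//   so one cmp tests both instance types at once.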


void MacroAssembler::JumpIfNotUniqueName(Operand operand,
                                         Label* not_unique_name,
                                         Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  j(zero, &succeed);
  cmpb(operand, static_cast<uint8_t>(SYMBOL_TYPE));
  j(not_equal, not_unique_name, distance);

  bind(&succeed);
}


void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Align the stack and make room for num_arguments words plus one extra
    // slot that preserves the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}
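
// A worked trace of PrepareCallCFunction with num_arguments == 3 and a
// 16-byte ActivationFrameAlignment (the esp value is illustrative only):
//   esp0 = 0x7ffc1a5c                       // incoming stack pointer
//   esp  = esp0 - 4 * 4 = 0x7ffc1a4c
//   esp &= -16                              // esp = 0x7ffc1a40, aligned
//   [esp + 12] = esp0                       // slot past the 3 argument words
// CallCFunction below then restores esp with mov(esp, Operand(esp, 3 * 4)),
// which pops the argument area and the alignment padding in one move.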


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  ASSERT(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}


bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
  if (r1.is(r2)) return true;
  if (r1.is(r3)) return true;
  if (r1.is(r4)) return true;
  if (r2.is(r3)) return true;
  if (r2.is(r4)) return true;
  if (r3.is(r4)) return true;
  return false;
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(NULL, address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted by kGap in order for the assembler to generate size
  // bytes of instructions without running into buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
           static_cast<uint8_t>(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
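
// A worked trace of the masking above, assuming V8's 1MB pages
// (Page::kPageAlignmentMask == 0xFFFFF; the object address is illustrative):
//   object  = 0x0a2d4321              // tagged pointer inside some page
//   scratch = object & ~0xFFFFF = 0x0a200000, the page's MemoryChunk header,
// so the flag word can be tested at scratch + MemoryChunk::kFlagsOffset
// without dereferencing the object itself.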


void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  ASSERT(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}


void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
                                        Register scratch,
                                        Label* if_deprecated) {
  if (map->CanBeDeprecated()) {
    mov(scratch, map);
    mov(scratch, FieldOperand(scratch, Map::kBitField3Offset));
    and_(scratch, Immediate(Smi::FromInt(Map::Deprecated::kMask)));
    j(not_zero, if_deprecated);
  }
}


void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1,
           on_black, on_black_near,
           1, 0);  // kBlackBitPattern.
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
}


void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  j(zero, &word_boundary, Label::kNear);
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);

  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}
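
// A worked trace of HasColor for the JumpIfBlack case above (first_bit == 1,
// second_bit == 0, i.e. Marking::kBlackBitPattern "10"):
//   1. test the mask against the bitmap cell; a clear first bit means white
//      (or the impossible pattern "01"), so fall through to other_color.
//   2. add(mask, mask) shifts the mask to the second bit; if the shift
//      carries out of the 32-bit cell, re-test bit 0 of the next cell
//      instead (the word_boundary path).
//   3. a clear second bit then confirms black ("10") and jumps to has_color.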


void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  ASSERT(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}
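
// A standalone sketch of the arithmetic above, with the constants spelled
// out (1MB pages, 32 mark bits per 4-byte bitmap cell, one mark bit per
// 4-byte word). The helper name and the plain unsigned types are ours, for
// illustration only; the real values come from Page and Bitmap.
static inline void ComputeMarkBitSketch(unsigned addr,
                                        unsigned* cell_base,
                                        unsigned* mask) {
  const unsigned kPageAlignMask = (1u << 20) - 1;  // Page::kPageAlignmentMask
  const int kWordShift = 2;                        // kPointerSizeLog2
  const int kBitsPerCellShift = 5;                 // Bitmap::kBitsPerCellLog2
  const int kBytesPerCellShift = 2;                // Bitmap::kBytesPerCellLog2

  unsigned page = addr & ~kPageAlignMask;
  int shift = kBitsPerCellShift + kWordShift - kBytesPerCellShift;  // == 5
  // Byte offset of this word's 32-bit bitmap cell within its page.
  unsigned cell_offset = (addr >> shift) &
      ((kPageAlignMask >> shift) & ~((1u << kBytesPerCellShift) - 1));
  *cell_base = page + cell_offset;  // MemoryChunk::kHeaderSize is added later.
  // Bit index of this word within its cell.
  *mask = 1u << ((addr >> kWordShift) & ((1u << kBitsPerCellShift) - 1));
}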


void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not, we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(not_zero, &done, Label::kNear);

  if (emit_debug_code()) {
    // Check for the impossible bit pattern.
    Label ok;
    push(mask_scratch);
    // Shift left by adding; may overflow, making the check conservative.
    add(mask_scratch, mask_scratch);
    test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    pop(mask_scratch);
  }

  // Value is white.  We check whether it is data that doesn't need scanning.
  // Currently only checks for HeapNumber and non-cons strings.
  Register map = ecx;  // Holds map while checking type.
  Register length = ecx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number.
  mov(map, FieldOperand(value, HeapObject::kMapOffset));
  cmp(map, isolate()->factory()->heap_number_map());
  j(not_equal, &not_heap_number, Label::kNear);
  mov(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings.
  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not indirect (cons or sliced), then it's an
  // object containing no GC pointers.
  Register instance_type = ecx;
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
  j(not_zero, value_is_white_and_not_data);
  // It's a non-indirect (non-cons and non-slice) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
  Label not_external;
  // External strings are the only ones with the kExternalStringTag bit
  // set.
  ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
  ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
  test_b(instance_type, kExternalStringTag);
  j(zero, &not_external, Label::kNear);
  mov(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either ASCII or UC16.
  ASSERT(kOneByteStringTag == 0x04);
  and_(length, Immediate(kStringEncodingMask));
  xor_(length, Immediate(kStringEncodingMask));
  add(length, Immediate(0x04));
  // Value is now either 4 (if ASCII) or 8 (if UC16), i.e., the char size
  // shifted left by 2. If we multiply the string length as a smi by this,
  // it still won't overflow a 32-bit value.
  ASSERT_EQ(SeqOneByteString::kMaxSize, SeqTwoByteString::kMaxSize);
  ASSERT(SeqOneByteString::kMaxSize <=
         static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
  imul(length, FieldOperand(value, String::kLengthOffset));
  shr(length, 2 + kSmiTagSize + kSmiShiftSize);
  add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, Immediate(~kObjectAlignmentMask));

  bind(&is_data_object);
  // Value is a data object, and it is white.  Mark it black.  Since we know
  // that the object is white we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
      length);
  if (emit_debug_code()) {
    mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
    cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
    Check(less_equal, kLiveBytesCountOverflowChunkSize);
  }

  bind(&done);
}
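
// A worked trace of the sequential-string size computation in EnsureNotWhite
// above, for an ASCII string of length 5 (ia32 smis are value << 1; taking
// SeqString::kHeaderSize as 12 and kObjectAlignmentMask as 3 on ia32):
//   length = 4                          // char size 1, shifted left by 2
//   imul by the smi length (5 << 1)     // length = 4 * 10 = 40
//   shr by 2 + kSmiTagSize == 3         // length = 5 character bytes
//   + 12 + 3, then & ~3                 // length = 20, the aligned size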


void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Smi::FromInt(Map::EnumLengthBits::kMask)));
}


void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  mov(ecx, eax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(Map::kInvalidEnumCache)));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(0)));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register ecx contains the current JS
  // object we've reached through the prototype chain.
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(not_equal, call_runtime);

  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}
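
// The loop above is the assembly form of roughly this walk (a sketch in
// C++-like pseudocode, not V8 API):
//   for (Object* o = receiver; ; o = o->map()->prototype()) {
//     if (o == receiver && enum_length(o) == kInvalidEnumCache) goto runtime;
//     if (o != receiver && enum_length(o) != 0) goto runtime;
//     if (o->elements() != empty_fixed_array) goto runtime;
//     if (o->map()->prototype() == null) break;
//   }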


void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg) {
  Label no_memento_available;

  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  lea(scratch_reg, Operand(receiver_reg,
      JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  cmp(scratch_reg, Immediate(new_space_start));
  j(less, &no_memento_available);
  cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  j(greater, &no_memento_available);
  cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
      Immediate(Handle<Map>(isolate()->heap()->allocation_memento_map())));
  bind(&no_memento_available);
}
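
// Shape of the check above (sketch): a memento, if present, sits directly
// after the JSArray in new space, so with
//   candidate = receiver + JSArray::kSize + AllocationMemento::kSize - 1
// (the - 1 strips the heap-object tag) the code falls through to
// no_memento_available unless new_space_start <= candidate <= allocation_top,
// and otherwise compares the map word at candidate - AllocationMemento::kSize
// against the allocation_memento_map. The caller branches on the flags left
// by whichever cmp executed last.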


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32