// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_X87

#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/cpu-profiler.h"
#include "src/debug.h"
#include "src/isolate-inl.h"
#include "src/runtime.h"
#include "src/serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (isolate() != NULL) {
    // TODO(titzer): should we just use a null handle here instead?
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
  ASSERT(!r.IsDouble());
  if (r.IsInteger8()) {
    movsx_b(dst, src);
  } else if (r.IsUInteger8()) {
    movzx_b(dst, src);
  } else if (r.IsInteger16()) {
    movsx_w(dst, src);
  } else if (r.IsUInteger16()) {
    movzx_w(dst, src);
  } else {
    mov(dst, src);
  }
}


void MacroAssembler::Store(Register src, const Operand& dst, Representation r) {
  ASSERT(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    mov_b(dst, src);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    mov_w(dst, src);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    mov(dst, src);
  }
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
    Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
    mov(destination, value);
    return;
  }
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(destination, Immediate(index));
  mov(destination, Operand::StaticArray(destination,
                                        times_pointer_size,
                                        roots_array_start));
}


void MacroAssembler::StoreRoot(Register source,
                               Register scratch,
                               Heap::RootListIndex index) {
  ASSERT(Heap::RootCanBeWrittenAfterInitialization(index));
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
      source);
}


void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  cmp(with, Operand::StaticArray(scratch,
                                 times_pointer_size,
                                 roots_array_start));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  ASSERT(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  ASSERT(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}


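// Note (added commentary, not original V8 documentation): heap pages are
// aligned to Page::kPageAlignmentMask + 1 bytes, so masking an object's
// address with ~kPageAlignmentMask yields the start of its page, i.e. its
// MemoryChunk header, whose flags word records which space the page belongs
// to. For example, with 1 MB pages an object at 0x12345678 lives on the page
// starting at 0x12300000, and the IN_FROM_SPACE/IN_TO_SPACE bits are read
// from the flags word stored there.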
void MacroAssembler::InNewSpace(
    Register object,
    Register scratch,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == equal || cc == not_equal);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Check that we can use a test_b.
  ASSERT(MemoryChunk::IN_FROM_SPACE < 8);
  ASSERT(MemoryChunk::IN_TO_SPACE < 8);
  int mask = (1 << MemoryChunk::IN_FROM_SPACE)
           | (1 << MemoryChunk::IN_TO_SPACE);
  // If non-zero, the page belongs to new-space.
  test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
         static_cast<uint8_t>(mask));
  j(cc, condition_met, condition_met_distance);
}


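// Note (explanatory sketch, not original V8 documentation): the store
// buffer is a simple bump array of slot addresses. In C-like pseudocode the
// fast path below is roughly:
//   *store_buffer_top++ = addr;
//   if (store_buffer_top & StoreBuffer::kStoreBufferOverflowBit)
//     StoreBufferOverflowStub();  // hand the full buffer to the GC
// The single-bit test works because the buffer is sized and placed so that
// the overflow bit flips exactly when the top pointer walks off the end.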
void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr,
    Register scratch,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Check for the end of the buffer; call the stub if it has been reached.
  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow =
      StoreBufferOverflowStub(isolate());
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}


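// Worked example (added commentary): ClampUint8 clamps an int32 to
// [0, 255]. For reg = 300 (0x12C) the test against 0xFFFFFF00 is non-zero
// and the sign flag is clear, so setcc(negative) writes 0 and dec_b wraps
// it to 0xFF (255). For reg = -5 the sign flag is set, setcc writes 1, and
// dec_b yields 0. Values already in [0, 255] take the early exit.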
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}


void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::TruncateX87TOSToI(Register result_reg) {
  sub(esp, Immediate(kDoubleSize));
  fst_d(MemOperand(esp, 0));
  SlowTruncateToI(result_reg, esp, 0);
  add(esp, Immediate(kDoubleSize));
}


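// Note (added commentary, not original V8 documentation): the conversion
// below uses the classic x87 round-trip check. fist_s stores the top of
// stack as an int32, fild_s reloads that int32 as a double, and FCmp
// compares it with the original value; if the two differ, or the compare is
// unordered (NaN), the double was not exactly representable as an int32 and
// the conversion fails. E.g. 3.5 stores as 4 under the default
// round-to-nearest mode, reloads as 4.0 != 3.5, and control jumps to
// conversion_failed.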
void MacroAssembler::X87TOSToI(Register result_reg,
                               MinusZeroMode minus_zero_mode,
                               Label* conversion_failed,
                               Label::Distance dst) {
  Label done;
  sub(esp, Immediate(kPointerSize));
  fld(0);
  fist_s(MemOperand(esp, 0));
  fild_s(MemOperand(esp, 0));
  pop(result_reg);
  FCmp();
  j(not_equal, conversion_failed, dst);
  j(parity_even, conversion_failed, dst);
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    test(result_reg, Operand(result_reg));
    j(not_zero, &done, Label::kNear);
    // To check for minus zero, we load the value again as float, and check
    // if that is still 0.
    sub(esp, Immediate(kPointerSize));
    fst_s(MemOperand(esp, 0));
    pop(result_reg);
    test(result_reg, Operand(result_reg));
    j(not_zero, conversion_failed, dst);
  }
  bind(&done);
}


void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
                                           Register input_reg) {
  SlowTruncateToI(result_reg, input_reg);
}


void MacroAssembler::TaggedToI(Register result_reg,
                               Register input_reg,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision) {
  Label done;

  cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  j(not_equal, lost_precision, Label::kNear);

  // TODO(olivf) Converting a number on the FPU is actually quite slow. We
  // should first try a fast conversion and then bail out to this slow case.
  Label lost_precision_pop, zero_check;
  Label* lost_precision_int = (minus_zero_mode == FAIL_ON_MINUS_ZERO)
      ? &lost_precision_pop : lost_precision;
  sub(esp, Immediate(kPointerSize));
  fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) fld(0);
  fist_s(MemOperand(esp, 0));
  fild_s(MemOperand(esp, 0));
  FCmp();
  pop(result_reg);
  j(not_equal, lost_precision_int, Label::kNear);
  j(parity_even, lost_precision_int, Label::kNear);  // NaN.
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    test(result_reg, Operand(result_reg));
    j(zero, &zero_check, Label::kNear);
    fstp(0);
    jmp(&done, Label::kNear);
    bind(&zero_check);
    // To check for minus zero, we load the value again as float, and check
    // if that is still 0.
    sub(esp, Immediate(kPointerSize));
    fstp_s(Operand(esp, 0));
    pop(result_reg);
    test(result_reg, Operand(result_reg));
    j(zero, &done, Label::kNear);
    jmp(lost_precision, Label::kNear);

    bind(&lost_precision_pop);
    fstp(0);
    jmp(lost_precision, Label::kNear);
  }
  bind(&done);
}


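// Note (added commentary, not original V8 documentation): fild_s interprets
// the pushed 32-bit word as a signed integer, so a uint32 with the high bit
// set loads as a negative double. Adding the 2^32 bias corrects it: e.g.
// 0xFFFFFFFF loads as -1.0, and -1.0 + 4294967296.0 == 4294967295.0, the
// intended unsigned value.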
void MacroAssembler::LoadUint32NoSSE2(Register src) {
  Label done;
  push(src);
  fild_s(Operand(esp, 0));
  cmp(src, Immediate(0));
  j(not_sign, &done, Label::kNear);
  ExternalReference uint32_bias =
      ExternalReference::address_of_uint32_bias();
  fld_d(Operand::StaticVariable(uint32_bias));
  faddp(1);
  bind(&done);
  add(esp, Immediate(kPointerSize));
}


void MacroAssembler::RecordWriteArray(
    Register object,
    Register value,
    Register index,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    ASSERT_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(object, dst, value, remembered_set_action, OMIT_SMI_CHECK,
              pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(index, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    Label ok;
    test_b(dst, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, remembered_set_action, OMIT_SMI_CHECK,
              pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(dst, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWriteForMap(
    Register object,
    Handle<Map> map,
    Register scratch1,
    Register scratch2) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  AssertNotSmi(object);

  if (!FLAG_incremental_marking) {
    return;
  }

  // Compute the address.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // A single check of the map page's interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed
  // that the from object's page's interesting flag is also set.  This
  // optimization relies on the fact that maps can never be in new space.
  ASSERT(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address,
                       remembered_set_action);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::DebugBreak() {
  Move(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(isolate(), 1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}


bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
  static const int kMaxImmediateBits = 17;
  if (!RelocInfo::IsNone(x.rmode_)) return false;
  return !is_intn(x.x_, kMaxImmediateBits);
}


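// Note (added commentary, not original V8 documentation): large immediates
// are attacker-controllable and can be used to smuggle executable gadgets
// into the code stream (JIT spraying). SafeMove/SafePush defend against
// this by emitting the immediate XORed with a per-process jit cookie and
// re-XORing at run time, so the raw attacker-chosen bit pattern never
// appears verbatim in generated code.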
void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Move(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Move(dst, x);
  }
}


void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(above, fail, distance);
}


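// Note (added commentary, not original V8 documentation): FixedDoubleArray
// elements reserve one NaN bit pattern (the hole NaN) to mark missing
// entries, so every NaN stored here must first be canonicalized to a
// different, canonical NaN pattern; otherwise a user-produced NaN could be
// mistaken for a hole. The upper-word comparison below distinguishes NaNs
// and Infinities from ordinary doubles without touching the FPU.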
void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch,
    Label* fail,
    int elements_offset) {
  Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, canonicalize NaN.
  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
  cmp(FieldOperand(maybe_number, offset),
      Immediate(kNaNOrInfinityLowerBoundUpper32));
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  ExternalReference canonical_nan_reference =
      ExternalReference::address_of_canonical_non_hole_nan();
  fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
  bind(&have_double_value);
  fstp_d(FieldOperand(elements, key, times_4,
                      FixedDoubleArray::kHeaderSize - elements_offset));
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
  // it's an Infinity, and the non-NaN code path applies.
  j(greater, &is_nan, Label::kNear);
  cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan);
  bind(&is_nan);
  fld_d(Operand::StaticVariable(canonical_nan_reference));
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  // Preserve original value.
  mov(scratch, maybe_number);
  SmiUntag(scratch);
  push(scratch);
  fild_s(Operand(esp, 0));
  pop(scratch);
  fstp_d(FieldOperand(elements, key, times_4,
                      FixedDoubleArray::kHeaderSize - elements_offset));
  bind(&done);
}


void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}


void MacroAssembler::DispatchMap(Register obj,
                                 Register unused,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(equal, success);

  bind(&fail);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, static_cast<uint8_t>(LAST_NAME_TYPE));
  return below_equal;
}


void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}


void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  cmp(scratch,
      LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  j(above, fail);
}


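// Note (added commentary, not original V8 documentation): fucompp compares
// ST(0) with ST(1) and pops both, leaving the result in the FPU status
// word. fnstsw_ax copies that status word into ax, and sahf transfers
// C0/C2/C3 into the CPU flags, after which ordinary integer condition
// codes apply: not_equal means the values differ, and parity_even signals
// an unordered compare (at least one NaN). eax is saved around the sequence
// because fnstsw clobbers it.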
void MacroAssembler::FCmp() {
  fucompp();
  push(eax);
  fnstsw_ax();
  sahf();
  pop(eax);
}


void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandNotANumber);
    bind(&ok);
  }
}


void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, kOperandIsNotASmi);
  }
}


void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, kOperandIsNotAString);
  }
}


void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}


void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    cmp(FieldOperand(object, 0),
        Immediate(isolate()->factory()->allocation_site_map()));
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}


void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmi);
  }
}


void MacroAssembler::StubPrologue() {
  push(ebp);  // Caller's frame pointer.
  mov(ebp, esp);
  push(esi);  // Callee's context.
  push(Immediate(Smi::FromInt(StackFrame::STUB)));
}


void MacroAssembler::Prologue(bool code_pre_aging) {
  PredictableCodeSizeScope predictable_code_size_scope(this,
      kNoCodeAgeSequenceLength);
  if (code_pre_aging) {
    // Pre-age the code.
    call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
         RelocInfo::CODE_AGE_SEQUENCE);
    Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength);
  } else {
    push(ebp);  // Caller's frame pointer.
    mov(ebp, esp);
    push(esi);  // Callee's context.
    push(edi);  // Callee's JS function.
  }
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  leave();
}


void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}


void MacroAssembler::EnterExitFrameEpilogue(int argc) {
  sub(esp, Immediate(argc * kPointerSize));

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::EnterExitFrame() {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3);
}


void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc);
}


void MacroAssembler::LeaveExitFrame() {
  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(ecx);

  LeaveExitFrameEpilogue(true);
}


void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  if (restore_context) {
    mov(esi, Operand::StaticVariable(context_address));
  }
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue(restore_context);
}


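// Note (added commentary, not original V8 documentation): each try handler
// is a five-word record built directly on the stack; the STATIC_ASSERTs
// below pin its layout. From low to high addresses:
//   esp + 0 * kPointerSize : next handler (link to the previous handler)
//   esp + 1 * kPointerSize : code object
//   esp + 2 * kPointerSize : state (handler index and kind)
//   esp + 3 * kPointerSize : context
//   esp + 4 * kPointerSize : frame pointer
// Throw() unwinds by restoring esp from Isolate::kHandlerAddress and
// popping these fields in order.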
void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // We will build up the handler from the bottom by pushing on the stack.
  // First push the frame pointer and context.
  if (kind == StackHandler::JS_ENTRY) {
    // The frame pointer does not point to a JS frame so we save NULL for
    // ebp. We expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(0));  // NULL frame pointer.
    push(Immediate(Smi::FromInt(0)));  // No context.
  } else {
    push(ebp);
    push(esi);
  }
  // Push the state and the code object.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  push(Immediate(state));
  Push(CodeObject());

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));
  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}


void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it.  The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // eax = exception, edi = code object, edx = state.
  mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
  shr(edx, StackHandler::kKindWidth);
  mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
  SmiUntag(edx);
  lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
  jmp(edi);
}


void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));
  // Restore the next handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Restore the context and frame pointer.
  pop(esi);  // Context.
  pop(ebp);  // Frame pointer.

  // If the handler is a JS frame, restore the context to the frame.
  // (kind == JS_ENTRY) == (ebp == 0) == (esi == 0), so we could test either
  // ebp or esi.
  Label skip;
  test(esi, esi);
  j(zero, &skip, Label::kNear);
  mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  bind(&skip);

  JumpToHandlerEntry();
}


void MacroAssembler::ThrowUncatchable(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the top ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
  test(Operand(esp, StackHandlerConstants::kStateOffset),
       Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to next handler past the top ENTRY handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Clear the context pointer and frame pointer (0 was saved in the handler).
  pop(esi);
  pop(ebp);

  JumpToHandlerEntry();
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch1));
  ASSERT(!holder_reg.is(scratch2));
  ASSERT(!scratch1.is(scratch2));

  // Load current lexical context from the stack frame.
  mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch1, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  int offset =
      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, offset));
  mov(scratch1, FieldOperand(scratch1, GlobalObject::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Read the first word and compare to native_context_map.
    cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch2,
      FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    cmp(scratch2, isolate()->factory()->null_value());
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map().
    cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, token_offset));
  cmp(scratch1, FieldOperand(scratch2, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}


// Compute the hash code from the untagged key.  This must be kept in sync
// with ComputeIntegerHash in utils.h and with KeyedLoadGenericElementStub in
// code-stubs-hydrogen.cc.
//
// Note: r0 will contain the hash code on exit.
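//
// Illustrative sketch (added, not part of the original source): the
// instruction sequence below computes, in C terms,
//   uint32_t hash = key ^ seed;
//   hash = ~hash + (hash << 15);
//   hash = hash ^ (hash >> 12);
//   hash = hash + (hash << 2);   // emitted as lea(r0, r0 + r0 * 4)
//   hash = hash ^ (hash >> 4);
//   hash = hash * 2057;
//   hash = hash ^ (hash >> 16);
// a standard 32-bit integer finalizer that mixes all input bits into the
// low bits used for the dictionary index.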
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (serializer_enabled()) {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
}


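// Note (added commentary, not original V8 documentation): the unrolled loop
// below performs open-addressing lookup with non-linear probing: probe i
// inspects slot (hash + GetProbeOffset(i)) & mask, where mask is
// capacity - 1, so the capacity must be a power of two. Each entry occupies
// kEntrySize == 3 words (key, value, details), hence the index is scaled by
// 3 via lea before addressing the elements array.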
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key      - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary.
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask.
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // Convert smi to int.
  dec(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    ASSERT(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kNumberDictionaryProbes - 1)) {
      j(equal, &done);
    } else {
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a normal property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  ASSERT_EQ(NORMAL, 0);
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(allocation_top));
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(allocation_top));
  } else {
    mov(scratch, Immediate(allocation_top));
    mov(result, Operand(scratch, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}


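// Note (explanatory sketch, not original V8 documentation): the Allocate
// overloads below are inline bump-pointer allocators. In C-like pseudocode:
//   result = allocation_top;
//   new_top = result + object_size;
//   if (overflow || new_top > allocation_limit) goto gc_required;
//   allocation_top = new_top;
//   if (TAG_OBJECT) result += kHeapObjectTag;  // tag as a heap object
// The DOUBLE_ALIGNMENT path may first emit a one-word filler so the object
// body starts on an 8-byte boundary.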
   1342 void MacroAssembler::Allocate(int object_size,
   1343                               Register result,
   1344                               Register result_end,
   1345                               Register scratch,
   1346                               Label* gc_required,
   1347                               AllocationFlags flags) {
   1348   ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
   1349   ASSERT(object_size <= Page::kMaxRegularHeapObjectSize);
   1350   if (!FLAG_inline_new) {
   1351     if (emit_debug_code()) {
   1352       // Trash the registers to simulate an allocation failure.
   1353       mov(result, Immediate(0x7091));
   1354       if (result_end.is_valid()) {
   1355         mov(result_end, Immediate(0x7191));
   1356       }
   1357       if (scratch.is_valid()) {
   1358         mov(scratch, Immediate(0x7291));
   1359       }
   1360     }
   1361     jmp(gc_required);
   1362     return;
   1363   }
   1364   ASSERT(!result.is(result_end));
   1365 
   1366   // Load address of new object into result.
   1367   LoadAllocationTopHelper(result, scratch, flags);
   1368 
   1369   ExternalReference allocation_limit =
   1370       AllocationUtils::GetAllocationLimitReference(isolate(), flags);
   1371 
   1372   // Align the next allocation. Storing the filler map without checking top is
   1373   // safe in new-space because the limit of the heap is aligned there.
   1374   if ((flags & DOUBLE_ALIGNMENT) != 0) {
   1375     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
   1376     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
   1377     Label aligned;
   1378     test(result, Immediate(kDoubleAlignmentMask));
   1379     j(zero, &aligned, Label::kNear);
   1380     if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
   1381       cmp(result, Operand::StaticVariable(allocation_limit));
   1382       j(above_equal, gc_required);
   1383     }
   1384     mov(Operand(result, 0),
   1385         Immediate(isolate()->factory()->one_pointer_filler_map()));
   1386     add(result, Immediate(kDoubleSize / 2));
   1387     bind(&aligned);
   1388   }
   1389 
   1390   // Calculate new top and bail out if space is exhausted.
   1391   Register top_reg = result_end.is_valid() ? result_end : result;
   1392   if (!top_reg.is(result)) {
   1393     mov(top_reg, result);
   1394   }
   1395   add(top_reg, Immediate(object_size));
   1396   j(carry, gc_required);
   1397   cmp(top_reg, Operand::StaticVariable(allocation_limit));
   1398   j(above, gc_required);
   1399 
   1400   // Update allocation top.
   1401   UpdateAllocationTopHelper(top_reg, scratch, flags);
   1402 
   1403   // Tag result if requested.
   1404   bool tag_result = (flags & TAG_OBJECT) != 0;
   1405   if (top_reg.is(result)) {
   1406     if (tag_result) {
   1407       sub(result, Immediate(object_size - kHeapObjectTag));
   1408     } else {
   1409       sub(result, Immediate(object_size));
   1410     }
   1411   } else if (tag_result) {
   1412     ASSERT(kHeapObjectTag == 1);
   1413     inc(result);
   1414   }
   1415 }
   1416 
   1417 
   1418 void MacroAssembler::Allocate(int header_size,
   1419                               ScaleFactor element_size,
   1420                               Register element_count,
   1421                               RegisterValueType element_count_type,
   1422                               Register result,
   1423                               Register result_end,
   1424                               Register scratch,
   1425                               Label* gc_required,
   1426                               AllocationFlags flags) {
   1427   ASSERT((flags & SIZE_IN_WORDS) == 0);
   1428   if (!FLAG_inline_new) {
   1429     if (emit_debug_code()) {
   1430       // Trash the registers to simulate an allocation failure.
   1431       mov(result, Immediate(0x7091));
   1432       mov(result_end, Immediate(0x7191));
   1433       if (scratch.is_valid()) {
   1434         mov(scratch, Immediate(0x7291));
   1435       }
   1436       // Register element_count is not modified by the function.
   1437     }
   1438     jmp(gc_required);
   1439     return;
   1440   }
   1441   ASSERT(!result.is(result_end));
   1442 
   1443   // Load address of new object into result.
   1444   LoadAllocationTopHelper(result, scratch, flags);
   1445 
   1446   ExternalReference allocation_limit =
   1447       AllocationUtils::GetAllocationLimitReference(isolate(), flags);
   1448 
   1449   // Align the next allocation. Storing the filler map without checking top is
   1450   // safe in new-space because the limit of the heap is aligned there.
   1451   if ((flags & DOUBLE_ALIGNMENT) != 0) {
   1452     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
   1453     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
   1454     Label aligned;
   1455     test(result, Immediate(kDoubleAlignmentMask));
   1456     j(zero, &aligned, Label::kNear);
   1457     if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
   1458       cmp(result, Operand::StaticVariable(allocation_limit));
   1459       j(above_equal, gc_required);
   1460     }
   1461     mov(Operand(result, 0),
   1462         Immediate(isolate()->factory()->one_pointer_filler_map()));
   1463     add(result, Immediate(kDoubleSize / 2));
   1464     bind(&aligned);
   1465   }
   1466 
   1467   // Calculate new top and bail out if space is exhausted.
   1468   // We assume that element_count*element_size + header_size does not
   1469   // overflow.
   1470   if (element_count_type == REGISTER_VALUE_IS_SMI) {
   1471     STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
   1472     STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
   1473     STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
   1474     ASSERT(element_size >= times_2);
   1475     ASSERT(kSmiTagSize == 1);
   1476     element_size = static_cast<ScaleFactor>(element_size - 1);
   1477   } else {
   1478     ASSERT(element_count_type == REGISTER_VALUE_IS_INT32);
   1479   }
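  // Example of the scale adjustment above: a smi stores its value shifted
  // left by kSmiTagSize == 1, so a smi element count used with times_4
  // already carries a factor of two; dropping the scale to times_2 keeps
  // element_count * scale equal to value * 4.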
   1480   lea(result_end, Operand(element_count, element_size, header_size));
   1481   add(result_end, result);
   1482   j(carry, gc_required);
   1483   cmp(result_end, Operand::StaticVariable(allocation_limit));
   1484   j(above, gc_required);
   1485 
   1486   if ((flags & TAG_OBJECT) != 0) {
   1487     ASSERT(kHeapObjectTag == 1);
   1488     inc(result);
   1489   }
   1490 
   1491   // Update allocation top.
   1492   UpdateAllocationTopHelper(result_end, scratch, flags);
   1493 }
   1494 
   1495 
   1496 void MacroAssembler::Allocate(Register object_size,
   1497                               Register result,
   1498                               Register result_end,
   1499                               Register scratch,
   1500                               Label* gc_required,
   1501                               AllocationFlags flags) {
   1502   ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
   1503   if (!FLAG_inline_new) {
   1504     if (emit_debug_code()) {
   1505       // Trash the registers to simulate an allocation failure.
   1506       mov(result, Immediate(0x7091));
   1507       mov(result_end, Immediate(0x7191));
   1508       if (scratch.is_valid()) {
   1509         mov(scratch, Immediate(0x7291));
   1510       }
   1511       // object_size is left unchanged by this function.
   1512     }
   1513     jmp(gc_required);
   1514     return;
   1515   }
   1516   ASSERT(!result.is(result_end));
   1517 
   1518   // Load address of new object into result.
   1519   LoadAllocationTopHelper(result, scratch, flags);
   1520 
   1521   ExternalReference allocation_limit =
   1522       AllocationUtils::GetAllocationLimitReference(isolate(), flags);
   1523 
   1524   // Align the next allocation. Storing the filler map without checking top is
   1525   // safe in new-space because the limit of the heap is aligned there.
   1526   if ((flags & DOUBLE_ALIGNMENT) != 0) {
   1527     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
   1528     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
   1529     Label aligned;
   1530     test(result, Immediate(kDoubleAlignmentMask));
   1531     j(zero, &aligned, Label::kNear);
   1532     if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
   1533       cmp(result, Operand::StaticVariable(allocation_limit));
   1534       j(above_equal, gc_required);
   1535     }
   1536     mov(Operand(result, 0),
   1537         Immediate(isolate()->factory()->one_pointer_filler_map()));
   1538     add(result, Immediate(kDoubleSize / 2));
   1539     bind(&aligned);
   1540   }
   1541 
   1542   // Calculate new top and bail out if space is exhausted.
   1543   if (!object_size.is(result_end)) {
   1544     mov(result_end, object_size);
   1545   }
   1546   add(result_end, result);
   1547   j(carry, gc_required);
   1548   cmp(result_end, Operand::StaticVariable(allocation_limit));
   1549   j(above, gc_required);
   1550 
   1551   // Tag result if requested.
   1552   if ((flags & TAG_OBJECT) != 0) {
   1553     ASSERT(kHeapObjectTag == 1);
   1554     inc(result);
   1555   }
   1556 
   1557   // Update allocation top.
   1558   UpdateAllocationTopHelper(result_end, scratch, flags);
   1559 }
   1560 
   1561 
   1562 void MacroAssembler::UndoAllocationInNewSpace(Register object) {
   1563   ExternalReference new_space_allocation_top =
   1564       ExternalReference::new_space_allocation_top_address(isolate());
   1565 
   1566   // Make sure the object has no tag before resetting top.
   1567   and_(object, Immediate(~kHeapObjectTagMask));
   1568 #ifdef DEBUG
   1569   cmp(object, Operand::StaticVariable(new_space_allocation_top));
   1570   Check(below, kUndoAllocationOfNonAllocatedMemory);
   1571 #endif
   1572   mov(Operand::StaticVariable(new_space_allocation_top), object);
   1573 }
   1574 
   1575 
   1576 void MacroAssembler::AllocateHeapNumber(Register result,
   1577                                         Register scratch1,
   1578                                         Register scratch2,
   1579                                         Label* gc_required) {
   1580   // Allocate heap number in new space.
   1581   Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
   1582            TAG_OBJECT);
   1583 
   1584   // Set the map.
   1585   mov(FieldOperand(result, HeapObject::kMapOffset),
   1586       Immediate(isolate()->factory()->heap_number_map()));
   1587 }
   1588 
   1589 
   1590 void MacroAssembler::AllocateTwoByteString(Register result,
   1591                                            Register length,
   1592                                            Register scratch1,
   1593                                            Register scratch2,
   1594                                            Register scratch3,
   1595                                            Label* gc_required) {
   1596   // Calculate the number of bytes needed for the characters in the string while
   1597   // observing object alignment.
   1598   ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
   1599   ASSERT(kShortSize == 2);
   1600   // scratch1 = length * 2 + kObjectAlignmentMask.
   1601   lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
   1602   and_(scratch1, Immediate(~kObjectAlignmentMask));
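  // For instance, with kObjectAlignmentMask == 3 (illustrative for this
  // 32-bit target), length == 3 gives scratch1 = 3 * 2 + 3 = 9, and masking
  // with ~3 rounds the 6 payload bytes up to 8, an aligned size.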
   1603 
   1604   // Allocate two byte string in new space.
   1605   Allocate(SeqTwoByteString::kHeaderSize,
   1606            times_1,
   1607            scratch1,
   1608            REGISTER_VALUE_IS_INT32,
   1609            result,
   1610            scratch2,
   1611            scratch3,
   1612            gc_required,
   1613            TAG_OBJECT);
   1614 
   1615   // Set the map, length and hash field.
   1616   mov(FieldOperand(result, HeapObject::kMapOffset),
   1617       Immediate(isolate()->factory()->string_map()));
   1618   mov(scratch1, length);
   1619   SmiTag(scratch1);
   1620   mov(FieldOperand(result, String::kLengthOffset), scratch1);
   1621   mov(FieldOperand(result, String::kHashFieldOffset),
   1622       Immediate(String::kEmptyHashField));
   1623 }
   1624 
   1625 
   1626 void MacroAssembler::AllocateAsciiString(Register result,
   1627                                          Register length,
   1628                                          Register scratch1,
   1629                                          Register scratch2,
   1630                                          Register scratch3,
   1631                                          Label* gc_required) {
   1632   // Calculate the number of bytes needed for the characters in the string while
   1633   // observing object alignment.
   1634   ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
   1635   mov(scratch1, length);
   1636   ASSERT(kCharSize == 1);
   1637   add(scratch1, Immediate(kObjectAlignmentMask));
   1638   and_(scratch1, Immediate(~kObjectAlignmentMask));
   1639 
   1640   // Allocate ASCII string in new space.
   1641   Allocate(SeqOneByteString::kHeaderSize,
   1642            times_1,
   1643            scratch1,
   1644            REGISTER_VALUE_IS_INT32,
   1645            result,
   1646            scratch2,
   1647            scratch3,
   1648            gc_required,
   1649            TAG_OBJECT);
   1650 
   1651   // Set the map, length and hash field.
   1652   mov(FieldOperand(result, HeapObject::kMapOffset),
   1653       Immediate(isolate()->factory()->ascii_string_map()));
   1654   mov(scratch1, length);
   1655   SmiTag(scratch1);
   1656   mov(FieldOperand(result, String::kLengthOffset), scratch1);
   1657   mov(FieldOperand(result, String::kHashFieldOffset),
   1658       Immediate(String::kEmptyHashField));
   1659 }
   1660 
   1661 
   1662 void MacroAssembler::AllocateAsciiString(Register result,
   1663                                          int length,
   1664                                          Register scratch1,
   1665                                          Register scratch2,
   1666                                          Label* gc_required) {
   1667   ASSERT(length > 0);
   1668 
   1669   // Allocate ASCII string in new space.
   1670   Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
   1671            gc_required, TAG_OBJECT);
   1672 
   1673   // Set the map, length and hash field.
   1674   mov(FieldOperand(result, HeapObject::kMapOffset),
   1675       Immediate(isolate()->factory()->ascii_string_map()));
   1676   mov(FieldOperand(result, String::kLengthOffset),
   1677       Immediate(Smi::FromInt(length)));
   1678   mov(FieldOperand(result, String::kHashFieldOffset),
   1679       Immediate(String::kEmptyHashField));
   1680 }
   1681 
   1682 
   1683 void MacroAssembler::AllocateTwoByteConsString(Register result,
    1684                                                Register scratch1,
    1685                                                Register scratch2,
    1686                                                Label* gc_required) {
    1687   // Allocate the cons string in new space.
   1688   Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
   1689            TAG_OBJECT);
   1690 
   1691   // Set the map. The other fields are left uninitialized.
   1692   mov(FieldOperand(result, HeapObject::kMapOffset),
   1693       Immediate(isolate()->factory()->cons_string_map()));
   1694 }
   1695 
   1696 
   1697 void MacroAssembler::AllocateAsciiConsString(Register result,
   1698                                              Register scratch1,
   1699                                              Register scratch2,
   1700                                              Label* gc_required) {
   1701   Allocate(ConsString::kSize,
   1702            result,
   1703            scratch1,
   1704            scratch2,
   1705            gc_required,
   1706            TAG_OBJECT);
   1707 
   1708   // Set the map. The other fields are left uninitialized.
   1709   mov(FieldOperand(result, HeapObject::kMapOffset),
   1710       Immediate(isolate()->factory()->cons_ascii_string_map()));
   1711 }
   1712 
   1713 
   1714 void MacroAssembler::AllocateTwoByteSlicedString(Register result,
    1715                                                  Register scratch1,
    1716                                                  Register scratch2,
    1717                                                  Label* gc_required) {
    1718   // Allocate the sliced string in new space.
   1719   Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
   1720            TAG_OBJECT);
   1721 
   1722   // Set the map. The other fields are left uninitialized.
   1723   mov(FieldOperand(result, HeapObject::kMapOffset),
   1724       Immediate(isolate()->factory()->sliced_string_map()));
   1725 }
   1726 
   1727 
   1728 void MacroAssembler::AllocateAsciiSlicedString(Register result,
   1729                                                Register scratch1,
   1730                                                Register scratch2,
   1731                                                Label* gc_required) {
    1732   // Allocate the sliced string in new space.
   1733   Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
   1734            TAG_OBJECT);
   1735 
   1736   // Set the map. The other fields are left uninitialized.
   1737   mov(FieldOperand(result, HeapObject::kMapOffset),
   1738       Immediate(isolate()->factory()->sliced_ascii_string_map()));
   1739 }
   1740 
   1741 
    1742 // Copy memory, byte-by-byte, from source to destination.  Not optimized for
    1743 // long or aligned copies.  The contents of scratch and length are destroyed.
    1744 // Destination is incremented by length; source, length and scratch are
    1745 // clobbered.  Many variants of movsb, loop unrolling, word moves, and
    1746 // indexed operands have been tried here already, and this is fastest.
    1747 // A simpler loop is faster on small copies, but 30% slower on large ones.
    1748 // The cld() instruction must have been emitted, to clear the direction
    1749 // flag, before calling this function.
   1750 void MacroAssembler::CopyBytes(Register source,
   1751                                Register destination,
   1752                                Register length,
   1753                                Register scratch) {
   1754   Label short_loop, len4, len8, len12, done, short_string;
   1755   ASSERT(source.is(esi));
   1756   ASSERT(destination.is(edi));
   1757   ASSERT(length.is(ecx));
   1758   cmp(length, Immediate(4));
   1759   j(below, &short_string, Label::kNear);
   1760 
   1761   // Because source is 4-byte aligned in our uses of this function,
   1762   // we keep source aligned for the rep_movs call by copying the odd bytes
   1763   // at the end of the ranges.
   1764   mov(scratch, Operand(source, length, times_1, -4));
   1765   mov(Operand(destination, length, times_1, -4), scratch);
   1766 
   1767   cmp(length, Immediate(8));
   1768   j(below_equal, &len4, Label::kNear);
   1769   cmp(length, Immediate(12));
   1770   j(below_equal, &len8, Label::kNear);
   1771   cmp(length, Immediate(16));
   1772   j(below_equal, &len12, Label::kNear);
   1773 
   1774   mov(scratch, ecx);
   1775   shr(ecx, 2);
   1776   rep_movs();
   1777   and_(scratch, Immediate(0x3));
   1778   add(destination, scratch);
   1779   jmp(&done, Label::kNear);
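  // Worked example for the path above: length == 22 first stores bytes
  // 18..21 as one dword, rep_movs then moves 22 >> 2 == 5 dwords (20 bytes),
  // and scratch & 3 == 2 advances destination by the remainder, leaving it
  // exactly length bytes past where it started.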
   1780 
   1781   bind(&len12);
   1782   mov(scratch, Operand(source, 8));
   1783   mov(Operand(destination, 8), scratch);
   1784   bind(&len8);
   1785   mov(scratch, Operand(source, 4));
   1786   mov(Operand(destination, 4), scratch);
   1787   bind(&len4);
   1788   mov(scratch, Operand(source, 0));
   1789   mov(Operand(destination, 0), scratch);
   1790   add(destination, length);
   1791   jmp(&done, Label::kNear);
   1792 
   1793   bind(&short_string);
   1794   test(length, length);
   1795   j(zero, &done, Label::kNear);
   1796 
   1797   bind(&short_loop);
   1798   mov_b(scratch, Operand(source, 0));
   1799   mov_b(Operand(destination, 0), scratch);
   1800   inc(source);
   1801   inc(destination);
   1802   dec(length);
   1803   j(not_zero, &short_loop);
   1804 
   1805   bind(&done);
   1806 }
   1807 
   1808 
   1809 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
   1810                                                 Register end_offset,
   1811                                                 Register filler) {
   1812   Label loop, entry;
   1813   jmp(&entry);
   1814   bind(&loop);
   1815   mov(Operand(start_offset, 0), filler);
   1816   add(start_offset, Immediate(kPointerSize));
   1817   bind(&entry);
   1818   cmp(start_offset, end_offset);
   1819   j(less, &loop);
   1820 }
   1821 
   1822 
   1823 void MacroAssembler::BooleanBitTest(Register object,
   1824                                     int field_offset,
   1825                                     int bit_index) {
   1826   bit_index += kSmiTagSize + kSmiShiftSize;
   1827   ASSERT(IsPowerOf2(kBitsPerByte));
   1828   int byte_index = bit_index / kBitsPerByte;
   1829   int byte_bit_index = bit_index & (kBitsPerByte - 1);
   1830   test_b(FieldOperand(object, field_offset + byte_index),
   1831          static_cast<byte>(1 << byte_bit_index));
   1832 }
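// Illustration of the arithmetic above, assuming kSmiTagSize == 1 and
// kSmiShiftSize == 0 on this target: the field holds a smi, so bit_index == 5
// becomes 6 in the raw word; byte_index is then 6 / 8 == 0 and byte_bit_index
// is 6, i.e. the test reads the first byte of the field with mask 1 << 6.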
   1833 
   1834 
   1835 
   1836 void MacroAssembler::NegativeZeroTest(Register result,
   1837                                       Register op,
   1838                                       Label* then_label) {
   1839   Label ok;
   1840   test(result, result);
   1841   j(not_zero, &ok);
   1842   test(op, op);
   1843   j(sign, then_label);
   1844   bind(&ok);
   1845 }
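// Example: when computing op * x with op == -5 and x == 0, result is zero and
// op is negative, so the sign test above routes to then_label; that is the
// -0 case, which a smi result cannot represent.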
   1846 
   1847 
   1848 void MacroAssembler::NegativeZeroTest(Register result,
   1849                                       Register op1,
   1850                                       Register op2,
   1851                                       Register scratch,
   1852                                       Label* then_label) {
   1853   Label ok;
   1854   test(result, result);
   1855   j(not_zero, &ok);
   1856   mov(scratch, op1);
   1857   or_(scratch, op2);
   1858   j(sign, then_label);
   1859   bind(&ok);
   1860 }
   1861 
   1862 
   1863 void MacroAssembler::TryGetFunctionPrototype(Register function,
   1864                                              Register result,
   1865                                              Register scratch,
   1866                                              Label* miss,
   1867                                              bool miss_on_bound_function) {
   1868   // Check that the receiver isn't a smi.
   1869   JumpIfSmi(function, miss);
   1870 
   1871   // Check that the function really is a function.
   1872   CmpObjectType(function, JS_FUNCTION_TYPE, result);
   1873   j(not_equal, miss);
   1874 
   1875   if (miss_on_bound_function) {
   1876     // If a bound function, go to miss label.
   1877     mov(scratch,
   1878         FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
   1879     BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
   1880                    SharedFunctionInfo::kBoundFunction);
   1881     j(not_zero, miss);
   1882   }
   1883 
   1884   // Make sure that the function has an instance prototype.
   1885   Label non_instance;
   1886   movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
   1887   test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
   1888   j(not_zero, &non_instance);
   1889 
   1890   // Get the prototype or initial map from the function.
   1891   mov(result,
   1892       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
   1893 
   1894   // If the prototype or initial map is the hole, don't return it and
   1895   // simply miss the cache instead. This will allow us to allocate a
   1896   // prototype object on-demand in the runtime system.
   1897   cmp(result, Immediate(isolate()->factory()->the_hole_value()));
   1898   j(equal, miss);
   1899 
   1900   // If the function does not have an initial map, we're done.
   1901   Label done;
   1902   CmpObjectType(result, MAP_TYPE, scratch);
   1903   j(not_equal, &done);
   1904 
   1905   // Get the prototype from the initial map.
   1906   mov(result, FieldOperand(result, Map::kPrototypeOffset));
   1907   jmp(&done);
   1908 
   1909   // Non-instance prototype: Fetch prototype from constructor field
   1910   // in initial map.
   1911   bind(&non_instance);
   1912   mov(result, FieldOperand(result, Map::kConstructorOffset));
   1913 
   1914   // All done.
   1915   bind(&done);
   1916 }
   1917 
   1918 
   1919 void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
   1920   ASSERT(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
   1921   call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
   1922 }
   1923 
   1924 
   1925 void MacroAssembler::TailCallStub(CodeStub* stub) {
   1926   jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
   1927 }
   1928 
   1929 
   1930 void MacroAssembler::StubReturn(int argc) {
   1931   ASSERT(argc >= 1 && generating_stub());
   1932   ret((argc - 1) * kPointerSize);
   1933 }
   1934 
   1935 
   1936 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
   1937   return has_frame_ || !stub->SometimesSetsUpAFrame();
   1938 }
   1939 
   1940 
   1941 void MacroAssembler::IndexFromHash(Register hash, Register index) {
   1942   // The assert checks that the constants for the maximum number of digits
   1943   // for an array index cached in the hash field and the number of bits
    1944   // reserved for it do not conflict.
   1945   ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
   1946          (1 << String::kArrayIndexValueBits));
   1947   if (!index.is(hash)) {
   1948     mov(index, hash);
   1949   }
   1950   DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
   1951 }
   1952 
   1953 
   1954 void MacroAssembler::CallRuntime(const Runtime::Function* f,
   1955                                  int num_arguments) {
   1956   // If the expected number of arguments of the runtime function is
   1957   // constant, we check that the actual number of arguments match the
   1958   // expectation.
   1959   CHECK(f->nargs < 0 || f->nargs == num_arguments);
   1960 
   1961   // TODO(1236192): Most runtime routines don't need the number of
   1962   // arguments passed in because it is constant. At some point we
   1963   // should remove this need and make the runtime routine entry code
   1964   // smarter.
   1965   Move(eax, Immediate(num_arguments));
   1966   mov(ebx, Immediate(ExternalReference(f, isolate())));
   1967   CEntryStub ces(isolate(), 1);
   1968   CallStub(&ces);
   1969 }
   1970 
   1971 
   1972 void MacroAssembler::CallExternalReference(ExternalReference ref,
   1973                                            int num_arguments) {
   1974   mov(eax, Immediate(num_arguments));
   1975   mov(ebx, Immediate(ref));
   1976 
   1977   CEntryStub stub(isolate(), 1);
   1978   CallStub(&stub);
   1979 }
   1980 
   1981 
   1982 void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
   1983                                                int num_arguments,
   1984                                                int result_size) {
   1985   // TODO(1236192): Most runtime routines don't need the number of
   1986   // arguments passed in because it is constant. At some point we
   1987   // should remove this need and make the runtime routine entry code
   1988   // smarter.
   1989   Move(eax, Immediate(num_arguments));
   1990   JumpToExternalReference(ext);
   1991 }
   1992 
   1993 
   1994 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
   1995                                      int num_arguments,
   1996                                      int result_size) {
   1997   TailCallExternalReference(ExternalReference(fid, isolate()),
   1998                             num_arguments,
   1999                             result_size);
   2000 }
   2001 
   2002 
   2003 Operand ApiParameterOperand(int index) {
   2004   return Operand(esp, index * kPointerSize);
   2005 }
   2006 
   2007 
   2008 void MacroAssembler::PrepareCallApiFunction(int argc) {
   2009   EnterApiExitFrame(argc);
   2010   if (emit_debug_code()) {
   2011     mov(esi, Immediate(BitCast<int32_t>(kZapValue)));
   2012   }
   2013 }
   2014 
   2015 
   2016 void MacroAssembler::CallApiFunctionAndReturn(
   2017     Register function_address,
   2018     ExternalReference thunk_ref,
   2019     Operand thunk_last_arg,
   2020     int stack_space,
   2021     Operand return_value_operand,
   2022     Operand* context_restore_operand) {
   2023   ExternalReference next_address =
   2024       ExternalReference::handle_scope_next_address(isolate());
   2025   ExternalReference limit_address =
   2026       ExternalReference::handle_scope_limit_address(isolate());
   2027   ExternalReference level_address =
   2028       ExternalReference::handle_scope_level_address(isolate());
   2029 
   2030   ASSERT(edx.is(function_address));
   2031   // Allocate HandleScope in callee-save registers.
   2032   mov(ebx, Operand::StaticVariable(next_address));
   2033   mov(edi, Operand::StaticVariable(limit_address));
   2034   add(Operand::StaticVariable(level_address), Immediate(1));
   2035 
   2036   if (FLAG_log_timer_events) {
   2037     FrameScope frame(this, StackFrame::MANUAL);
   2038     PushSafepointRegisters();
   2039     PrepareCallCFunction(1, eax);
   2040     mov(Operand(esp, 0),
   2041         Immediate(ExternalReference::isolate_address(isolate())));
   2042     CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
   2043     PopSafepointRegisters();
   2044   }
   2045 
   2046 
   2047   Label profiler_disabled;
   2048   Label end_profiler_check;
   2049   mov(eax, Immediate(ExternalReference::is_profiling_address(isolate())));
   2050   cmpb(Operand(eax, 0), 0);
   2051   j(zero, &profiler_disabled);
   2052 
   2053   // Additional parameter is the address of the actual getter function.
   2054   mov(thunk_last_arg, function_address);
   2055   // Call the api function.
   2056   mov(eax, Immediate(thunk_ref));
   2057   call(eax);
   2058   jmp(&end_profiler_check);
   2059 
   2060   bind(&profiler_disabled);
   2061   // Call the api function.
   2062   call(function_address);
   2063   bind(&end_profiler_check);
   2064 
   2065   if (FLAG_log_timer_events) {
   2066     FrameScope frame(this, StackFrame::MANUAL);
   2067     PushSafepointRegisters();
   2068     PrepareCallCFunction(1, eax);
   2069     mov(Operand(esp, 0),
   2070         Immediate(ExternalReference::isolate_address(isolate())));
   2071     CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
   2072     PopSafepointRegisters();
   2073   }
   2074 
   2075   Label prologue;
   2076   // Load the value from ReturnValue
   2077   mov(eax, return_value_operand);
   2078 
   2079   Label promote_scheduled_exception;
   2080   Label exception_handled;
   2081   Label delete_allocated_handles;
   2082   Label leave_exit_frame;
   2083 
   2084   bind(&prologue);
   2085   // No more valid handles (the result handle was the last one). Restore
   2086   // previous handle scope.
   2087   mov(Operand::StaticVariable(next_address), ebx);
   2088   sub(Operand::StaticVariable(level_address), Immediate(1));
   2089   Assert(above_equal, kInvalidHandleScopeLevel);
   2090   cmp(edi, Operand::StaticVariable(limit_address));
   2091   j(not_equal, &delete_allocated_handles);
   2092   bind(&leave_exit_frame);
   2093 
   2094   // Check if the function scheduled an exception.
   2095   ExternalReference scheduled_exception_address =
   2096       ExternalReference::scheduled_exception_address(isolate());
   2097   cmp(Operand::StaticVariable(scheduled_exception_address),
   2098       Immediate(isolate()->factory()->the_hole_value()));
   2099   j(not_equal, &promote_scheduled_exception);
   2100   bind(&exception_handled);
   2101 
   2102 #if ENABLE_EXTRA_CHECKS
   2103   // Check if the function returned a valid JavaScript value.
   2104   Label ok;
   2105   Register return_value = eax;
   2106   Register map = ecx;
   2107 
   2108   JumpIfSmi(return_value, &ok, Label::kNear);
   2109   mov(map, FieldOperand(return_value, HeapObject::kMapOffset));
   2110 
   2111   CmpInstanceType(map, FIRST_NONSTRING_TYPE);
   2112   j(below, &ok, Label::kNear);
   2113 
   2114   CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
   2115   j(above_equal, &ok, Label::kNear);
   2116 
   2117   cmp(map, isolate()->factory()->heap_number_map());
   2118   j(equal, &ok, Label::kNear);
   2119 
   2120   cmp(return_value, isolate()->factory()->undefined_value());
   2121   j(equal, &ok, Label::kNear);
   2122 
   2123   cmp(return_value, isolate()->factory()->true_value());
   2124   j(equal, &ok, Label::kNear);
   2125 
   2126   cmp(return_value, isolate()->factory()->false_value());
   2127   j(equal, &ok, Label::kNear);
   2128 
   2129   cmp(return_value, isolate()->factory()->null_value());
   2130   j(equal, &ok, Label::kNear);
   2131 
   2132   Abort(kAPICallReturnedInvalidObject);
   2133 
   2134   bind(&ok);
   2135 #endif
   2136 
   2137   bool restore_context = context_restore_operand != NULL;
   2138   if (restore_context) {
   2139     mov(esi, *context_restore_operand);
   2140   }
   2141   LeaveApiExitFrame(!restore_context);
   2142   ret(stack_space * kPointerSize);
   2143 
   2144   bind(&promote_scheduled_exception);
   2145   {
   2146     FrameScope frame(this, StackFrame::INTERNAL);
   2147     CallRuntime(Runtime::kHiddenPromoteScheduledException, 0);
   2148   }
   2149   jmp(&exception_handled);
   2150 
   2151   // HandleScope limit has changed. Delete allocated extensions.
   2152   ExternalReference delete_extensions =
   2153       ExternalReference::delete_handle_scope_extensions(isolate());
   2154   bind(&delete_allocated_handles);
   2155   mov(Operand::StaticVariable(limit_address), edi);
   2156   mov(edi, eax);
   2157   mov(Operand(esp, 0),
   2158       Immediate(ExternalReference::isolate_address(isolate())));
   2159   mov(eax, Immediate(delete_extensions));
   2160   call(eax);
   2161   mov(eax, edi);
   2162   jmp(&leave_exit_frame);
   2163 }
   2164 
   2165 
   2166 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
   2167   // Set the entry point and jump to the C entry runtime stub.
   2168   mov(ebx, Immediate(ext));
   2169   CEntryStub ces(isolate(), 1);
   2170   jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
   2171 }
   2172 
   2173 
   2174 void MacroAssembler::InvokePrologue(const ParameterCount& expected,
   2175                                     const ParameterCount& actual,
   2176                                     Handle<Code> code_constant,
   2177                                     const Operand& code_operand,
   2178                                     Label* done,
   2179                                     bool* definitely_mismatches,
   2180                                     InvokeFlag flag,
   2181                                     Label::Distance done_near,
   2182                                     const CallWrapper& call_wrapper) {
   2183   bool definitely_matches = false;
   2184   *definitely_mismatches = false;
   2185   Label invoke;
   2186   if (expected.is_immediate()) {
   2187     ASSERT(actual.is_immediate());
   2188     if (expected.immediate() == actual.immediate()) {
   2189       definitely_matches = true;
   2190     } else {
   2191       mov(eax, actual.immediate());
   2192       const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
   2193       if (expected.immediate() == sentinel) {
   2194         // Don't worry about adapting arguments for builtins that
    2195         // don't want that done. Skip adaptation code by making it look
   2196         // like we have a match between expected and actual number of
   2197         // arguments.
   2198         definitely_matches = true;
   2199       } else {
   2200         *definitely_mismatches = true;
   2201         mov(ebx, expected.immediate());
   2202       }
   2203     }
   2204   } else {
   2205     if (actual.is_immediate()) {
   2206       // Expected is in register, actual is immediate. This is the
   2207       // case when we invoke function values without going through the
   2208       // IC mechanism.
   2209       cmp(expected.reg(), actual.immediate());
   2210       j(equal, &invoke);
   2211       ASSERT(expected.reg().is(ebx));
   2212       mov(eax, actual.immediate());
   2213     } else if (!expected.reg().is(actual.reg())) {
   2214       // Both expected and actual are in (different) registers. This
   2215       // is the case when we invoke functions using call and apply.
   2216       cmp(expected.reg(), actual.reg());
   2217       j(equal, &invoke);
   2218       ASSERT(actual.reg().is(eax));
   2219       ASSERT(expected.reg().is(ebx));
   2220     }
   2221   }
   2222 
   2223   if (!definitely_matches) {
   2224     Handle<Code> adaptor =
   2225         isolate()->builtins()->ArgumentsAdaptorTrampoline();
   2226     if (!code_constant.is_null()) {
   2227       mov(edx, Immediate(code_constant));
   2228       add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
   2229     } else if (!code_operand.is_reg(edx)) {
   2230       mov(edx, code_operand);
   2231     }
   2232 
   2233     if (flag == CALL_FUNCTION) {
   2234       call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
   2235       call(adaptor, RelocInfo::CODE_TARGET);
   2236       call_wrapper.AfterCall();
   2237       if (!*definitely_mismatches) {
   2238         jmp(done, done_near);
   2239       }
   2240     } else {
   2241       jmp(adaptor, RelocInfo::CODE_TARGET);
   2242     }
   2243     bind(&invoke);
   2244   }
   2245 }
   2246 
   2247 
   2248 void MacroAssembler::InvokeCode(const Operand& code,
   2249                                 const ParameterCount& expected,
   2250                                 const ParameterCount& actual,
   2251                                 InvokeFlag flag,
   2252                                 const CallWrapper& call_wrapper) {
   2253   // You can't call a function without a valid frame.
   2254   ASSERT(flag == JUMP_FUNCTION || has_frame());
   2255 
   2256   Label done;
   2257   bool definitely_mismatches = false;
   2258   InvokePrologue(expected, actual, Handle<Code>::null(), code,
   2259                  &done, &definitely_mismatches, flag, Label::kNear,
   2260                  call_wrapper);
   2261   if (!definitely_mismatches) {
   2262     if (flag == CALL_FUNCTION) {
   2263       call_wrapper.BeforeCall(CallSize(code));
   2264       call(code);
   2265       call_wrapper.AfterCall();
   2266     } else {
   2267       ASSERT(flag == JUMP_FUNCTION);
   2268       jmp(code);
   2269     }
   2270     bind(&done);
   2271   }
   2272 }
   2273 
   2274 
   2275 void MacroAssembler::InvokeFunction(Register fun,
   2276                                     const ParameterCount& actual,
   2277                                     InvokeFlag flag,
   2278                                     const CallWrapper& call_wrapper) {
   2279   // You can't call a function without a valid frame.
   2280   ASSERT(flag == JUMP_FUNCTION || has_frame());
   2281 
   2282   ASSERT(fun.is(edi));
   2283   mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
   2284   mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
   2285   mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
   2286   SmiUntag(ebx);
   2287 
   2288   ParameterCount expected(ebx);
   2289   InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
   2290              expected, actual, flag, call_wrapper);
   2291 }
   2292 
   2293 
   2294 void MacroAssembler::InvokeFunction(Register fun,
   2295                                     const ParameterCount& expected,
   2296                                     const ParameterCount& actual,
   2297                                     InvokeFlag flag,
   2298                                     const CallWrapper& call_wrapper) {
   2299   // You can't call a function without a valid frame.
   2300   ASSERT(flag == JUMP_FUNCTION || has_frame());
   2301 
   2302   ASSERT(fun.is(edi));
   2303   mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
   2304 
   2305   InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
   2306              expected, actual, flag, call_wrapper);
   2307 }
   2308 
   2309 
   2310 void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
   2311                                     const ParameterCount& expected,
   2312                                     const ParameterCount& actual,
   2313                                     InvokeFlag flag,
   2314                                     const CallWrapper& call_wrapper) {
   2315   LoadHeapObject(edi, function);
   2316   InvokeFunction(edi, expected, actual, flag, call_wrapper);
   2317 }
   2318 
   2319 
   2320 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
   2321                                    InvokeFlag flag,
   2322                                    const CallWrapper& call_wrapper) {
   2323   // You can't call a builtin without a valid frame.
   2324   ASSERT(flag == JUMP_FUNCTION || has_frame());
   2325 
   2326   // Rely on the assertion to check that the number of provided
    2327   // arguments matches the expected number of arguments. Fake a
   2328   // parameter count to avoid emitting code to do the check.
   2329   ParameterCount expected(0);
   2330   GetBuiltinFunction(edi, id);
   2331   InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
   2332              expected, expected, flag, call_wrapper);
   2333 }
   2334 
   2335 
   2336 void MacroAssembler::GetBuiltinFunction(Register target,
   2337                                         Builtins::JavaScript id) {
   2338   // Load the JavaScript builtin function from the builtins object.
   2339   mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   2340   mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
   2341   mov(target, FieldOperand(target,
   2342                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
   2343 }
   2344 
   2345 
   2346 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
   2347   ASSERT(!target.is(edi));
   2348   // Load the JavaScript builtin function from the builtins object.
   2349   GetBuiltinFunction(edi, id);
   2350   // Load the code entry point from the function into the target register.
   2351   mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
   2352 }
   2353 
   2354 
   2355 void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
   2356   if (context_chain_length > 0) {
   2357     // Move up the chain of contexts to the context containing the slot.
   2358     mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
   2359     for (int i = 1; i < context_chain_length; i++) {
   2360       mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
   2361     }
   2362   } else {
   2363     // Slot is in the current function context.  Move it into the
   2364     // destination register in case we store into it (the write barrier
   2365     // cannot be allowed to destroy the context in esi).
   2366     mov(dst, esi);
   2367   }
   2368 
   2369   // We should not have found a with context by walking the context chain
   2370   // (i.e., the static scope chain and runtime context chain do not agree).
   2371   // A variable occurring in such a scope should have slot type LOOKUP and
   2372   // not CONTEXT.
   2373   if (emit_debug_code()) {
   2374     cmp(FieldOperand(dst, HeapObject::kMapOffset),
   2375         isolate()->factory()->with_context_map());
   2376     Check(not_equal, kVariableResolvedToWithContext);
   2377   }
   2378 }
   2379 
   2380 
   2381 void MacroAssembler::LoadTransitionedArrayMapConditional(
   2382     ElementsKind expected_kind,
   2383     ElementsKind transitioned_kind,
   2384     Register map_in_out,
   2385     Register scratch,
   2386     Label* no_map_match) {
   2387   // Load the global or builtins object from the current context.
   2388   mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   2389   mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
   2390 
   2391   // Check that the function's map is the same as the expected cached map.
   2392   mov(scratch, Operand(scratch,
   2393                        Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
   2394 
   2395   size_t offset = expected_kind * kPointerSize +
   2396       FixedArrayBase::kHeaderSize;
   2397   cmp(map_in_out, FieldOperand(scratch, offset));
   2398   j(not_equal, no_map_match);
   2399 
   2400   // Use the transitioned cached map.
   2401   offset = transitioned_kind * kPointerSize +
   2402       FixedArrayBase::kHeaderSize;
   2403   mov(map_in_out, FieldOperand(scratch, offset));
   2404 }
   2405 
   2406 
   2407 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
   2408   // Load the global or builtins object from the current context.
   2409   mov(function,
   2410       Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   2411   // Load the native context from the global or builtins object.
   2412   mov(function,
   2413       FieldOperand(function, GlobalObject::kNativeContextOffset));
   2414   // Load the function from the native context.
   2415   mov(function, Operand(function, Context::SlotOffset(index)));
   2416 }
   2417 
   2418 
   2419 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
   2420                                                   Register map) {
   2421   // Load the initial map.  The global functions all have initial maps.
   2422   mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
   2423   if (emit_debug_code()) {
   2424     Label ok, fail;
   2425     CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
   2426     jmp(&ok);
   2427     bind(&fail);
   2428     Abort(kGlobalFunctionsMustHaveInitialMap);
   2429     bind(&ok);
   2430   }
   2431 }
   2432 
   2433 
   2434 // Store the value in register src in the safepoint register stack
   2435 // slot for register dst.
   2436 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
   2437   mov(SafepointRegisterSlot(dst), src);
   2438 }
   2439 
   2440 
   2441 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
   2442   mov(SafepointRegisterSlot(dst), src);
   2443 }
   2444 
   2445 
   2446 void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
   2447   mov(dst, SafepointRegisterSlot(src));
   2448 }
   2449 
   2450 
   2451 Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
   2452   return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
   2453 }
   2454 
   2455 
   2456 int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
   2457   // The registers are pushed starting with the lowest encoding,
   2458   // which means that lowest encodings are furthest away from
   2459   // the stack pointer.
   2460   ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
   2461   return kNumSafepointRegisters - reg_code - 1;
   2462 }
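// For example, with kNumSafepointRegisters == 8, reg_code 0 (eax, pushed
// first) maps to slot index 8 - 0 - 1 == 7, the slot with the largest offset
// from esp, while the highest encoding lands at index 0, next to the stack
// pointer.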
   2463 
   2464 
   2465 void MacroAssembler::LoadHeapObject(Register result,
   2466                                     Handle<HeapObject> object) {
   2467   AllowDeferredHandleDereference embedding_raw_address;
   2468   if (isolate()->heap()->InNewSpace(*object)) {
   2469     Handle<Cell> cell = isolate()->factory()->NewCell(object);
   2470     mov(result, Operand::ForCell(cell));
   2471   } else {
   2472     mov(result, object);
   2473   }
   2474 }
   2475 
   2476 
   2477 void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
   2478   AllowDeferredHandleDereference using_raw_address;
   2479   if (isolate()->heap()->InNewSpace(*object)) {
   2480     Handle<Cell> cell = isolate()->factory()->NewCell(object);
   2481     cmp(reg, Operand::ForCell(cell));
   2482   } else {
   2483     cmp(reg, object);
   2484   }
   2485 }
   2486 
   2487 
   2488 void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
   2489   AllowDeferredHandleDereference using_raw_address;
   2490   if (isolate()->heap()->InNewSpace(*object)) {
   2491     Handle<Cell> cell = isolate()->factory()->NewCell(object);
   2492     push(Operand::ForCell(cell));
   2493   } else {
   2494     Push(object);
   2495   }
   2496 }
   2497 
   2498 
   2499 void MacroAssembler::Ret() {
   2500   ret(0);
   2501 }
   2502 
   2503 
   2504 void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
   2505   if (is_uint16(bytes_dropped)) {
   2506     ret(bytes_dropped);
   2507   } else {
   2508     pop(scratch);
   2509     add(esp, Immediate(bytes_dropped));
   2510     push(scratch);
   2511     ret(0);
   2512   }
   2513 }
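// The split above exists because the x86 "ret imm16" encoding only accepts a
// 16-bit immediate: drops of 64KB or more save the return address in scratch,
// adjust esp directly, and push it back before a plain ret.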
   2514 
   2515 
   2516 void MacroAssembler::VerifyX87StackDepth(uint32_t depth) {
   2517   // Make sure the floating point stack is either empty or has depth items.
   2518   ASSERT(depth <= 7);
   2519   // This is very expensive.
   2520   ASSERT(FLAG_debug_code && FLAG_enable_slow_asserts);
   2521 
   2522   // The top-of-stack (tos) is 7 if there is one item pushed.
   2523   int tos = (8 - depth) % 8;
   2524   const int kTopMask = 0x3800;
   2525   push(eax);
   2526   fwait();
   2527   fnstsw_ax();
   2528   and_(eax, kTopMask);
   2529   shr(eax, 11);
   2530   cmp(eax, Immediate(tos));
   2531   Check(equal, kUnexpectedFPUStackDepthAfterInstruction);
   2532   fnclex();
   2533   pop(eax);
   2534 }
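// Sketch of the decoding above: the x87 TOP field occupies bits 11..13 of the
// FPU status word, so masking with 0x3800 and shifting right by 11 leaves TOP
// in eax; with one item pushed TOP is 7, matching (8 - depth) % 8 for
// depth == 1.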
   2535 
   2536 
   2537 void MacroAssembler::Drop(int stack_elements) {
   2538   if (stack_elements > 0) {
   2539     add(esp, Immediate(stack_elements * kPointerSize));
   2540   }
   2541 }
   2542 
   2543 
   2544 void MacroAssembler::Move(Register dst, Register src) {
   2545   if (!dst.is(src)) {
   2546     mov(dst, src);
   2547   }
   2548 }
   2549 
   2550 
   2551 void MacroAssembler::Move(Register dst, const Immediate& x) {
   2552   if (x.is_zero()) {
   2553     xor_(dst, dst);  // Shorter than mov of 32-bit immediate 0.
   2554   } else {
   2555     mov(dst, x);
   2556   }
   2557 }
   2558 
   2559 
   2560 void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
   2561   mov(dst, x);
   2562 }
   2563 
   2564 
   2565 void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
   2566   if (FLAG_native_code_counters && counter->Enabled()) {
   2567     mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
   2568   }
   2569 }
   2570 
   2571 
   2572 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
   2573   ASSERT(value > 0);
   2574   if (FLAG_native_code_counters && counter->Enabled()) {
   2575     Operand operand = Operand::StaticVariable(ExternalReference(counter));
   2576     if (value == 1) {
   2577       inc(operand);
   2578     } else {
   2579       add(operand, Immediate(value));
   2580     }
   2581   }
   2582 }
   2583 
   2584 
   2585 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
   2586   ASSERT(value > 0);
   2587   if (FLAG_native_code_counters && counter->Enabled()) {
   2588     Operand operand = Operand::StaticVariable(ExternalReference(counter));
   2589     if (value == 1) {
   2590       dec(operand);
   2591     } else {
   2592       sub(operand, Immediate(value));
   2593     }
   2594   }
   2595 }
   2596 
   2597 
   2598 void MacroAssembler::IncrementCounter(Condition cc,
   2599                                       StatsCounter* counter,
   2600                                       int value) {
   2601   ASSERT(value > 0);
   2602   if (FLAG_native_code_counters && counter->Enabled()) {
   2603     Label skip;
   2604     j(NegateCondition(cc), &skip);
   2605     pushfd();
   2606     IncrementCounter(counter, value);
   2607     popfd();
   2608     bind(&skip);
   2609   }
   2610 }
   2611 
   2612 
   2613 void MacroAssembler::DecrementCounter(Condition cc,
   2614                                       StatsCounter* counter,
   2615                                       int value) {
   2616   ASSERT(value > 0);
   2617   if (FLAG_native_code_counters && counter->Enabled()) {
   2618     Label skip;
   2619     j(NegateCondition(cc), &skip);
   2620     pushfd();
   2621     DecrementCounter(counter, value);
   2622     popfd();
   2623     bind(&skip);
   2624   }
   2625 }
   2626 
   2627 
   2628 void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
   2629   if (emit_debug_code()) Check(cc, reason);
   2630 }
   2631 
   2632 
   2633 void MacroAssembler::AssertFastElements(Register elements) {
   2634   if (emit_debug_code()) {
   2635     Factory* factory = isolate()->factory();
   2636     Label ok;
   2637     cmp(FieldOperand(elements, HeapObject::kMapOffset),
   2638         Immediate(factory->fixed_array_map()));
   2639     j(equal, &ok);
   2640     cmp(FieldOperand(elements, HeapObject::kMapOffset),
   2641         Immediate(factory->fixed_double_array_map()));
   2642     j(equal, &ok);
   2643     cmp(FieldOperand(elements, HeapObject::kMapOffset),
   2644         Immediate(factory->fixed_cow_array_map()));
   2645     j(equal, &ok);
   2646     Abort(kJSObjectWithFastElementsMapHasSlowElements);
   2647     bind(&ok);
   2648   }
   2649 }
   2650 
   2651 
   2652 void MacroAssembler::Check(Condition cc, BailoutReason reason) {
   2653   Label L;
   2654   j(cc, &L);
   2655   Abort(reason);
   2656   // will not return here
   2657   bind(&L);
   2658 }
   2659 
   2660 
   2661 void MacroAssembler::CheckStackAlignment() {
   2662   int frame_alignment = OS::ActivationFrameAlignment();
   2663   int frame_alignment_mask = frame_alignment - 1;
   2664   if (frame_alignment > kPointerSize) {
   2665     ASSERT(IsPowerOf2(frame_alignment));
   2666     Label alignment_as_expected;
   2667     test(esp, Immediate(frame_alignment_mask));
   2668     j(zero, &alignment_as_expected);
   2669     // Abort if stack is not aligned.
   2670     int3();
   2671     bind(&alignment_as_expected);
   2672   }
   2673 }
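// For instance, if OS::ActivationFrameAlignment() reports 16 bytes, the mask
// is 15 and any esp value with a non-zero low nibble hits the int3 trap above.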
   2674 
   2675 
   2676 void MacroAssembler::Abort(BailoutReason reason) {
   2677 #ifdef DEBUG
   2678   const char* msg = GetBailoutReason(reason);
   2679   if (msg != NULL) {
   2680     RecordComment("Abort message: ");
   2681     RecordComment(msg);
   2682   }
   2683 
   2684   if (FLAG_trap_on_abort) {
   2685     int3();
   2686     return;
   2687   }
   2688 #endif
   2689 
   2690   push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(reason))));
   2691   // Disable stub call restrictions to always allow calls to abort.
   2692   if (!has_frame_) {
   2693     // We don't actually want to generate a pile of code for this, so just
   2694     // claim there is a stack frame, without generating one.
   2695     FrameScope scope(this, StackFrame::NONE);
   2696     CallRuntime(Runtime::kAbort, 1);
   2697   } else {
   2698     CallRuntime(Runtime::kAbort, 1);
   2699   }
   2700   // will not return here
   2701   int3();
   2702 }
   2703 
   2704 
   2705 void MacroAssembler::LoadInstanceDescriptors(Register map,
   2706                                              Register descriptors) {
   2707   mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
   2708 }
   2709 
   2710 
   2711 void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
   2712   mov(dst, FieldOperand(map, Map::kBitField3Offset));
   2713   DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
   2714 }
   2715 
   2716 
   2717 void MacroAssembler::LookupNumberStringCache(Register object,
   2718                                              Register result,
   2719                                              Register scratch1,
   2720                                              Register scratch2,
   2721                                              Label* not_found) {
   2722   // Use of registers. Register result is used as a temporary.
   2723   Register number_string_cache = result;
   2724   Register mask = scratch1;
   2725   Register scratch = scratch2;
   2726 
   2727   // Load the number string cache.
   2728   LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
   2729   // Make the hash mask from the length of the number string cache. It
   2730   // contains two elements (number and string) for each cache entry.
   2731   mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
   2732   shr(mask, kSmiTagSize + 1);  // Untag length and divide it by two.
   2733   sub(mask, Immediate(1));  // Make mask.
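  // Example of the mask computation: a cache of 64 entries is a FixedArray of
  // length 128, whose length field holds the smi 128 << 1 == 256; shifting
  // right by kSmiTagSize + 1 == 2 yields 64, and subtracting 1 gives the
  // power-of-two mask 63.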
   2734 
   2735   // Calculate the entry in the number string cache. The hash value in the
   2736   // number string cache for smis is just the smi value, and the hash for
   2737   // doubles is the xor of the upper and lower words. See
   2738   // Heap::GetNumberStringCache.
   2739   Label smi_hash_calculated;
   2740   Label load_result_from_cache;
   2741   Label not_smi;
   2742   STATIC_ASSERT(kSmiTag == 0);
   2743   JumpIfNotSmi(object, &not_smi, Label::kNear);
   2744   mov(scratch, object);
   2745   SmiUntag(scratch);
   2746   jmp(&smi_hash_calculated, Label::kNear);
   2747   bind(&not_smi);
   2748   cmp(FieldOperand(object, HeapObject::kMapOffset),
   2749       isolate()->factory()->heap_number_map());
   2750   j(not_equal, not_found);
   2751   STATIC_ASSERT(8 == kDoubleSize);
   2752   mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
   2753   xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
   2754   // Object is heap number and hash is now in scratch. Calculate cache index.
   2755   and_(scratch, mask);
   2756   Register index = scratch;
   2757   Register probe = mask;
   2758   mov(probe,
   2759       FieldOperand(number_string_cache,
   2760                    index,
   2761                    times_twice_pointer_size,
   2762                    FixedArray::kHeaderSize));
   2763   JumpIfSmi(probe, not_found);
   2764   fld_d(FieldOperand(object, HeapNumber::kValueOffset));
   2765   fld_d(FieldOperand(probe, HeapNumber::kValueOffset));
   2766   FCmp();
   2767   j(parity_even, not_found);  // Bail out if NaN is involved.
   2768   j(not_equal, not_found);  // The cache did not contain this value.
   2769   jmp(&load_result_from_cache, Label::kNear);
   2770 
   2771   bind(&smi_hash_calculated);
   2772   // Object is smi and hash is now in scratch. Calculate cache index.
   2773   and_(scratch, mask);
   2774   // Check if the entry is the smi we are looking for.
   2775   cmp(object,
   2776       FieldOperand(number_string_cache,
   2777                    index,
   2778                    times_twice_pointer_size,
   2779                    FixedArray::kHeaderSize));
   2780   j(not_equal, not_found);
   2781 
   2782   // Get the result from the cache.
   2783   bind(&load_result_from_cache);
   2784   mov(result,
   2785       FieldOperand(number_string_cache,
   2786                    index,
   2787                    times_twice_pointer_size,
   2788                    FixedArray::kHeaderSize + kPointerSize));
   2789   IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
   2790 }


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
  j(not_equal, failure);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ASCII strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
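
// Illustration (not part of the original source): the lea above packs both
// masked instance types into one register so a single cmp can test both
// strings at once. Roughly, in plain C++:
//
//   int combined = (type1 & kFlatAsciiStringMask) +
//                  ((type2 & kFlatAsciiStringMask) << 3);  // lea ... times_8
//   bool both_flat_ascii =
//       combined == (kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
//
// The shift by 3 cannot clobber the first operand's bits because the
// ASSERT_EQ above checks that the mask and the mask shifted left by 3 do
// not overlap.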


void MacroAssembler::JumpIfNotUniqueName(Operand operand,
                                         Label* not_unique_name,
                                         Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  j(zero, &succeed);
  cmpb(operand, static_cast<uint8_t>(SYMBOL_TYPE));
  j(not_equal, not_unique_name, distance);

  bind(&succeed);
}
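
// Illustration (not part of the original source): a unique name is either
// an internalized string or a Symbol, so the two checks above amount to:
//
//   bool is_unique =
//       (type & (kIsNotStringMask | kIsNotInternalizedMask)) == 0  // internalized
//       || type == SYMBOL_TYPE;                                    // symbol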


void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  JumpIfNotSmi(string, &is_object, Label::kNear);
  Abort(kNonObject);
  bind(&is_object);

  push(value);
  mov(value, FieldOperand(string, HeapObject::kMapOffset));
  movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));

  and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmp(value, Immediate(encoding_mask));
  pop(value);
  Check(equal, kUnexpectedStringType);

  // The index comes in untagged. Tag it so it can be compared with the
  // string length without using a temp register; it is restored at the
  // end of this function.
  SmiTag(index);
  Check(no_overflow, kIndexIsTooLarge);

  cmp(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  cmp(index, Immediate(Smi::FromInt(0)));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index.
  SmiUntag(index);
}
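
// Illustration (not part of the original source): on ia32, SmiTag is an
// add(reg, reg), i.e. a shift left by kSmiTagSize (1), which sets the
// overflow flag when the value does not fit in a 31-bit smi. The
// no_overflow Check above therefore rejects indices >= 2^30:
//
//   int32_t tagged = index + index;   // SmiTag; OF set on signed overflow
//   // overflow  <=>  index outside [-2^30, 2^30)  <=>  not a valid smi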


void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make the stack end at the alignment and make room for num_arguments
    // words and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}
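
// Illustration (not part of the original source): with a 16-byte
// ActivationFrameAlignment and num_arguments == 2, the aligned branch above
// computes (in C-like pseudocode):
//
//   saved_esp = esp;
//   esp -= (2 + 1) * kPointerSize;  // argument slots + slot for saved esp
//   esp &= -16;                     // round down to the alignment
//   *(esp + 2 * kPointerSize) = saved_esp;
//
// Arguments are then stored at esp, and CallCFunction below reloads the
// saved esp from the extra slot after the call returns.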


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  ASSERT(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}
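
// Usage sketch (illustrative only; the external reference shown is just an
// example): a call site reserves the slots, stores the arguments, and calls:
//
//   PrepareCallCFunction(2, scratch);
//   mov(Operand(esp, 0 * kPointerSize), arg0);
//   mov(Operand(esp, 1 * kPointerSize), arg1);
//   CallCFunction(ExternalReference::power_double_double_function(isolate()),
//                 2);
//
// In the aligned case the mov above restores the esp that
// PrepareCallCFunction saved in the slot past the arguments.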


bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
  if (r1.is(r2)) return true;
  if (r1.is(r3)) return true;
  if (r1.is(r4)) return true;
  if (r2.is(r3)) return true;
  if (r2.is(r4)) return true;
  if (r3.is(r4)) return true;
  return false;
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(NULL, address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate
  // size bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
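
// Usage sketch (illustrative only): CodePatcher is used RAII-style. The
// destructor flushes the instruction cache and asserts that exactly |size|
// bytes were emitted:
//
//   {  // Patch two bytes at |address| with int3 + nop.
//     CodePatcher patcher(address, 2);
//     patcher.masm()->int3();
//     patcher.masm()->nop();
//   }  // <-- icache flushed, size checked here.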


void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
           static_cast<uint8_t>(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
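
// Illustration (not part of the original source): pages are
// Page::kPageSize-aligned, so masking any object address with
// ~kPageAlignmentMask yields the MemoryChunk header of its page:
//
//   uintptr_t chunk = address & ~Page::kPageAlignmentMask;
//   intptr_t flags = *reinterpret_cast<intptr_t*>(
//       chunk + MemoryChunk::kFlagsOffset);
//   bool condition_met = ((flags & mask) != 0);  // the cc == not_zero case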


void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  ASSERT(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}


void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
                                        Register scratch,
                                        Label* if_deprecated) {
  if (map->CanBeDeprecated()) {
    mov(scratch, map);
    mov(scratch, FieldOperand(scratch, Map::kBitField3Offset));
    and_(scratch, Immediate(Map::Deprecated::kMask));
    j(not_zero, if_deprecated);
  }
}


void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1,
           on_black, on_black_near,
           1, 0);  // kBlackBitPattern.
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
}


void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  j(zero, &word_boundary, Label::kNear);
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}
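
// Illustration (not part of the original source): mark bits come in pairs
// (see the bit-pattern ASSERTs in EnsureNotWhite below), so HasColor tests
// the bit selected by mask_scratch and then the bit after it:
//
//   bool first = (cell & mask) != 0;
//   mask <<= 1;                      // add(mask_scratch, mask_scratch)
//   bool second = (mask != 0) ? (cell & mask) != 0
//                             : (next_cell & 1) != 0;  // word_boundary case
//   // e.g. black is first/second == 1/0, matching kBlackBitPattern "10".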


void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  ASSERT(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}
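
// Illustration (not part of the original source): the marking bitmap keeps
// one bit per pointer-size word of the page, 32 bits per cell on ia32. For
// an address at byte offset |off| inside its page:
//
//   word_index  = off >> kPointerSizeLog2;
//   cell_offset = (word_index >> Bitmap::kBitsPerCellLog2)
//                     * Bitmap::kBytesPerCell;
//   bit_in_cell = word_index & (Bitmap::kBitsPerCell - 1);
//   mask        = 1 << bit_in_cell;
//
// e.g. off == 0x100 gives word index 64, cell 2, bit 0. The single shr by
// |shift| above folds the two shifts together and masks the result down to
// a cell-aligned byte offset in one step.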


void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(not_zero, &done, Label::kNear);

  if (emit_debug_code()) {
    // Check for impossible bit pattern.
    Label ok;
    push(mask_scratch);
    // shl.  May overflow, making the check conservative.
    add(mask_scratch, mask_scratch);
    test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    pop(mask_scratch);
  }

  // Value is white.  We check whether it is data that doesn't need scanning.
  // Currently only checks for heap numbers and non-indirect strings.
  Register map = ecx;  // Holds map while checking type.
  Register length = ecx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap number.
  mov(map, FieldOperand(value, HeapObject::kMapOffset));
  cmp(map, isolate()->factory()->heap_number_map());
  j(not_equal, &not_heap_number, Label::kNear);
  mov(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings.
  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
  Register instance_type = ecx;
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
  j(not_zero, value_is_white_and_not_data);
  // It's a non-indirect (non-cons and non-slice) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
  Label not_external;
  // External strings are the only ones with the kExternalStringTag bit
  // set.
  ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
  ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
  test_b(instance_type, kExternalStringTag);
  j(zero, &not_external, Label::kNear);
  mov(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either ASCII or UC16.
  ASSERT(kOneByteStringTag == 0x04);
  and_(length, Immediate(kStringEncodingMask));
  xor_(length, Immediate(kStringEncodingMask));
  add(length, Immediate(0x04));
  // Value now either 4 (if ASCII) or 8 (if UC16), i.e., char-size shifted
  // by 2. If we multiply the string length as smi by this, it still
  // won't overflow a 32-bit value.
  ASSERT_EQ(SeqOneByteString::kMaxSize, SeqTwoByteString::kMaxSize);
  ASSERT(SeqOneByteString::kMaxSize <=
         static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
  imul(length, FieldOperand(value, String::kLengthOffset));
  shr(length, 2 + kSmiTagSize + kSmiShiftSize);
  add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, Immediate(~kObjectAlignmentMask));

  bind(&is_data_object);
  // Value is a data object, and it is white.  Mark it black.  Since we know
  // that the object is white we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
      length);
  if (emit_debug_code()) {
    mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
    cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
    Check(less_equal, kLiveBytesCountOverflowChunkSize);
  }

  bind(&done);
}
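
// Illustration (not part of the original source): the and/xor/add sequence
// above converts the encoding bit into the character size shifted left by
// two, with kStringEncodingMask == kOneByteStringTag == 0x04:
//
//   type &= 0x04;   // 0x04 for one-byte, 0x00 for two-byte
//   type ^= 0x04;   // 0x00 for one-byte, 0x04 for two-byte
//   type += 0x04;   // 0x04 == 1 << 2,  0x08 == 2 << 2
//
// Multiplying by the smi-tagged length then yields
// char_size * length << (2 + kSmiTagSize), which the shr above removes,
// leaving the byte length before header size and alignment are added.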


void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}


void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  mov(ecx, eax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(kInvalidEnumCacheSentinel)));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(0)));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register ecx contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(equal, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  cmp(ecx, isolate()->factory()->empty_slow_element_dictionary());
  j(not_equal, call_runtime);

  bind(&no_elements);
  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}
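
// Illustration (not part of the original source): the loop above is roughly
// this walk over the prototype chain, written as pseudocode:
//
//   for (Object* o = receiver; o != null; o = o->map()->prototype()) {
//     if (o == receiver && EnumLength(o->map()) == kInvalidEnumCacheSentinel)
//       goto call_runtime;                   // no enum cache at all
//     if (o != receiver && EnumLength(o->map()) != 0)
//       goto call_runtime;                   // non-empty cache up the chain
//     if (o->elements() != empty_fixed_array &&
//         o->elements() != empty_slow_element_dictionary)
//       goto call_runtime;                   // has own elements
//   }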


void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  lea(scratch_reg, Operand(receiver_reg,
      JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  cmp(scratch_reg, Immediate(new_space_start));
  j(less, no_memento_found);
  cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  j(greater, no_memento_found);
  cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
      Immediate(isolate()->factory()->allocation_memento_map()));
}
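
// Illustration (not part of the original source): the test above checks
// whether an AllocationMemento directly follows the JSArray in new space,
// leaving the final map comparison's flags for the caller to branch on:
//
//   TestJSArrayForAllocationMemento(receiver, scratch, &no_memento);
//   j(equal, &memento_found);  // map word matched allocation_memento_map
//   bind(&no_memento);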


void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  ASSERT(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again;

  // Start the prototype chain walk at the object itself.
  mov(current, object);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  mov(scratch1, FieldOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(not_equal, &loop_again);
}


void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  ASSERT(!dividend.is(eax));
  ASSERT(!dividend.is(edx));
  MultiplierAndShift ms(divisor);
  mov(eax, Immediate(ms.multiplier()));
  imul(dividend);
  if (divisor > 0 && ms.multiplier() < 0) add(edx, dividend);
  if (divisor < 0 && ms.multiplier() > 0) sub(edx, dividend);
  if (ms.shift() > 0) sar(edx, ms.shift());
  mov(eax, dividend);
  shr(eax, 31);
  add(edx, eax);
}
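
// Illustration (not part of the original source): this is the standard
// truncating division by a constant via a magic multiplier (imul leaves the
// 64-bit product in edx:eax). For divisor 3 the multiplier is 0x55555556
// with shift 0:
//
//   // dividend == 7:  high word of 7 * 0x55555556 is 2, 7 >> 31 is 0,
//   //                 so edx == 2                ( 7 / 3 ==  2).
//   // dividend == -7: high word is -3, unsigned (-7) >> 31 is 1,
//   //                 so edx == -3 + 1 == -2     (-7 / 3 == -2, truncated).
//
// The final shr/add pair is the sign correction that turns the floor-style
// result into truncation toward zero.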


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X87