// (code-search navigation header removed: Home | History | Annotate | Download | only in x87)
      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #if V8_TARGET_ARCH_X87
      6 
      7 #include "src/base/bits.h"
      8 #include "src/base/division-by-constant.h"
      9 #include "src/bootstrapper.h"
     10 #include "src/codegen.h"
     11 #include "src/debug/debug.h"
     12 #include "src/runtime/runtime.h"
     13 #include "src/x87/frames-x87.h"
     14 #include "src/x87/macro-assembler-x87.h"
     15 
     16 namespace v8 {
     17 namespace internal {
     18 
     19 // -------------------------------------------------------------------------
     20 // MacroAssembler implementation.
     21 
// Constructs a macro assembler over |buffer| of |size| bytes.  When
// |create_code_object| is kYes, seeds |code_object_| with the undefined
// value; it is patched with the real code object once it exists.
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}
     32 
     33 
     34 void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
     35   DCHECK(!r.IsDouble());
     36   if (r.IsInteger8()) {
     37     movsx_b(dst, src);
     38   } else if (r.IsUInteger8()) {
     39     movzx_b(dst, src);
     40   } else if (r.IsInteger16()) {
     41     movsx_w(dst, src);
     42   } else if (r.IsUInteger16()) {
     43     movzx_w(dst, src);
     44   } else {
     45     mov(dst, src);
     46   }
     47 }
     48 
     49 
     50 void MacroAssembler::Store(Register src, const Operand& dst, Representation r) {
     51   DCHECK(!r.IsDouble());
     52   if (r.IsInteger8() || r.IsUInteger8()) {
     53     mov_b(dst, src);
     54   } else if (r.IsInteger16() || r.IsUInteger16()) {
     55     mov_w(dst, src);
     56   } else {
     57     if (r.IsHeapObject()) {
     58       AssertNotSmi(src);
     59     } else if (r.IsSmi()) {
     60       AssertSmi(src);
     61     }
     62     mov(dst, src);
     63   }
     64 }
     65 
     66 
// Loads the root-list entry |index| into |destination|.  Constant roots are
// embedded directly; otherwise the value is fetched from the roots array,
// reusing |destination| as the index scratch.
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
    mov(destination, isolate()->heap()->root_handle(index));
    return;
  }
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(destination, Immediate(index));
  mov(destination, Operand::StaticArray(destination,
                                        times_pointer_size,
                                        roots_array_start));
}
     79 
     80 
// Stores |source| into the root-list slot |index|, clobbering |scratch|.
// Only roots that may legally change after heap initialization are allowed.
void MacroAssembler::StoreRoot(Register source,
                               Register scratch,
                               Heap::RootListIndex index) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
      source);
}
     91 
     92 
// Compares |with| against the root-list entry |index| via the roots array,
// clobbering |scratch|.  Works for non-constant roots as well.
void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  cmp(with, Operand::StaticArray(scratch,
                                times_pointer_size,
                                roots_array_start));
}
    103 
    104 
// Compares |with| against a constant root embedded as an immediate;
// only valid for roots that never move after initialization.
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  cmp(with, isolate()->heap()->root_handle(index));
}
    109 
    110 
// Memory-operand variant: compares |with| against a constant root.
void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  cmp(with, isolate()->heap()->root_handle(index));
}
    116 
    117 
// Pushes the constant root |index| onto the stack.
void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Push(isolate()->heap()->root_handle(index));
}
    122 
    123 
// Jumps to |condition_met| if |object| is (cc == equal: NOT) in new space.
// The check masks the object address down to its page start and tests the
// page's from-space/to-space flag bits.  |scratch| is clobbered (and may
// alias |object|).
void MacroAssembler::InNewSpace(
    Register object,
    Register scratch,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == equal || cc == not_equal);
  // scratch := page start (clear the alignment bits of the address).
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Check that we can use a test_b.
  DCHECK(MemoryChunk::IN_FROM_SPACE < 8);
  DCHECK(MemoryChunk::IN_TO_SPACE < 8);
  int mask = (1 << MemoryChunk::IN_FROM_SPACE)
           | (1 << MemoryChunk::IN_TO_SPACE);
  // If non-zero, the page belongs to new-space.
  test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
         static_cast<uint8_t>(mask));
  j(cc, condition_met, condition_met_distance);
}
    147 
    148 
// Records slot |addr| in the store buffer.  |scratch| is clobbered.  On
// buffer overflow the StoreBufferOverflowStub is called; |and_then| selects
// whether to return from the caller's frame (kReturnAtEnd) or fall through
// (kFallThroughAtEnd).  The debug check traps if |object| is in new space,
// since such slots need no remembered-set entry.
void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr, Register scratch, SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Call stub on end of buffer.
  // Check for end of buffer (the overflow bit is set in the new top address
  // exactly when the buffer limit is reached).
  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
    191 
    192 
// Clamps the double on the x87 top-of-stack to [0, 255] and leaves the
// result in |result_reg|.  In-range values convert directly; out-of-range
// values saturate to 0 or 255; a failed conversion (#IA, e.g. NaN) falls
// back to an explicit sign test against zero.
void MacroAssembler::ClampTOSToUint8(Register result_reg) {
  Label done, conv_failure;
  sub(esp, Immediate(kPointerSize));
  fnclex();                  // clear pending FPU exceptions before converting
  fist_s(Operand(esp, 0));   // TOS -> int32 on the stack (TOS not popped)
  pop(result_reg);
  X87CheckIA();              // did the conversion raise invalid-arithmetic?
  j(equal, &conv_failure, Label::kNear);
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);   // already within [0, 255]
  // Out of range: sign flag distinguishes negative (clamp to 0) from
  // too-large (clamp to 255):  1-1=0  /  0-1=-1 & 255 = 255.
  setcc(sign, result_reg);
  sub(result_reg, Immediate(1));
  and_(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  fnclex();
  fldz();
  fld(1);                    // duplicate the original value above the zero
  FCmp();
  setcc(below, result_reg);  // 1 if negative, 0 if positive.
  dec_b(result_reg);         // 0 if negative, 255 if positive.
  bind(&done);
}
    216 
    217 
// Clamps the int32 in |reg| to [0, 255]: in-range values are untouched,
// negatives become 0, values above 255 become 255.
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));  // any bits outside the low byte?
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
    226 
    227 
// Truncates the double at [input_reg + offset] to an int32 in |result_reg|
// by calling the (truncating) DoubleToIStub.
void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}
    234 
    235 
// Truncates the double on the x87 top-of-stack to an int32 in |result_reg|
// by spilling it to the stack and delegating to the DoubleToIStub.  The
// value is NOT popped from the FPU stack (fst_d, not fstp_d).
void MacroAssembler::TruncateX87TOSToI(Register result_reg) {
  sub(esp, Immediate(kDoubleSize));
  fst_d(MemOperand(esp, 0));
  SlowTruncateToI(result_reg, esp, 0);
  add(esp, Immediate(kDoubleSize));
}
    242 
    243 
// Converts the x87 top-of-stack to an int32 in |result_reg|, branching to
// |lost_precision| when the round-trip is inexact, |is_nan| on NaN, and
// (under FAIL_ON_MINUS_ZERO) to |minus_zero| when the value is -0.0.
void MacroAssembler::X87TOSToI(Register result_reg,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision, Label* is_nan,
                               Label* minus_zero, Label::Distance dst) {
  Label done;
  sub(esp, Immediate(kPointerSize));
  fld(0);                     // duplicate TOS; the copy is consumed below
  fist_s(MemOperand(esp, 0)); // convert to int32
  fild_s(MemOperand(esp, 0)); // reload as double for exactness comparison
  pop(result_reg);
  FCmp();                     // compare round-tripped value with original
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    test(result_reg, Operand(result_reg));
    j(not_zero, &done, Label::kNear);
    // To check for minus zero, we load the value again as float, and check
    // if that is still 0.
    sub(esp, Immediate(kPointerSize));
    fst_s(MemOperand(esp, 0));
    pop(result_reg);
    // Sign bit survives the store, so -0.0 yields a non-zero bit pattern.
    test(result_reg, Operand(result_reg));
    j(not_zero, minus_zero, dst);
  }
  bind(&done);
}
    270 
    271 
    272 void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
    273                                            Register input_reg) {
    274   Label done, slow_case;
    275 
    276   SlowTruncateToI(result_reg, input_reg);
    277   bind(&done);
    278 }
    279 
    280 
// Loads the uint32 at |src| onto the x87 FPU stack as a double.  fild_s
// interprets the value as signed, so when the sign bit is set the 2^32
// bias is added back to recover the unsigned value.
void MacroAssembler::LoadUint32NoSSE2(const Operand& src) {
  Label done;
  push(src);
  fild_s(Operand(esp, 0));        // signed int32 -> double
  cmp(src, Immediate(0));
  j(not_sign, &done, Label::kNear);
  ExternalReference uint32_bias =
        ExternalReference::address_of_uint32_bias();
  fld_d(Operand::StaticVariable(uint32_bias));
  faddp(1);                       // value += 2^32
  bind(&done);
  add(esp, Immediate(kPointerSize));
}
    294 
    295 
// Write barrier for a store of |value| into element |index| (a smi) of the
// FixedArray |object|.  Computes the slot address into |index| (clobbered)
// and forwards to RecordWrite.  In debug code, |value| and |index| are
// zapped afterwards to flush out illegal reuse.
void MacroAssembler::RecordWriteArray(
    Register object, Register value, Register index, SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action, SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    DCHECK_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(index, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
    330 
    331 
// Write barrier for a store of |value| into the field at |offset| of
// |object|.  Computes the slot address into |dst| (clobbered) and forwards
// to RecordWrite.  In debug code, |value| and |dst| are zapped afterwards.
void MacroAssembler::RecordWriteField(
    Register object, int offset, Register value, Register dst,
    SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action,
    SmiCheck smi_check, PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the start
  // of the object, so so offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    // The computed slot address must be pointer-aligned.
    Label ok;
    test_b(dst, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(dst, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
    370 
    371 
// Write barrier for a map store into |object|'s map slot.  Both scratch
// registers are clobbered (and zapped in debug code).  The barrier is
// skipped entirely when incremental marking is off.
void MacroAssembler::RecordWriteForMap(Register object, Handle<Map> map,
                                       Register scratch1, Register scratch2,
                                       SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    // The map slot address must be pointer-aligned.
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (!FLAG_incremental_marking) {
    return;
  }

  // Compute the address.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set.  This optimization
  // relies on the fact that maps can never be in new space.
  DCHECK(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET,
                       save_fp);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
    429 
    430 
// Generic write barrier: records that |value| was stored into the slot at
// |address| inside |object|.  |address| and |value| are clobbered (zapped
// in debug code).  Page-flag checks skip the stub call when neither the
// stored value nor the holder is "interesting" to the collector.
void MacroAssembler::RecordWrite(
    Register object, Register address, Register value, SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action, SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // The slot must actually contain |value| at this point.
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
    494 
    495 
// Emits a call into the runtime's debugger-statement handler: eax holds the
// argument count (zero), ebx the runtime function entry, and the call is
// tagged with DEBUGGER_STATEMENT relocation so the debugger can find it.
void MacroAssembler::DebugBreak() {
  Move(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kHandleDebuggerStatement,
                                       isolate())));
  CEntryStub ces(isolate(), 1);
  call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}
    503 
    504 
    505 bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
    506   static const int kMaxImmediateBits = 17;
    507   if (!RelocInfo::IsNone(x.rmode_)) return false;
    508   return !is_intn(x.x_, kMaxImmediateBits);
    509 }
    510 
    511 
// Moves immediate |x| into |dst|.  Unsafe immediates are masked with the
// per-process jit cookie (emitted XORed, then un-XORed at runtime) so an
// attacker cannot plant chosen constants in the code stream.
void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Move(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Move(dst, x);
  }
}
    520 
    521 
// Pushes immediate |x|, masking unsafe immediates with the jit cookie
// (pushed XORed, then un-XORed in place on the stack).
void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}
    530 
    531 
// Loads |heap_object|'s map into |map| and compares its instance type
// against |type|; flags are set for the caller to branch on.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
    538 
    539 
// Compares the instance-type byte of |map| against |type|.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}
    544 
    545 
// Jumps to |fail| unless |map|'s elements kind is one of the four fast
// (smi/object, holey or packed) kinds.  Relies on those kinds occupying
// the lowest contiguous values so a single upper-bound compare suffices.
void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}
    557 
    558 
// Jumps to |fail| unless |map|'s elements kind is FAST_ELEMENTS or
// FAST_HOLEY_ELEMENTS: values at or below the smi-kind maximum fail low,
// values above the holey-element maximum fail high.
void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}
    573 
    574 
// Jumps to |fail| unless |map|'s elements kind is FAST_SMI_ELEMENTS or
// FAST_HOLEY_SMI_ELEMENTS (the two lowest kind values).
void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(above, fail, distance);
}
    584 
    585 
    586 void MacroAssembler::StoreNumberToDoubleElements(
    587     Register maybe_number,
    588     Register elements,
    589     Register key,
    590     Register scratch,
    591     Label* fail,
    592     int elements_offset) {
    593   Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
    594   JumpIfSmi(maybe_number, &smi_value, Label::kNear);
    595 
    596   CheckMap(maybe_number,
    597            isolate()->factory()->heap_number_map(),
    598            fail,
    599            DONT_DO_SMI_CHECK);
    600 
    601   fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
    602   jmp(&done, Label::kNear);
    603 
    604   bind(&smi_value);
    605   // Value is a smi. Convert to a double and store.
    606   // Preserve original value.
    607   mov(scratch, maybe_number);
    608   SmiUntag(scratch);
    609   push(scratch);
    610   fild_s(Operand(esp, 0));
    611   pop(scratch);
    612   bind(&done);
    613   fstp_d(FieldOperand(elements, key, times_4,
    614                       FixedDoubleArray::kHeaderSize - elements_offset));
    615 }
    616 
    617 
// Compares |obj|'s map word against the handle |map|; sets flags only.
void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}
    621 
    622 
// Jumps to |fail| if |obj|'s map is not |map| (optionally smi-checking
// |obj| first, since smis have no map to read).
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}
    634 
    635 
// Jumps to |success| if |obj|'s map matches the map held weakly in |cell|;
// otherwise falls through.  Both scratch registers are clobbered.
void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
                                     Register scratch2, Handle<WeakCell> cell,
                                     Handle<Code> success,
                                     SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  mov(scratch1, FieldOperand(obj, HeapObject::kMapOffset));
  CmpWeakValue(scratch1, cell, scratch2);
  j(equal, success);

  bind(&fail);
}
    650 
    651 
// Loads |heap_object|'s map and instance type, tests the string mask, and
// returns the condition (zero) under which the object is a string.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
    661 
    662 
// Loads |heap_object|'s map and instance type, compares against the last
// name type, and returns the condition (below_equal) under which the
// object is a name (string or symbol).
Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, static_cast<uint8_t>(LAST_NAME_TYPE));
  return below_equal;
}
    671 
    672 
// Compares st(0) with st(1) and pops both, transferring the FPU condition
// codes into the CPU flags (via fnstsw/sahf) so callers can use ordinary
// conditional jumps.  eax is preserved around the status-word shuffle.
void MacroAssembler::FCmp() {
  fucompp();
  push(eax);
  fnstsw_ax();
  sahf();
  pop(eax);
}
    680 
    681 
// Sets the equal condition iff st(0) is -0.0, then pops it.  fxam encodes
// the class in C3/C2/C1/C0 (masked by 0x4700 plus the C1 sign bit); eax is
// preserved.
void MacroAssembler::FXamMinusZero() {
  fxam();
  push(eax);
  fnstsw_ax();
  and_(eax, Immediate(0x4700));
  // For minus zero, C3 == 1 && C1 == 1.
  cmp(eax, Immediate(0x4200));
  pop(eax);
  fstp(0);  // discard the examined value
}
    692 
    693 
// Tests the sign of st(0) (non-zero flags result iff negative, including
// -0.0) and pops it.  C1 (bit 0x0200 of the status word) is the fxam sign
// bit; eax is preserved.
void MacroAssembler::FXamSign() {
  fxam();
  push(eax);
  fnstsw_ax();
  // For negative value (including -0.0), C1 == 1.
  and_(eax, Immediate(0x0200));
  pop(eax);
  fstp(0);  // discard the examined value
}
    703 
    704 
// Sets the equal condition iff the FPU raised an invalid-arithmetic (#IA)
// exception: IE (0x0001) set with SF (0x0040) clear in the status word.
// eax is preserved.
void MacroAssembler::X87CheckIA() {
  push(eax);
  fnstsw_ax();
  // For #IA, IE == 1 && SF == 0.
  and_(eax, Immediate(0x0041));
  cmp(eax, Immediate(0x0001));
  pop(eax);
}
    713 
    714 
    715 // rc=00B, round to nearest.
    716 // rc=01B, round down.
    717 // rc=10B, round up.
    718 // rc=11B, round toward zero.
// Sets the FPU rounding-control field: reads the control word, clears bits
// 11:10 (mask 0xF3FF), ORs in |rc|, and reloads the control word.
// NOTE(review): |rc| is OR'd in unshifted, so callers are expected to pass
// the RC value already positioned at bits 11:10 (e.g. 0x0400, 0x0C00) —
// confirm against call sites.
void MacroAssembler::X87SetRC(int rc) {
  sub(esp, Immediate(kPointerSize));
  fnstcw(MemOperand(esp, 0));
  and_(MemOperand(esp, 0), Immediate(0xF3FF));
  or_(MemOperand(esp, 0), Immediate(rc));
  fldcw(MemOperand(esp, 0));
  add(esp, Immediate(kPointerSize));
}
    727 
    728 
// Replaces the entire FPU control word with |cw| by pushing it on the
// machine stack and executing fldcw from there.
void MacroAssembler::X87SetFPUCW(int cw) {
  RecordComment("-- X87SetFPUCW start --");
  push(Immediate(cw));
  fldcw(MemOperand(esp, 0));
  add(esp, Immediate(kPointerSize));
  RecordComment("-- X87SetFPUCW end--");
}
    736 
    737 
// Debug-only: aborts unless |object| is a smi or a HeapNumber.
void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandNotANumber);
    bind(&ok);
  }
}
    748 
    749 
// Debug-only: aborts unless |object| is a smi (tag bits zero).
void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, kOperandIsNotASmi);
  }
}
    756 
    757 
// Debug-only: aborts unless |object| is a string (non-smi with instance
// type below FIRST_NONSTRING_TYPE).  |object| is saved/restored around the
// map load.
void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, kOperandIsNotAString);
  }
}
    769 
    770 
// Debug-only: aborts unless |object| is a name (non-smi with instance type
// at or below LAST_NAME_TYPE).  |object| is saved/restored around the map
// load.
void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}
    782 
    783 
// Debug-only: aborts unless |object| is a JSFunction.
void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAFunction);
    Push(object);
    // CmpObjectType clobbers its map register; reuse |object| and restore it.
    CmpObjectType(object, JS_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotAFunction);
  }
}
    794 
    795 
// Debug-only: aborts unless |object| is a JSBoundFunction.
void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotABoundFunction);
    Push(object);
    // CmpObjectType clobbers its map register; reuse |object| and restore it.
    CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotABoundFunction);
  }
}
    806 
    807 
// Debug-only: aborts unless |object| is the undefined value or an
// AllocationSite (checked by map).
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    // Offset 0 is presumably HeapObject::kMapOffset (the map word) —
    // compare it against the allocation-site map.
    cmp(FieldOperand(object, 0),
        Immediate(isolate()->factory()->allocation_site_map()));
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}
    820 
    821 
// Debug-only: aborts if |object| is a smi.
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmi);
  }
}
    828 
    829 
// Emits the stub-frame prologue: caller fp, context, and a STUB frame
// marker (no code object or JS function slot).
void MacroAssembler::StubPrologue() {
  push(ebp);  // Caller's frame pointer.
  mov(ebp, esp);
  push(esi);  // Callee's context.
  push(Immediate(Smi::FromInt(StackFrame::STUB)));
}
    836 
    837 
// Emits the JS-function prologue.  When |code_pre_aging| is set, emits a
// call to the MarkCodeAsExecutedOnce builtin (padded to the fixed code-age
// sequence length so it can be patched later) instead of the standard
// frame-setup pushes.
void MacroAssembler::Prologue(bool code_pre_aging) {
  PredictableCodeSizeScope predictible_code_size_scope(this,
      kNoCodeAgeSequenceLength);
  if (code_pre_aging) {
      // Pre-age the code.
    call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
        RelocInfo::CODE_AGE_SEQUENCE);
    Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength);
  } else {
    push(ebp);  // Caller's frame pointer.
    mov(ebp, esp);
    push(esi);  // Callee's context.
    push(edi);  // Callee's JS function.
  }
}
    853 
    854 
// Loads the current function's type feedback vector into |vector| by
// chasing: frame function slot -> SharedFunctionInfo -> feedback vector.
void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
  mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  mov(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
  mov(vector, FieldOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
}
    860 
    861 
// Overload taking a constant-pool flag; x87 has no out-of-line constant
// pool, so this variant must never be called.
void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // Out-of-line constant pool not implemented on x87.
  UNREACHABLE();
}
    867 
    868 
// Builds a typed internal frame: saved ebp, context, smi frame-type marker,
// and the code object of the currently generated code.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    // CodeObject() starts out as the undefined value and is patched with the
    // real code object later; catch frames created before patching.
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}
    880 
    881 
// Tears down a frame created by EnterFrame. In debug mode, first verifies
// that the frame's type marker matches |type|.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  // leave = mov esp, ebp; pop ebp.
  leave();
}
    890 
    891 
// First half of exit-frame construction: lays out the fixed frame slots and
// publishes fp/context/C-function into the isolate's per-thread "top" slots.
// Expects ebx to hold the C function address on entry.
void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack. The DCHECKs pin the layout this
  // code hard-codes below.
  DCHECK(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  DCHECK(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  DCHECK(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  DCHECK(ExitFrameConstants::kSPOffset  == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  ExternalReference c_function_address(Isolate::kCFunctionAddress, isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
  mov(Operand::StaticVariable(c_function_address), ebx);
}
    913 
    914 
// Second half of exit-frame construction: reserves |argc| argument slots
// (plus the 108-byte x87 FNSAVE image when |save_doubles|), aligns esp to
// the OS frame alignment, and records esp in the frame's saved-SP slot.
void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save FPU state.
  if (save_doubles) {
    // Store FPU state to m108byte (32-bit protected-mode FNSAVE image).
    int space = 108 + argc * kPointerSize;
    sub(esp, Immediate(space));
    // The FPU image lives directly below the two fixed slots
    // (entry fp + code object); LeaveExitFrame restores from here.
    const int offset = -2 * kPointerSize;  // entry fp + code object.
    fnsave(MemOperand(ebp, offset - 108));
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
    // Round esp down to the alignment boundary.
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
    937 
    938 
// Builds a full exit frame for calling into C++ runtime code. Expects the
// argument count in eax; leaves argc in edi and a pointer to the first
// argument (argv) in esi, both callee-saved across the C call.
void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  // esi = ebp + eax * 4 + offset, i.e. the address of the last argument.
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}
    950 
    951 
// Builds an exit frame for API calls with |argc| reserved argument slots;
// never saves FPU state.
void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}
    956 
    957 
// Tears down an exit frame built by EnterExitFrame. |save_doubles| must
// match the value used on entry (restores the FNSAVE image). When
// |pop_arguments| is set, also pops the JS arguments and receiver using the
// argv pointer still held in esi.
void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
  // Optionally restore FPU state (mirrors the fnsave in
  // EnterExitFrameEpilogue, at the same ebp-relative offset).
  if (save_doubles) {
    const int offset = -2 * kPointerSize;
    frstor(MemOperand(ebp, offset - 108));
  }

  if (pop_arguments) {
    // Get the return address from the stack and restore the frame pointer.
    mov(ecx, Operand(ebp, 1 * kPointerSize));
    mov(ebp, Operand(ebp, 0 * kPointerSize));

    // Pop the arguments and the receiver from the caller stack.
    // esi points at the last argument; +1 slot skips the receiver.
    lea(esp, Operand(esi, 1 * kPointerSize));

    // Push the return address to get ready to return.
    push(ecx);
  } else {
    // Otherwise just leave the exit frame.
    leave();
  }

  // Always restore the context from the isolate's top slots.
  LeaveExitFrameEpilogue(true);
}
    982 
    983 
// Final step of leaving an exit frame: optionally restores esi (the context)
// from the isolate's top slot, then clears the top context (debug builds
// only) and the top C-entry frame pointer.
void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  if (restore_context) {
    mov(esi, Operand::StaticVariable(context_address));
  }
#ifdef DEBUG
  // Poison the slot so stale reads are caught in debug builds.
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
    999 
   1000 
// Tears down an exit frame built by EnterApiExitFrame; unlike
// LeaveExitFrame it never pops JS arguments or restores FPU state.
void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue(restore_context);
}
   1007 
   1008 
// Pushes a new stack handler (one word: the next-handler link) and makes it
// the isolate's current handler.
void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));

  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}
   1021 
   1022 
// Unlinks the current stack handler: restores the previous handler from the
// next-link slot and drops any remaining handler words from the stack.
void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  // With kSize == kPointerSize this add is a no-op; kept for layout changes.
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
   1029 
   1030 
// Security check for accessing a global proxy: jumps to |miss| unless the
// current lexical context and the holder's native context are the same or
// carry equal security tokens. |scratch1| and |scratch2| are clobbered;
// |holder_reg| is preserved.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch1));
  DCHECK(!holder_reg.is(scratch2));
  DCHECK(!scratch1.is(scratch2));

  // Load current lexical context from the stack frame.
  mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch1, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  mov(scratch1, ContextOperand(scratch1, Context::NATIVE_CONTEXT_INDEX));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Read the first word and compare to native_context_map.
    cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch2,
      FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    cmp(scratch2, isolate()->factory()->null_value());
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map(),
    cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Byte offset of the security-token slot within a native context.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, token_offset));
  cmp(scratch1, FieldOperand(scratch2, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}
   1092 
   1093 
   1094 // Compute the hash code from the untagged key.  This must be kept in sync with
   1095 // ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
   1096 // code-stub-hydrogen.cc
   1097 //
   1098 // Note: r0 will contain hash code
// Computes the seeded integer hash of the untagged key in |r0|, in place.
// |scratch| is clobbered. The sequence of shift/xor/add/imul steps mirrors
// ComputeIntegerHash (see the comment above) and must stay in sync with it.
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (serializer_enabled()) {
    // Under the serializer the seed cannot be baked in as an immediate, so
    // load it from the roots array at runtime.
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
  // Clamp to 30 bits so the result always fits in a smi.
  and_(r0, 0x3fffffff);
}
   1137 
   1138 
   1139 
// Probes a SeededNumberDictionary for |key| and loads the found value into
// |result|, or jumps to |miss|. Probing is unrolled kNumberDictionaryProbes
// times with quadratic offsets; only entries whose property details mark
// them as DATA properties are accepted.
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key      - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask (capacity is a power of two, so capacity - 1
  // masks a hash into a valid bucket index).
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kNumberDictionaryProbes - 1)) {
      j(equal, &done);
    } else {
      // Last probe: any mismatch means the key is absent.
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a field property: the details word (third word
  // of the entry) must have a zero type field (DATA).
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  DCHECK_EQ(DATA, 0);
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index (second word of the entry).
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
   1212 
   1213 
// Loads the current allocation top into |result|. If RESULT_CONTAINS_TOP is
// set, |result| already holds it (verified in debug builds) and |scratch|
// must be no_reg. Otherwise, when |scratch| is available it is loaded with
// the top address so UpdateAllocationTopHelper can store through it later
// without re-materializing the external reference.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    DCHECK(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(allocation_top));
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(allocation_top));
  } else {
    mov(scratch, Immediate(allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
   1240 
   1241 
// Stores |result_end| as the new allocation top. If |scratch| is valid it
// must already hold the top address (set up by LoadAllocationTopHelper).
// Debug builds verify the new top is object-aligned.
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
   1260 
   1261 
// Bump-pointer allocates |object_size| bytes, leaving the object's address
// in |result| (tagged when TAG_OBJECT is set) and the new top in
// |result_end| (if valid). Jumps to |gc_required| on failure. When inline
// allocation is disabled, debug builds trash the output registers so stale
// values are caught.
void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      // In old space the limit is not double-aligned, so re-check before
      // writing the filler word.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    // Fill the alignment gap with a one-word filler object.
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted. If no result_end
  // register was supplied, compute the new top in result itself and undo
  // the addition afterwards.
  Register top_reg = result_end.is_valid() ? result_end : result;
  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  j(carry, gc_required);  // address-space wraparound
  cmp(top_reg, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch, flags);

  // Tag result if requested.
  bool tag_result = (flags & TAG_OBJECT) != 0;
  if (top_reg.is(result)) {
    if (tag_result) {
      // Recover the object start and set the heap-object tag in one sub.
      sub(result, Immediate(object_size - kHeapObjectTag));
    } else {
      sub(result, Immediate(object_size));
    }
  } else if (tag_result) {
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }
}
   1335 
   1336 
// Bump-pointer allocates header_size + element_count * element_size bytes
// (element_count may be an int32 or a smi; for a smi the scale factor is
// halved to compensate for the tag shift). Result/result_end/gc_required
// semantics match the fixed-size Allocate above.
void MacroAssembler::Allocate(int header_size,
                              ScaleFactor element_size,
                              Register element_count,
                              RegisterValueType element_count_type,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & SIZE_IN_WORDS) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  // We assume that element_count*element_size + header_size does not
  // overflow.
  if (element_count_type == REGISTER_VALUE_IS_SMI) {
    // A smi is the value shifted left by kSmiTagSize (== 1), so dropping one
    // scale step yields the same byte count as scaling the untagged value.
    STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
    DCHECK(element_size >= times_2);
    DCHECK(kSmiTagSize == 1);
    element_size = static_cast<ScaleFactor>(element_size - 1);
  } else {
    DCHECK(element_count_type == REGISTER_VALUE_IS_INT32);
  }
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  if ((flags & TAG_OBJECT) != 0) {
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}
   1412 
   1413 
// Bump-pointer allocates a byte count held in register |object_size|
// (which may alias |result_end|). Result/result_end/gc_required semantics
// match the fixed-size Allocate above; |object_size| itself is preserved
// unless it aliases result_end.
void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}
   1477 
   1478 
// Allocates a (possibly mutable) HeapNumber and installs its map; the value
// field is left uninitialized. Jumps to |gc_required| on failure.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required,
                                        MutableMode mode) {
  // Allocate heap number in new space.
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  Handle<Map> map = mode == MUTABLE
      ? isolate()->factory()->mutable_heap_number_map()
      : isolate()->factory()->heap_number_map();

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset), Immediate(map));
}
   1495 
   1496 
// Allocates a SeqTwoByteString for |length| characters and initializes its
// map, length (as a smi) and hash field; the character data is left
// uninitialized. Jumps to |gc_required| on failure. |length| is preserved.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  DCHECK(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  // Round down to object alignment (net effect: round length*2 up).
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  Allocate(SeqTwoByteString::kHeaderSize,
           times_1,
           scratch1,
           REGISTER_VALUE_IS_INT32,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
   1531 
   1532 
// Allocates a SeqOneByteString for |length| characters and initializes its
// map, length (as a smi) and hash field; the character data is left
// uninitialized. Jumps to |gc_required| on failure. |length| is preserved.
void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  DCHECK(kCharSize == 1);
  // Round the byte count up to object alignment.
  add(scratch1, Immediate(kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::kHeaderSize,
           times_1,
           scratch1,
           REGISTER_VALUE_IS_INT32,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->one_byte_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
   1565 
   1566 
// Allocates a SeqOneByteString with a compile-time-constant |length| (> 0)
// and initializes its map, smi length and hash field; the character data is
// left uninitialized. Jumps to |gc_required| on failure.
void MacroAssembler::AllocateOneByteString(Register result, int length,
                                           Register scratch1, Register scratch2,
                                           Label* gc_required) {
  DCHECK(length > 0);

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
           gc_required, TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->one_byte_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
   1584 
   1585 
// Allocates a two-byte ConsString and installs its map; first/second and
// the other fields are left uninitialized. Jumps to |gc_required| on failure.
void MacroAssembler::AllocateTwoByteConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string in new space.
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}
   1598 
   1599 
// Allocates a one-byte ConsString and installs its map; the other fields
// are left uninitialized. Jumps to |gc_required| on failure.
void MacroAssembler::AllocateOneByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate cons string in new space.
  Allocate(ConsString::kSize,
           result,
           scratch1,
           scratch2,
           gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_one_byte_string_map()));
}
   1615 
   1616 
// Allocates an uninitialized two-byte SlicedString in new space, leaving the
// tagged pointer in |result|.  Only the map is initialized; the caller must
// fill in parent, offset, length and hash.  Jumps to |gc_required| if
// allocation fails.  |scratch1| and |scratch2| are clobbered.
void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                          Register scratch1,
                                          Register scratch2,
                                          Label* gc_required) {
  // Allocate sliced string in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}
   1629 
   1630 
// Allocates an uninitialized one-byte SlicedString in new space, leaving the
// tagged pointer in |result|.  Only the map is initialized; the caller must
// fill in the remaining fields.  Jumps to |gc_required| if allocation fails.
// |scratch1| and |scratch2| are clobbered.
void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate sliced string in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_one_byte_string_map()));
}
   1643 
   1644 
// Allocates a JSValue wrapper in new space and fully initializes it: map
// (the initial map of |constructor|), empty properties/elements arrays, and
// |value| as the wrapped value.  Jumps to |gc_required| on allocation
// failure.  |scratch| is clobbered; |result| must be distinct from
// |constructor|, |value| and |scratch|.
void MacroAssembler::AllocateJSValue(Register result, Register constructor,
                                     Register value, Register scratch,
                                     Label* gc_required) {
  DCHECK(!result.is(constructor));
  DCHECK(!result.is(scratch));
  DCHECK(!result.is(value));

  // Allocate JSValue in new space.
  Allocate(JSValue::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT);

  // Initialize the JSValue.
  LoadGlobalFunctionInitialMap(constructor, scratch);
  mov(FieldOperand(result, HeapObject::kMapOffset), scratch);
  LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
  mov(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
  mov(FieldOperand(result, JSObject::kElementsOffset), scratch);
  mov(FieldOperand(result, JSValue::kValueOffset), value);
  // The four stores above cover exactly map, properties, elements and value;
  // this assert guards against JSValue growing a field we fail to initialize.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
}
   1664 
   1665 
// Copy memory, byte-by-byte, from source to destination.  Not optimized for
// long or aligned copies.  The contents of scratch and length are destroyed.
// Source and destination are incremented by length.
// Many variants of movsb, loop unrolling, word moves, and indexed operands
// have been tried here already, and this is fastest.
// A simpler loop is faster on small copies, but 30% slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag(),
// before calling this function.
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label short_loop, len4, len8, len12, done, short_string;
  // rep_movs() implicitly uses esi/edi/ecx, hence the register constraints.
  DCHECK(source.is(esi));
  DCHECK(destination.is(edi));
  DCHECK(length.is(ecx));
  // Lengths < 4 are copied one byte at a time in the short loop below.
  cmp(length, Immediate(4));
  j(below, &short_string, Label::kNear);

  // Because source is 4-byte aligned in our uses of this function,
  // we keep source aligned for the rep_movs call by copying the odd bytes
  // at the end of the ranges.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);

  // Lengths 4..16 are handled with 1-3 unrolled dword moves (len4/len8/len12);
  // the trailing-dword copy above already covered the unaligned remainder.
  cmp(length, Immediate(8));
  j(below_equal, &len4, Label::kNear);
  cmp(length, Immediate(12));
  j(below_equal, &len8, Label::kNear);
  cmp(length, Immediate(16));
  j(below_equal, &len12, Label::kNear);

  // Length > 16: bulk copy length/4 dwords with rep movsd (advances
  // esi/edi/ecx), then step destination past the remainder bytes that the
  // trailing-dword copy above already wrote.
  mov(scratch, ecx);
  shr(ecx, 2);
  rep_movs();
  and_(scratch, Immediate(0x3));
  add(destination, scratch);
  jmp(&done, Label::kNear);

  // Unrolled cases fall through: len12 copies bytes [8,12), len8 copies
  // [4,8), len4 copies [0,4); bytes [length-4, length) were copied up front.
  bind(&len12);
  mov(scratch, Operand(source, 8));
  mov(Operand(destination, 8), scratch);
  bind(&len8);
  mov(scratch, Operand(source, 4));
  mov(Operand(destination, 4), scratch);
  bind(&len4);
  mov(scratch, Operand(source, 0));
  mov(Operand(destination, 0), scratch);
  add(destination, length);
  jmp(&done, Label::kNear);

  // Lengths 0..3: plain byte-at-a-time loop.
  bind(&short_string);
  test(length, length);
  j(zero, &done, Label::kNear);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
   1731 
   1732 
// Stores |filler| into every pointer-sized slot in the half-open range
// [current_address, end_address).  |current_address| is advanced to
// |end_address|; emits nothing harmful when the range is empty (the entry
// check runs before the first store).
void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
                                                Register end_address,
                                                Register filler) {
  Label loop, entry;
  // Jump straight to the loop condition so an empty range stores nothing.
  jmp(&entry);
  bind(&loop);
  mov(Operand(current_address, 0), filler);
  add(current_address, Immediate(kPointerSize));
  bind(&entry);
  cmp(current_address, end_address);
  j(below, &loop);
}
   1745 
   1746 
// Tests bit |bit_index| of the smi-encoded bit field stored at
// |field_offset| in |object|, setting the processor flags (zero flag) for a
// subsequent conditional jump.  Uses a single byte-sized test so the bit
// index is first adjusted for the smi tag and reduced to a byte offset plus
// an in-byte bit position.
void MacroAssembler::BooleanBitTest(Register object,
                                    int field_offset,
                                    int bit_index) {
  // Skip over the smi tag bits that precede the payload.
  bit_index += kSmiTagSize + kSmiShiftSize;
  // The mask trick below requires kBitsPerByte to be a power of two.
  DCHECK(base::bits::IsPowerOfTwo32(kBitsPerByte));
  int byte_index = bit_index / kBitsPerByte;
  int byte_bit_index = bit_index & (kBitsPerByte - 1);
  test_b(FieldOperand(object, field_offset + byte_index),
         static_cast<byte>(1 << byte_bit_index));
}
   1757 
   1758 
   1759 
// Jumps to |then_label| if |result| is zero while |op| is negative, i.e. the
// integer operation that produced |result| may actually represent -0 and
// needs heap-number treatment.  Falls through otherwise.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);  // Non-zero result can never be -0.
  test(op, op);
  j(sign, then_label);  // Zero result with negative operand: possible -0.
  bind(&ok);
}
   1770 
   1771 
// Two-operand variant: jumps to |then_label| if |result| is zero while
// either |op1| or |op2| is negative (checked by OR-ing them into |scratch|
// and testing the sign bit).  |scratch| is clobbered.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);  // Non-zero result can never be -0.
  mov(scratch, op1);
  // The OR has the sign bit set iff at least one operand is negative.
  or_(scratch, op2);
  j(sign, then_label);
  bind(&ok);
}
   1785 
   1786 
// Loads the constructor of |map| into |result|.  The constructor-or-back-
// pointer field may point at another map (a back pointer); this loop follows
// such links until the value is no longer a map (a smi or the constructor
// function).  |temp| is clobbered by the type check.
void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp) {
  Label done, loop;
  mov(result, FieldOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  // A smi in this field terminates the chain.
  JumpIfSmi(result, &done, Label::kNear);
  CmpObjectType(result, MAP_TYPE, temp);
  j(not_equal, &done, Label::kNear);
  // Still a map: follow the back pointer one more step.
  mov(result, FieldOperand(result, Map::kConstructorOrBackPointerOffset));
  jmp(&loop);
  bind(&done);
}
   1799 
   1800 
// Loads the prototype of JSFunction |function| into |result|.  If the
// function has no prototype yet (the field holds the hole), jumps to |miss|
// so the caller can fall back to the runtime.  If the field holds the
// initial map, the prototype is read from that map instead.  |scratch| is
// clobbered by the type check.
void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
                                             Register scratch, Label* miss) {
  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done, Label::kNear);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));

  // All done.
  bind(&done);
}
   1824 
   1825 
// Emits a call to |stub|'s generated code, recording |ast_id| with the
// relocation info for type feedback.
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
   1830 
   1831 
// Emits a tail call (jump) to |stub|'s generated code.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
   1835 
   1836 
// Returns from a stub, popping |argc| - 1 arguments off the caller's stack
// (the receiver slot is left to the caller's calling convention).
void MacroAssembler::StubReturn(int argc) {
  DCHECK(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
   1841 
   1842 
   1843 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
   1844   return has_frame_ || !stub->SometimesSetsUpAFrame();
   1845 }
   1846 
   1847 
// Extracts the cached array index from a string hash field in |hash| and
// leaves it in |index| as a smi.  |hash| and |index| may be the same
// register.
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  if (!index.is(hash)) {
    mov(index, hash);
  }
  // Shift out the non-index bits and leave the value smi-tagged.
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
}
   1859 
   1860 
// Calls runtime function |f| through the CEntry stub.  Arguments are
// expected on the stack; eax receives the argument count and ebx the
// runtime entry, per the CEntry calling convention.
void MacroAssembler::CallRuntime(const Runtime::Function* f, int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Move(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(isolate(), 1, save_doubles);
  CallStub(&ces);
}
   1877 
   1878 
// Calls the C function at |ref| through the CEntry stub: eax holds the
// argument count and ebx the target, matching CEntry's convention.
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}
   1887 
   1888 
// Tail-calls runtime function |fid| through the CEntry stub.  Arguments are
// already on the stack; for fixed-arity functions the count is loaded into
// eax here, otherwise the caller must have set eax.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  // ----------- S t a t e -------------
  //  -- esp[0]                 : return address
  //  -- esp[4]                 : argument num_arguments - 1
  //  ...
  //  -- esp[4 * num_arguments] : argument 0 (receiver)
  //
  //  For runtime functions with variable arguments:
  //  -- eax                    : number of arguments
  // -----------------------------------
  // (Stack slots are kPointerSize == 4 bytes on this 32-bit target.)

  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    // TODO(1236192): Most runtime routines don't need the number of
    // arguments passed in because it is constant. At some point we
    // should remove this need and make the runtime routine entry code
    // smarter.
    mov(eax, Immediate(function->nargs));
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}
   1911 
   1912 
// Tail-calls into the C entry runtime stub with |ext| as the target function
// (passed in ebx, per the CEntry convention).
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(isolate(), 1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
   1919 
   1920 
// Emits the argument-count check that precedes every function invocation.
// Compares |expected| against |actual| (each either an immediate or a
// register, per the eax/ebx invoke convention) and, on mismatch, calls or
// tail-jumps to the ArgumentsAdaptorTrampoline.  When the mismatch is known
// at assembly time, |*definitely_mismatches| is set and the adaptor handles
// the whole invocation, so the caller emits no direct call.  On a match,
// control falls through; after an adaptor *call* returns, control jumps to
// |done| (distance hint |done_near|).  |call_wrapper| brackets the adaptor
// call for the debugger.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    // eax carries the actual argument count into the callee.
    mov(eax, actual.immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        // ebx carries the expected count into the adaptor.
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      mov(eax, actual.immediate());
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      DCHECK(expected.reg().is(ebx));
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      DCHECK(actual.reg().is(eax));
      DCHECK(expected.reg().is(ebx));
    } else {
      // Same register for expected and actual: trivially a match.
      Move(eax, actual.reg());
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        // The adaptor performed the call; skip the caller's own call code.
        jmp(done, done_near);
      }
    } else {
      // Tail call: the adaptor jumps on to the function itself.
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
   1986 
   1987 
// If the debugger's step-in flag is set, calls the runtime to prepare
// stepping into |fun|.  All live register state — |fun|, |new_target| (if
// valid) and any register-based expected/actual argument counts — is
// preserved across the runtime call by smi-tagging and pushing it, then
// popping in reverse order.  Emits nothing observable when the flag is
// clear.
void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
                                             const ParameterCount& expected,
                                             const ParameterCount& actual) {
  Label skip_flooding;
  ExternalReference step_in_enabled =
      ExternalReference::debug_step_in_enabled_address(isolate());
  cmpb(Operand::StaticVariable(step_in_enabled), 0);
  j(equal, &skip_flooding);
  {
    // Only build an internal frame if we are not already inside one.
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    // Counts are smi-tagged so the GC can scan the stack slots safely.
    if (expected.is_reg()) {
      SmiTag(expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    // |fun| is pushed twice: once to preserve it, once as the runtime-call
    // argument (which the call consumes).
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugPrepareStepInIfStepping, 1);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg());
    }
  }
  bind(&skip_flooding);
}
   2028 
   2029 
// Invokes the code of JSFunction |function| (must be edi) with |new_target|
// in edx (or undefined if not given), after the debugger step check and the
// argument-count prologue.  Depending on |flag| the code is either called or
// tail-jumped; |call_wrapper| brackets an actual call for the debugger.
void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(edi));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(edx));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    mov(edx, isolate()->factory()->undefined_value());
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 Label::kNear, call_wrapper);
  // On a definite mismatch the adaptor trampoline performs the invocation,
  // so no direct call is emitted here.
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Operand code = FieldOperand(function, JSFunction::kCodeEntryOffset);
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(code);
    }
    bind(&done);
  }
}
   2069 
   2070 
// Invokes JSFunction |fun| (must be edi), reading the expected argument
// count from its SharedFunctionInfo into ebx and the function context into
// esi, then delegating to InvokeFunctionCode.
void MacroAssembler::InvokeFunction(Register fun, Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  // The formal parameter count is stored as a smi; untag before comparing.
  mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeFunctionCode(edi, new_target, expected, actual, flag, call_wrapper);
}
   2087 
   2088 
// Invokes JSFunction |fun| (must be edi) with a caller-supplied expected
// argument count; loads the function context into esi and delegates to
// InvokeFunctionCode without a new.target.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  InvokeFunctionCode(edi, no_reg, expected, actual, flag, call_wrapper);
}
   2102 
   2103 
// Handle-based convenience overload: materializes |function| into edi and
// delegates to the register-based InvokeFunction above.
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  LoadHeapObject(edi, function);
  InvokeFunction(edi, expected, actual, flag, call_wrapper);
}
   2112 
   2113 
// Invokes the JavaScript builtin at |native_context_index|, loading it into
// edi first.  Expected and actual counts are both faked as zero so no
// argument-adaption code is emitted.
void MacroAssembler::InvokeBuiltin(int native_context_index, InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Fake a parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, native_context_index);
  InvokeFunctionCode(edi, no_reg, expected, expected, flag, call_wrapper);
}
   2124 
   2125 
// Loads the JavaScript builtin function stored at |native_context_index| in
// the native context into |target|.
void MacroAssembler::GetBuiltinFunction(Register target,
                                        int native_context_index) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, NativeContextOperand());
  mov(target, ContextOperand(target, native_context_index));
}
   2132 
   2133 
// Walks |context_chain_length| links up the context chain starting from the
// current context in esi, leaving the resulting context in |dst|.  A length
// of zero copies esi itself.  In debug code, verifies the result is not a
// with-context.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context.  Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, kVariableResolvedToWithContext);
  }
}
   2158 
   2159 
// Loads the global proxy object from the native context into |dst|.
void MacroAssembler::LoadGlobalProxy(Register dst) {
  mov(dst, NativeContextOperand());
  mov(dst, ContextOperand(dst, Context::GLOBAL_PROXY_INDEX));
}
   2164 
   2165 
// If |map_in_out| equals the native context's cached array map for
// |expected_kind|, replaces it with the cached map for |transitioned_kind|;
// otherwise jumps to |no_map_match|.  |scratch| receives the native context.
void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  DCHECK(IsFastElementsKind(expected_kind));
  DCHECK(IsFastElementsKind(transitioned_kind));

  // Check that the function's map is the same as the expected cached map.
  mov(scratch, NativeContextOperand());
  cmp(map_in_out,
      ContextOperand(scratch, Context::ArrayMapIndex(expected_kind)));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  mov(map_in_out,
      ContextOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
}
   2185 
   2186 
// Loads the global function stored at slot |index| of the native context
// into |function|.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the native context from the current context.
  mov(function, NativeContextOperand());
  // Load the function from the native context.
  mov(function, ContextOperand(function, index));
}
   2193 
   2194 
// Loads the initial map of global function |function| into |map|.  In debug
// code, aborts if the loaded value is not actually a map (global functions
// are guaranteed to have initial maps).
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map.  The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    // A map's own map is the meta map; anything else is a bug here.
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}
   2208 
   2209 
// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}
   2215 
   2216 
// Immediate variant: stores |src| into the safepoint stack slot for |dst|.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}
   2220 
   2221 
// Loads |dst| from the safepoint register stack slot belonging to |src|.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}
   2225 
   2226 
// Returns the esp-relative operand addressing |reg|'s slot in the pushed
// safepoint register area.
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
   2230 
   2231 
   2232 int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
   2233   // The registers are pushed starting with the lowest encoding,
   2234   // which means that lowest encodings are furthest away from
   2235   // the stack pointer.
   2236   DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
   2237   return kNumSafepointRegisters - reg_code - 1;
   2238 }
   2239 
   2240 
// Loads heap object |object| into |result|.  New-space objects are loaded
// indirectly through a freshly allocated Cell rather than embedded in the
// code — presumably because new-space objects can be moved by the GC (TODO:
// confirm against Assembler relocation rules).
void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  AllowDeferredHandleDereference embedding_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    mov(result, Operand::ForCell(cell));
  } else {
    mov(result, object);
  }
}
   2251 
   2252 
// Compares |reg| against heap object |object|, indirecting through a Cell
// for new-space objects (same scheme as LoadHeapObject).  Sets the
// processor flags for a subsequent conditional jump.
void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    cmp(reg, Operand::ForCell(cell));
  } else {
    cmp(reg, object);
  }
}
   2262 
   2263 
// Pushes heap object |object| onto the stack, indirecting through a Cell
// for new-space objects (same scheme as LoadHeapObject).
void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    push(Operand::ForCell(cell));
  } else {
    Push(object);
  }
}
   2273 
   2274 
// Compares |value| against the object held by WeakCell |cell|, setting the
// processor flags.  |scratch| receives the cell pointer.
void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
                                  Register scratch) {
  mov(scratch, cell);
  cmp(value, FieldOperand(scratch, WeakCell::kValueOffset));
}
   2280 
   2281 
// Loads the object held by WeakCell |cell| into |value|.  If the cell has
// been cleared, the loaded value is a smi (see LoadWeakValue).
void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
  mov(value, cell);
  mov(value, FieldOperand(value, WeakCell::kValueOffset));
}
   2286 
   2287 
// Loads the object held by WeakCell |cell| into |value|, jumping to |miss|
// if the cell has been cleared (cleared cells hold a smi).
void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  GetWeakValue(value, cell);
  JumpIfSmi(value, miss);
}
   2293 
   2294 
// Emits a plain return with no stack arguments popped.
void MacroAssembler::Ret() {
  ret(0);
}
   2298 
   2299 
// Returns while dropping |bytes_dropped| bytes of stack arguments.  The x86
// `ret imm16` encoding only holds a 16-bit count, so larger drops save the
// return address in |scratch|, adjust esp manually, and re-push it.
void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    add(esp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}
   2310 
   2311 
// Debug-only check that the x87 FPU register stack currently holds exactly
// |depth| items, by reading the TOP field of the FPU status word and
// comparing it to the expected top-of-stack value.  Preserves eax.
void MacroAssembler::VerifyX87StackDepth(uint32_t depth) {
  // Turn off the stack depth check when serializer is enabled to reduce the
  // code size.
  if (serializer_enabled()) return;
  // Make sure the floating point stack is either empty or has depth items.
  DCHECK(depth <= 7);
  // This is very expensive.
  DCHECK(FLAG_debug_code && FLAG_enable_slow_asserts);

  // The top-of-stack (tos) is 7 if there is one item pushed.
  int tos = (8 - depth) % 8;
  // TOP occupies bits 11-13 of the FPU status word.
  const int kTopMask = 0x3800;
  push(eax);
  fwait();
  fnstsw_ax();
  and_(eax, kTopMask);
  shr(eax, 11);
  cmp(eax, Immediate(tos));
  Check(equal, kUnexpectedFPUStackDepthAfterInstruction);
  // Clear any pending FPU exception flags before continuing.
  fnclex();
  pop(eax);
}
   2334 
   2335 
   2336 void MacroAssembler::Drop(int stack_elements) {
   2337   if (stack_elements > 0) {
   2338     add(esp, Immediate(stack_elements * kPointerSize));
   2339   }
   2340 }
   2341 
   2342 
   2343 void MacroAssembler::Move(Register dst, Register src) {
   2344   if (!dst.is(src)) {
   2345     mov(dst, src);
   2346   }
   2347 }
   2348 
   2349 
   2350 void MacroAssembler::Move(Register dst, const Immediate& x) {
   2351   if (x.is_zero()) {
   2352     xor_(dst, dst);  // Shorter than mov of 32-bit immediate 0.
   2353   } else {
   2354     mov(dst, x);
   2355   }
   2356 }
   2357 
   2358 
// Stores immediate |x| into memory operand |dst| (no zero-idiom shortcut is
// possible for a memory destination).
void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
   2362 
   2363 
// Emulates LZCNT (count leading zeros) using BSR: bsr yields the index of
// the highest set bit, and XOR-ing with 31 converts that index into the
// leading-zero count.  A zero source is special-cased to produce 32.
void MacroAssembler::Lzcnt(Register dst, const Operand& src) {
  // TODO(intel): Add support for LZCNT (with ABM/BMI1).
  Label not_zero_src;
  bsr(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Move(dst, Immediate(63));  // 63^31 == 32
  bind(&not_zero_src);
  xor_(dst, Immediate(31));  // for x in [0..31], 31^x == 31-x.
}
   2373 
   2374 
// Emulates TZCNT (count trailing zeros) using BSF, which already returns
// the trailing-zero count for non-zero inputs; a zero source (for which
// BSF leaves dst undefined) is special-cased to produce 32.
void MacroAssembler::Tzcnt(Register dst, const Operand& src) {
  // TODO(intel): Add support for TZCNT (with ABM/BMI1).
  Label not_zero_src;
  bsf(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Move(dst, Immediate(32));  // The result of tzcnt is 32 if src = 0.
  bind(&not_zero_src);
}
   2383 
   2384 
// Population count is not implemented on this port; callers must guard on
// CPU feature support before reaching here.
void MacroAssembler::Popcnt(Register dst, const Operand& src) {
  // TODO(intel): Add support for POPCNT (with POPCNT)
  // if (CpuFeatures::IsSupported(POPCNT)) {
  //   CpuFeatureScope scope(this, POPCNT);
  //   popcnt(dst, src);
  //   return;
  // }
  UNREACHABLE();
}
   2394 
   2395 
// Stores |value| into |counter|'s memory cell, if native-code counters are
// enabled; otherwise emits nothing.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}
   2401 
   2402 
// Adds |value| (> 0) to |counter|'s memory cell, if native-code counters
// are enabled; uses the shorter inc encoding for value == 1.
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}
   2414 
   2415 
   2416 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
   2417   DCHECK(value > 0);
   2418   if (FLAG_native_code_counters && counter->Enabled()) {
   2419     Operand operand = Operand::StaticVariable(ExternalReference(counter));
   2420     if (value == 1) {
   2421       dec(operand);
   2422     } else {
   2423       sub(operand, Immediate(value));
   2424     }
   2425   }
   2426 }
   2427 
   2428 
   2429 void MacroAssembler::IncrementCounter(Condition cc,
   2430                                       StatsCounter* counter,
   2431                                       int value) {
   2432   DCHECK(value > 0);
   2433   if (FLAG_native_code_counters && counter->Enabled()) {
   2434     Label skip;
   2435     j(NegateCondition(cc), &skip);
   2436     pushfd();
   2437     IncrementCounter(counter, value);
   2438     popfd();
   2439     bind(&skip);
   2440   }
   2441 }
   2442 
   2443 
   2444 void MacroAssembler::DecrementCounter(Condition cc,
   2445                                       StatsCounter* counter,
   2446                                       int value) {
   2447   DCHECK(value > 0);
   2448   if (FLAG_native_code_counters && counter->Enabled()) {
   2449     Label skip;
   2450     j(NegateCondition(cc), &skip);
   2451     pushfd();
   2452     DecrementCounter(counter, value);
   2453     popfd();
   2454     bind(&skip);
   2455   }
   2456 }
   2457 
   2458 
// Like Check(), but the check is only emitted when debug code is enabled;
// otherwise this emits nothing.
void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
  if (emit_debug_code()) Check(cc, reason);
}
   2462 
   2463 
// Debug-mode check that |elements| is one of the fast-elements backing
// stores (FixedArray, FixedDoubleArray, or COW FixedArray); aborts the
// generated code otherwise. Emits nothing when debug code is disabled.
void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}
   2481 
   2482 
   2483 void MacroAssembler::Check(Condition cc, BailoutReason reason) {
   2484   Label L;
   2485   j(cc, &L);
   2486   Abort(reason);
   2487   // will not return here
   2488   bind(&L);
   2489 }
   2490 
   2491 
   2492 void MacroAssembler::CheckStackAlignment() {
   2493   int frame_alignment = base::OS::ActivationFrameAlignment();
   2494   int frame_alignment_mask = frame_alignment - 1;
   2495   if (frame_alignment > kPointerSize) {
   2496     DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
   2497     Label alignment_as_expected;
   2498     test(esp, Immediate(frame_alignment_mask));
   2499     j(zero, &alignment_as_expected);
   2500     // Abort if stack is not aligned.
   2501     int3();
   2502     bind(&alignment_as_expected);
   2503   }
   2504 }
   2505 
   2506 
// Emits code that aborts execution with |reason| by calling Runtime::kAbort.
// The emitted code never returns. In debug builds the reason text is recorded
// as an assembler comment, and FLAG_trap_on_abort traps immediately instead
// of calling into the runtime.
void MacroAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();  // Trap right here rather than calling the runtime.
    return;
  }
#endif

  // Pass the reason to the runtime as a smi, pushed as raw immediate bits.
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(reason))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 1);
  } else {
    CallRuntime(Runtime::kAbort, 1);
  }
  // will not return here
  int3();
}
   2534 
   2535 
// Loads the DescriptorArray of |map| into |descriptors|.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}
   2540 
   2541 
// Extracts the number-of-own-descriptors bit field from |map|'s bit field 3
// into |dst| as an untagged integer.
void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}
   2546 
   2547 
   2548 void MacroAssembler::LoadAccessor(Register dst, Register holder,
   2549                                   int accessor_index,
   2550                                   AccessorComponent accessor) {
   2551   mov(dst, FieldOperand(holder, HeapObject::kMapOffset));
   2552   LoadInstanceDescriptors(dst, dst);
   2553   mov(dst, FieldOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
   2554   int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
   2555                                            : AccessorPair::kSetterOffset;
   2556   mov(dst, FieldOperand(dst, offset));
   2557 }
   2558 
   2559 
   2560 void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
   2561     Register instance_type, Register scratch, Label* failure) {
   2562   if (!scratch.is(instance_type)) {
   2563     mov(scratch, instance_type);
   2564   }
   2565   and_(scratch,
   2566        kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
   2567   cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
   2568   j(not_equal, failure);
   2569 }
   2570 
   2571 
// Branches to |failure| unless both |object1| and |object2| are sequential
// one-byte strings. Both scratch registers are clobbered; the objects
// themselves are left untouched.
void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register object1,
                                                           Register object2,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that both objects are not smis. Since kSmiTag == 0, the AND of the
  // two values is a smi iff at least one of them is a smi.
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat one-byte strings.
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  // Interleave bits from both instance types and compare them in one check.
  // The lea shifts scratch2 left by 3 and adds it to scratch1, which only
  // works because the mask fits in the low 3 bits (asserted below).
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  and_(scratch1, kFlatOneByteStringMask);
  and_(scratch2, kFlatOneByteStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatOneByteStringTag | (kFlatOneByteStringTag << 3));
  j(not_equal, failure);
}
   2602 
   2603 
// Branches to |not_unique_name| unless the instance type in |operand| is a
// unique name: either an internalized string or a symbol.
void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  // Internalized strings have both the not-string and not-internalized bits
  // clear, so the test yields zero for them.
  test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  j(zero, &succeed);
  // Otherwise the only remaining unique-name type is SYMBOL_TYPE.
  cmpb(operand, static_cast<uint8_t>(SYMBOL_TYPE));
  j(not_equal, not_unique_name, distance);

  bind(&succeed);
}
   2616 
   2617 
// Debug check used by the seq-string set-char intrinsic: verifies that
// |string| is a heap object of the expected sequential string encoding
// (per |encoding_mask|) and that |index| (untagged smi value) is within
// [0, length). Aborts the generated code on any violation. |value| is used
// as a scratch register but is saved and restored; |index| is temporarily
// smi-tagged and restored before returning.
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  JumpIfNotSmi(string, &is_object, Label::kNear);
  Abort(kNonObject);
  bind(&is_object);

  // Borrow |value| to inspect the string's instance type.
  push(value);
  mov(value, FieldOperand(string, HeapObject::kMapOffset));
  movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));

  and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmp(value, Immediate(encoding_mask));
  pop(value);
  Check(equal, kUnexpectedStringType);

  // The index is assumed to be untagged coming in, tag it to compare with the
  // string length without using a temp register, it is restored at the end of
  // this function.
  SmiTag(index);
  Check(no_overflow, kIndexIsTooLarge);

  cmp(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  cmp(index, Immediate(Smi::FromInt(0)));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index
  SmiUntag(index);
}
   2651 
   2652 
// Reserves stack space for |num_arguments| C-call argument slots. When the
// platform requires frame alignment, esp is aligned down and the original
// esp is stashed just above the argument slots so CallCFunction can restore
// it. |scratch| is clobbered.
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}
   2667 
   2668 
// Calls the external C function through eax. The stack must already have
// been prepared with PrepareCallCFunction.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}
   2675 
   2676 
// Calls the C function whose address is in |function| and pops the argument
// area set up by PrepareCallCFunction. On aligned-frame platforms the saved
// esp (stored above the arguments) is reloaded; otherwise the slots are
// simply popped.
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (base::OS::ActivationFrameAlignment() != 0) {
    // Restore the original esp saved by PrepareCallCFunction.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}
   2692 
   2693 
   2694 #ifdef DEBUG
   2695 bool AreAliased(Register reg1,
   2696                 Register reg2,
   2697                 Register reg3,
   2698                 Register reg4,
   2699                 Register reg5,
   2700                 Register reg6,
   2701                 Register reg7,
   2702                 Register reg8) {
   2703   int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
   2704       reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
   2705       reg7.is_valid() + reg8.is_valid();
   2706 
   2707   RegList regs = 0;
   2708   if (reg1.is_valid()) regs |= reg1.bit();
   2709   if (reg2.is_valid()) regs |= reg2.bit();
   2710   if (reg3.is_valid()) regs |= reg3.bit();
   2711   if (reg4.is_valid()) regs |= reg4.bit();
   2712   if (reg5.is_valid()) regs |= reg5.bit();
   2713   if (reg6.is_valid()) regs |= reg6.bit();
   2714   if (reg7.is_valid()) regs |= reg7.bit();
   2715   if (reg8.is_valid()) regs |= reg8.bit();
   2716   int n_of_non_aliasing_regs = NumRegs(regs);
   2717 
   2718   return n_of_valid_regs != n_of_non_aliasing_regs;
   2719 }
   2720 #endif
   2721 
   2722 
// Sets up a macro assembler that writes |size| bytes of patched code
// directly at |address|.
CodePatcher::CodePatcher(Isolate* isolate, byte* address, int size)
    : address_(address),
      size_(size),
      masm_(isolate, address, size + Assembler::kGap, CodeObjectRequired::kNo) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
   2732 
   2733 
// Flushes the instruction cache for the patched region and verifies that
// exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  Assembler::FlushICache(masm_.isolate(), address_, size_);

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
   2742 
   2743 
// Tests |mask| against the flags word of the memory page containing
// |object| and branches to |condition_met| if the test satisfies |cc|
// (zero or not_zero). |scratch| is clobbered; it may alias |object|.
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  // Compute the page start by clearing the low (in-page) address bits.
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Use the shorter byte test when the mask fits in the low byte.
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
           static_cast<uint8_t>(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
   2766 
   2767 
// Like CheckPageFlag, but for a map known at code-generation time: the page
// flags address is embedded as a static external reference, so no scratch
// register is needed.
void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  DCHECK(!serializer_enabled());  // Serializer cannot match page_flags.
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  DCHECK(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  // Use the shorter byte test when the mask fits in the low byte.
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
   2789 
   2790 
// Branches to |on_black| if |object|'s incremental-marking color bits are
// "11" (black). Both scratch registers and ecx are clobbered by HasColor.
void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1, on_black, on_black_near, 1,
           1);  // kBlackBitPattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
}
   2800 
   2801 
// Branches to |has_color| if |object|'s two mark bits equal
// (first_bit, second_bit). The second mark bit may live in the next bitmap
// cell when the first bit is the last bit of its cell, hence the
// word-boundary special case. Clobbers both scratch registers and ecx.
void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  // Test the first mark bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  j(zero, &word_boundary, Label::kNear);  // Mask overflowed out of the cell.
  // Test the second mark bit within the same cell.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  // The second bit is bit 0 of the next bitmap cell.
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);

  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}
   2828 
   2829 
// Computes, for the object at |addr_reg|, the address of the marking bitmap
// cell (|bitmap_reg|) and a single-bit mask (|mask_reg|) selecting the
// object's first mark bit within that cell. Clobbers ecx.
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  // bitmap_reg = start of the page containing the object.
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  // Byte offset of the bitmap cell within the page's bitmap.
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  // Bit index of the object's mark bit within the cell.
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);  // mask_reg = 1 << (bit index held in cl).
}
   2850 
   2851 
// Branches to |value_is_white| if |value|'s marking color is white ("00").
// Clobbers both scratch registers and ecx (via GetMarkBits).
void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
                                 Register mask_scratch, Label* value_is_white,
                                 Label::Distance distance) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(zero, value_is_white, Label::kNear);
}
   2869 
   2870 
// Loads the enum-cache length from |map|'s bit field 3 into |dst| as a smi.
void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}
   2877 
   2878 
// Walks the prototype chain of the object in eax checking that for-in can
// use the enum cache: the receiver's map must have a valid enum cache, every
// prototype must have an empty enum cache and no elements. Branches to
// |call_runtime| when any check fails. Clobbers ebx, ecx and edx.
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  mov(ecx, eax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(kInvalidEnumCacheSentinel)));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(0)));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register ecx contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(equal, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  cmp(ecx, isolate()->factory()->empty_slow_element_dictionary());
  j(not_equal, call_runtime);

  bind(&no_elements);
  // Advance to the prototype; stop when we reach null.
  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}
   2919 
   2920 
// Checks whether an AllocationMemento directly follows the JSArray in
// |receiver_reg|. Branches to |no_memento_found| when the candidate address
// is outside new space; otherwise ends with a cmp against the allocation
// memento map, leaving the flags for the caller to test (equal => memento
// found). |scratch_reg| is clobbered.
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Address just past where the memento would end.
  lea(scratch_reg, Operand(receiver_reg,
      JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  cmp(scratch_reg, Immediate(new_space_start));
  j(less, no_memento_found);
  cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  j(greater, no_memento_found);
  cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
      Immediate(isolate()->factory()->allocation_memento_map()));
}
   2939 
   2940 
// Walks |object|'s prototype chain and branches to |found| if any object in
// the chain either is below JS_OBJECT_TYPE (proxy/value) or uses dictionary
// elements. Falls through when the chain ends at null without a match.
// Clobbers both scratch registers; |object| is left untouched.
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again, end;

  // Start at the first prototype of |object|.
  mov(current, object);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(equal, &end);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  CmpInstanceType(current, JS_OBJECT_TYPE);
  j(below, found);
  // Check the elements kind of the current map.
  mov(scratch1, FieldOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  // Advance to the next prototype; stop at null.
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(not_equal, &loop_again);

  bind(&end);
}
   2975 
   2976 
// Emits a truncating signed division of |dividend| by the compile-time
// constant |divisor| using multiplication by a magic number (Granlund &
// Montgomery). The quotient ends up in edx; eax is clobbered. |dividend|
// must not be eax or edx and is preserved.
void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  DCHECK(!dividend.is(eax));
  DCHECK(!dividend.is(edx));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  mov(eax, Immediate(mag.multiplier));
  imul(dividend);  // edx:eax = dividend * multiplier; high half in edx.
  // Correction terms for when the magic multiplier's sign does not match
  // the divisor's sign.
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  if (divisor > 0 && neg) add(edx, dividend);
  if (divisor < 0 && !neg && mag.multiplier > 0) sub(edx, dividend);
  if (mag.shift > 0) sar(edx, mag.shift);
  // Add the sign bit of the dividend to round the quotient toward zero.
  mov(eax, dividend);
  shr(eax, 31);
  add(edx, eax);
}
   2992 
   2993 
   2994 }  // namespace internal
   2995 }  // namespace v8
   2996 
   2997 #endif  // V8_TARGET_ARCH_X87
   2998