Home | History | Annotate | Download | only in x87
      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #if V8_TARGET_ARCH_X87
      6 
      7 #include "src/base/bits.h"
      8 #include "src/base/division-by-constant.h"
      9 #include "src/bootstrapper.h"
     10 #include "src/codegen.h"
     11 #include "src/debug/debug.h"
     12 #include "src/runtime/runtime.h"
     13 #include "src/x87/frames-x87.h"
     14 #include "src/x87/macro-assembler-x87.h"
     15 
     16 namespace v8 {
     17 namespace internal {
     18 
     19 // -------------------------------------------------------------------------
     20 // MacroAssembler implementation.
     21 
// Constructs a MacroAssembler emitting into |buffer| of |size| bytes.
// When |create_code_object| is kYes, code_object_ is seeded with the
// undefined value handle.
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}
     32 
     33 
     34 void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
     35   DCHECK(!r.IsDouble());
     36   if (r.IsInteger8()) {
     37     movsx_b(dst, src);
     38   } else if (r.IsUInteger8()) {
     39     movzx_b(dst, src);
     40   } else if (r.IsInteger16()) {
     41     movsx_w(dst, src);
     42   } else if (r.IsUInteger16()) {
     43     movzx_w(dst, src);
     44   } else {
     45     mov(dst, src);
     46   }
     47 }
     48 
     49 
     50 void MacroAssembler::Store(Register src, const Operand& dst, Representation r) {
     51   DCHECK(!r.IsDouble());
     52   if (r.IsInteger8() || r.IsUInteger8()) {
     53     mov_b(dst, src);
     54   } else if (r.IsInteger16() || r.IsUInteger16()) {
     55     mov_w(dst, src);
     56   } else {
     57     if (r.IsHeapObject()) {
     58       AssertNotSmi(src);
     59     } else if (r.IsSmi()) {
     60       AssertSmi(src);
     61     }
     62     mov(dst, src);
     63   }
     64 }
     65 
     66 
// Loads the root-list entry |index| into |destination|. Constant roots are
// loaded as an embedded handle; otherwise the value is fetched from the
// roots array, clobbering |destination| as the index scratch in the process.
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
    mov(destination, isolate()->heap()->root_handle(index));
    return;
  }
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  // Use destination itself as the index register, then overwrite it with
  // the loaded root value.
  mov(destination, Immediate(index));
  mov(destination, Operand::StaticArray(destination,
                                        times_pointer_size,
                                        roots_array_start));
}
     79 
     80 
// Stores |source| into root-list slot |index|, clobbering |scratch| as the
// index register. Only roots writable after initialization are allowed.
void MacroAssembler::StoreRoot(Register source,
                               Register scratch,
                               Heap::RootListIndex index) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
      source);
}
     91 
     92 
// Compares |with| against root-list entry |index|, loading the root via the
// roots array (clobbers |scratch|). Flags are set for the caller to branch on.
void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  cmp(with, Operand::StaticArray(scratch,
                                times_pointer_size,
                                roots_array_start));
}
    103 
    104 
// Compares |with| against a constant root; requires no scratch register
// because the root is embedded as an immediate handle.
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  cmp(with, isolate()->heap()->root_handle(index));
}
    109 
    110 
// Memory-operand variant: compares |with| against a constant root.
void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  cmp(with, isolate()->heap()->root_handle(index));
}
    116 
    117 
// Pushes a constant root onto the stack as an immediate handle.
void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Push(isolate()->heap()->root_handle(index));
}
    122 
// The caller-saved GP registers on this port (eax, ecx, edx); these are the
// registers Push/PopCallerSaved spill around calls into C code.
#define REG(Name) \
  { Register::kCode_##Name }

static const Register saved_regs[] = {REG(eax), REG(ecx), REG(edx)};

#undef REG

// Number of entries in saved_regs.
static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);
    131 
    132 void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
    133                                      Register exclusion1, Register exclusion2,
    134                                      Register exclusion3) {
    135   // We don't allow a GC during a store buffer overflow so there is no need to
    136   // store the registers in any particular way, but we do have to store and
    137   // restore them.
    138   for (int i = 0; i < kNumberOfSavedRegs; i++) {
    139     Register reg = saved_regs[i];
    140     if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
    141       push(reg);
    142     }
    143   }
    144   if (fp_mode == kSaveFPRegs) {
    145     // Save FPU state in m108byte.
    146     sub(esp, Immediate(108));
    147     fnsave(Operand(esp, 0));
    148   }
    149 }
    150 
    151 void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
    152                                     Register exclusion2, Register exclusion3) {
    153   if (fp_mode == kSaveFPRegs) {
    154     // Restore FPU state in m108byte.
    155     frstor(Operand(esp, 0));
    156     add(esp, Immediate(108));
    157   }
    158 
    159   for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    160     Register reg = saved_regs[i];
    161     if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
    162       pop(reg);
    163     }
    164   }
    165 }
    166 
// Branches to |condition_met| depending on whether |object|'s page has the
// in-new-space flag set (the sense is selected by |cc|). Clobbers |scratch|.
void MacroAssembler::InNewSpace(Register object, Register scratch, Condition cc,
                                Label* condition_met,
                                Label::Distance distance) {
  CheckPageFlag(object, scratch, MemoryChunk::kIsInNewSpaceMask, cc,
                condition_met, distance);
}
    173 
    174 
// Records |addr| in the store buffer and, when the buffer fills up, calls
// the StoreBufferOverflowStub. Depending on |and_then| the code either
// returns from the caller's frame (kReturnAtEnd) or falls through.
void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr, Register scratch, SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    // The remembered set only tracks old-to-new pointers, so the object
    // being recorded must not itself be in new space.
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Call stub on end of buffer.
  // Check for end of buffer.
  // NOTE(review): "equal" below means (top & kStoreBufferMask) == 0, i.e. the
  // top pointer just crossed a buffer boundary — treated as overflow.
  test(scratch, Immediate(StoreBuffer::kStoreBufferMask));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    j(not_equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
    217 
    218 
// Clamps the x87 top-of-stack value into [0, 255] and leaves the result in
// |result_reg|. Out-of-range values saturate; an invalid-arithmetic (#IA)
// conversion is handled on the conv_failure path using a sign compare.
void MacroAssembler::ClampTOSToUint8(Register result_reg) {
  Label done, conv_failure;
  sub(esp, Immediate(kPointerSize));
  fnclex();
  fist_s(Operand(esp, 0));
  pop(result_reg);
  // fist sets #IA on unrepresentable input; detect it via the status word.
  X87CheckIA();
  j(equal, &conv_failure, Label::kNear);
  // Fast path: already in [0, 255] if the upper 24 bits are clear.
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  // Saturate: sign bit set -> 1-1=0; clear (overflow above 255) -> 0-1
  // masked to 255.
  setcc(sign, result_reg);
  sub(result_reg, Immediate(1));
  and_(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  fnclex();
  fldz();
  fld(1);
  FCmp();
  setcc(below, result_reg);  // 1 if negative, 0 if positive.
  dec_b(result_reg);         // 0 if negative, 255 if positive.
  bind(&done);
}
    242 
    243 
// Clamps the integer in |reg| into [0, 255] in place: values already in
// range are untouched, negative values become 0, values above 255 become 255.
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  // In range iff the upper 24 bits are all zero.
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
    252 
    253 
// Truncates the double at [input_reg + offset] to an int32 in |result_reg|
// by calling the out-of-line DoubleToIStub.
void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}
    260 
    261 
// Truncates the x87 top-of-stack double to an int32 in |result_reg| by
// spilling it to a stack slot and delegating to SlowTruncateToI. The FPU
// stack itself is left unchanged (fst_d does not pop).
void MacroAssembler::TruncateX87TOSToI(Register result_reg) {
  sub(esp, Immediate(kDoubleSize));
  fst_d(MemOperand(esp, 0));
  SlowTruncateToI(result_reg, esp, 0);
  add(esp, Immediate(kDoubleSize));
}
    268 
    269 
// Converts the x87 top-of-stack value to an int32 in |result_reg|, jumping
// to |lost_precision| if the conversion is inexact, to |is_nan| for NaN, and
// (when minus_zero_mode == FAIL_ON_MINUS_ZERO) to |minus_zero| for -0.0.
void MacroAssembler::X87TOSToI(Register result_reg,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision, Label* is_nan,
                               Label* minus_zero, Label::Distance dst) {
  Label done;
  sub(esp, Immediate(kPointerSize));
  // Round-trip: convert to int32 and back, then compare with the original
  // (still on the FPU stack) to detect precision loss or NaN.
  fld(0);
  fist_s(MemOperand(esp, 0));
  fild_s(MemOperand(esp, 0));
  pop(result_reg);
  FCmp();
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    test(result_reg, Operand(result_reg));
    j(not_zero, &done, Label::kNear);
    // To check for minus zero, we load the value again as float, and check
    // if that is still 0.
    sub(esp, Immediate(kPointerSize));
    fst_s(MemOperand(esp, 0));
    pop(result_reg);
    // The single-precision bit pattern of -0.0 is non-zero (sign bit set).
    test(result_reg, Operand(result_reg));
    j(not_zero, minus_zero, dst);
  }
  bind(&done);
}
    296 
    297 
    298 void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
    299                                            Register input_reg) {
    300   Label done, slow_case;
    301 
    302   SlowTruncateToI(result_reg, input_reg);
    303   bind(&done);
    304 }
    305 
    306 
// Loads the 32-bit value |src| onto the x87 stack interpreted as an
// *unsigned* integer: fild_s treats it as signed, so negative results are
// fixed up by adding 2^32 (the uint32 bias constant).
void MacroAssembler::LoadUint32NoSSE2(const Operand& src) {
  Label done;
  push(src);
  fild_s(Operand(esp, 0));
  // If the sign bit was clear the signed interpretation is already correct.
  cmp(src, Immediate(0));
  j(not_sign, &done, Label::kNear);
  ExternalReference uint32_bias =
        ExternalReference::address_of_uint32_bias();
  fld_d(Operand::StaticVariable(uint32_bias));
  faddp(1);
  bind(&done);
  add(esp, Immediate(kPointerSize));
}
    320 
    321 
// Write barrier for a store of |value| into FixedArray |object| at smi
// index |index|. Computes the slot address into |index| (clobbering it) and
// dispatches to RecordWrite. In debug code the inputs are zapped afterwards.
void MacroAssembler::RecordWriteArray(
    Register object, Register value, Register index, SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action, SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    DCHECK_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(index, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
    356 
    357 
// Write barrier for a store of |value| into the field at |offset| within
// |object|. Computes the field address into |dst| (clobbering it) and
// dispatches to RecordWrite. In debug code the inputs are zapped afterwards.
void MacroAssembler::RecordWriteField(
    Register object, int offset, Register value, Register dst,
    SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action,
    SmiCheck smi_check, PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    // Verify the computed slot address is pointer-aligned.
    Label ok;
    test_b(dst, Immediate((1 << kPointerSizeLog2) - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(dst, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
    396 
    397 
// Write barrier specialized for storing |map| into |object|'s map slot.
// Only needed for incremental marking; the remembered set is never updated
// because maps can never be in new space. Clobbers both scratch registers.
void MacroAssembler::RecordWriteForMap(Register object, Handle<Map> map,
                                       Register scratch1, Register scratch2,
                                       SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    // Verify the map slot address is pointer-aligned.
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, Immediate((1 << kPointerSizeLog2) - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (!FLAG_incremental_marking) {
    return;
  }

  // Compute the address.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set.  This optimization
  // relies on the fact that maps can never be in new space.
  DCHECK(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET,
                       save_fp);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
    455 
    456 
// Generic write barrier: records the store of |value| at |address| inside
// |object|. Skips all work when both page-flag checks say the store is
// uninteresting; otherwise calls the RecordWriteStub. |value| doubles as a
// scratch register for the page-flag checks and is clobbered in debug code.
void MacroAssembler::RecordWrite(
    Register object, Register address, Register value, SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action, SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // Verify the slot at |address| really holds |value| (i.e. the store has
    // already happened before the barrier runs).
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
    520 
// Write barrier for the code-entry field of a JSFunction. Code entries are
// always in old space, so only incremental marking matters; when needed the
// barrier calls out to a C function rather than a stub. Clobbers |scratch|.
void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
                                               Register code_entry,
                                               Register scratch) {
  const int offset = JSFunction::kCodeEntryOffset;

  // Since a code entry (value) is always in old space, we don't need to update
  // remembered set. If incremental marking is off, there is nothing for us to
  // do.
  if (!FLAG_incremental_marking) return;

  DCHECK(!js_function.is(code_entry));
  DCHECK(!js_function.is(scratch));
  DCHECK(!code_entry.is(scratch));
  AssertNotSmi(js_function);

  if (emit_debug_code()) {
    // Verify the field already holds |code_entry|.
    Label ok;
    lea(scratch, FieldOperand(js_function, offset));
    cmp(code_entry, Operand(scratch, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  CheckPageFlag(code_entry, scratch,
                MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
                Label::kNear);
  CheckPageFlag(js_function, scratch,
                MemoryChunk::kPointersFromHereAreInterestingMask, zero, &done,
                Label::kNear);

  // Save input registers.
  push(js_function);
  push(code_entry);

  const Register dst = scratch;
  lea(dst, FieldOperand(js_function, offset));

  // Save caller-saved registers.
  PushCallerSaved(kDontSaveFPRegs, js_function, code_entry);

  // C call: (js_function, slot, isolate).
  int argument_count = 3;
  PrepareCallCFunction(argument_count, code_entry);
  mov(Operand(esp, 0 * kPointerSize), js_function);
  mov(Operand(esp, 1 * kPointerSize), dst);  // Slot.
  mov(Operand(esp, 2 * kPointerSize),
      Immediate(ExternalReference::isolate_address(isolate())));

  {
    AllowExternalCallThatCantCauseGC scope(this);
    CallCFunction(
        ExternalReference::incremental_marking_record_write_code_entry_function(
            isolate()),
        argument_count);
  }

  // Restore caller-saved registers.
  PopCallerSaved(kDontSaveFPRegs, js_function, code_entry);

  // Restore input registers.
  pop(code_entry);
  pop(js_function);

  bind(&done);
}
    590 
// Emits a call into the runtime's HandleDebuggerStatement with zero
// arguments (argc in eax, runtime function reference in ebx) via CEntryStub,
// using a DEBUGGER_STATEMENT reloc so the debugger can identify the site.
void MacroAssembler::DebugBreak() {
  Move(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kHandleDebuggerStatement,
                                       isolate())));
  CEntryStub ces(isolate(), 1);
  call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}
    598 
// Shifts the 64-bit value in the register pair high:low left by the
// constant |shift| (0..63). For shift >= 32, low's bits move entirely into
// high and low becomes zero.
void MacroAssembler::ShlPair(Register high, Register low, uint8_t shift) {
  if (shift >= 32) {
    mov(high, low);
    shl(high, shift - 32);
    xor_(low, low);
  } else {
    // shld fills the bits shifted out of low into high; operand order
    // follows this assembler's shld convention.
    shld(high, low, shift);
    shl(low, shift);
  }
}
    609 
// Shifts the 64-bit pair high:low left by the amount in cl (ecx). The
// hardware shld/shl only honor the shift modulo 32, so bit 5 of ecx is
// tested to fix up shifts >= 32 (low moves into high, low becomes zero).
void MacroAssembler::ShlPair_cl(Register high, Register low) {
  shld_cl(high, low);
  shl_cl(low);
  Label done;
  test(ecx, Immediate(0x20));
  j(equal, &done, Label::kNear);
  mov(high, low);
  xor_(low, low);
  bind(&done);
}
    620 
// Logically shifts the 64-bit value in high:low right by the constant
// |shift| (0..63). For shift >= 32, high's bits move entirely into low and
// high becomes zero.
void MacroAssembler::ShrPair(Register high, Register low, uint8_t shift) {
  if (shift >= 32) {
    mov(low, high);
    shr(low, shift - 32);
    xor_(high, high);
  } else {
    // shrd fills the bits shifted out of high into low; operand order
    // follows this assembler's shrd convention.
    shrd(high, low, shift);
    shr(high, shift);
  }
}
    631 
// Logically shifts the 64-bit pair high:low right by the amount in cl
// (ecx). Bit 5 of ecx is tested to fix up shifts >= 32, which the hardware
// shrd/shr (shift modulo 32) cannot express directly.
void MacroAssembler::ShrPair_cl(Register high, Register low) {
  shrd_cl(low, high);
  shr_cl(high);
  Label done;
  test(ecx, Immediate(0x20));
  j(equal, &done, Label::kNear);
  mov(low, high);
  xor_(high, high);
  bind(&done);
}
    642 
// Arithmetically shifts the 64-bit value in high:low right by the constant
// |shift| (0..63). For shift >= 32, low receives high's shifted bits and
// high is filled with the sign (sar by 31).
void MacroAssembler::SarPair(Register high, Register low, uint8_t shift) {
  if (shift >= 32) {
    mov(low, high);
    sar(low, shift - 32);
    sar(high, 31);
  } else {
    // shrd fills the bits shifted out of high into low; sar preserves the
    // sign in high.
    shrd(high, low, shift);
    sar(high, shift);
  }
}
    653 
// Arithmetically shifts the 64-bit pair high:low right by the amount in cl
// (ecx). Bit 5 of ecx is tested to fix up shifts >= 32: low takes high's
// value and high is sign-filled.
void MacroAssembler::SarPair_cl(Register high, Register low) {
  shrd_cl(low, high);
  sar_cl(high);
  Label done;
  test(ecx, Immediate(0x20));
  j(equal, &done, Label::kNear);
  mov(low, high);
  sar(high, 31);
  bind(&done);
}
    664 
    665 bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
    666   static const int kMaxImmediateBits = 17;
    667   if (!RelocInfo::IsNone(x.rmode_)) return false;
    668   return !is_intn(x.x_, kMaxImmediateBits);
    669 }
    670 
    671 
// Moves |x| into |dst|, obfuscating unsafe immediates against JIT spraying:
// the value is emitted XORed with the jit cookie and un-XORed at runtime.
void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Move(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Move(dst, x);
  }
}
    680 
    681 
// Pushes |x|, obfuscating unsafe immediates against JIT spraying: the
// cookie-XORed value is pushed, then un-XORed in place on the stack.
void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}
    690 
    691 
// Loads |heap_object|'s map into |map| and compares its instance type
// against |type|, leaving the flags set for the caller to branch on.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
    698 
    699 
// Compares the instance-type byte of |map| against |type| (flags-only).
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset), Immediate(type));
}
    703 
// Compares |obj|'s map word against the handle |map| (flags-only).
void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}
    707 
    708 
// Jumps to |fail| unless |obj| is a heap object whose map equals |map|.
// With DO_SMI_CHECK, smis also branch to |fail|.
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}
    720 
    721 
// Jumps to |success| if |obj|'s map matches the map held in weak cell
// |cell|; otherwise falls through. Clobbers both scratch registers.
void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
                                     Register scratch2, Handle<WeakCell> cell,
                                     Handle<Code> success,
                                     SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  mov(scratch1, FieldOperand(obj, HeapObject::kMapOffset));
  CmpWeakValue(scratch1, cell, scratch2);
  j(equal, success);

  bind(&fail);
}
    736 
    737 
// Tests whether |heap_object| is a string, returning the condition that
// holds when it is (zero, i.e. the not-string bit is clear). Clobbers
// |map| and |instance_type|.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
    747 
    748 
// Tests whether |heap_object| is a Name (instance type <= LAST_NAME_TYPE),
// returning the condition that holds when it is. Clobbers |map| and
// |instance_type|.
Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, Immediate(LAST_NAME_TYPE));
  return below_equal;
}
    757 
    758 
// Compares ST(0) with ST(1) and pops both (fucompp), transferring the FPU
// condition codes into EFLAGS via fnstsw/sahf. eax is preserved.
void MacroAssembler::FCmp() {
  fucompp();
  push(eax);
  fnstsw_ax();
  sahf();
  pop(eax);
}
    766 
    767 
// Sets the equal condition iff ST(0) is -0.0, using fxam's C3/C1 condition
// bits, then pops ST(0). eax is preserved.
void MacroAssembler::FXamMinusZero() {
  fxam();
  push(eax);
  fnstsw_ax();
  // Mask the fxam condition bits (C0/C1/C2/C3) out of the status word.
  and_(eax, Immediate(0x4700));
  // For minus zero, C3 == 1 && C1 == 1.
  cmp(eax, Immediate(0x4200));
  pop(eax);
  fstp(0);
}
    778 
    779 
// Tests the sign of ST(0) via fxam (leaves the C1 bit, 0x0200, in the
// flags computation) and pops ST(0). eax is preserved; the caller branches
// on the resulting zero/non-zero flags.
void MacroAssembler::FXamSign() {
  fxam();
  push(eax);
  fnstsw_ax();
  // For negative value (including -0.0), C1 == 1.
  and_(eax, Immediate(0x0200));
  pop(eax);
  fstp(0);
}
    789 
    790 
// Sets the equal condition iff the FPU status word reports an invalid
// arithmetic (#IA) exception: IE set without SF. eax is preserved.
void MacroAssembler::X87CheckIA() {
  push(eax);
  fnstsw_ax();
  // For #IA, IE == 1 && SF == 0.
  and_(eax, Immediate(0x0041));
  cmp(eax, Immediate(0x0001));
  pop(eax);
}
    799 
    800 
// Sets the FPU rounding-control field (bits 10-11 of the control word,
// preserved elsewhere via the 0xF3FF mask). |rc| must already be shifted
// into position:
// rc=00B, round to nearest.
// rc=01B, round down.
// rc=10B, round up.
// rc=11B, round toward zero.
void MacroAssembler::X87SetRC(int rc) {
  sub(esp, Immediate(kPointerSize));
  fnstcw(MemOperand(esp, 0));
  and_(MemOperand(esp, 0), Immediate(0xF3FF));
  or_(MemOperand(esp, 0), Immediate(rc));
  fldcw(MemOperand(esp, 0));
  add(esp, Immediate(kPointerSize));
}
    813 
    814 
// Replaces the entire FPU control word with |cw| by pushing it and
// executing fldcw from the stack slot.
void MacroAssembler::X87SetFPUCW(int cw) {
  RecordComment("-- X87SetFPUCW start --");
  push(Immediate(cw));
  fldcw(MemOperand(esp, 0));
  add(esp, Immediate(kPointerSize));
  RecordComment("-- X87SetFPUCW end--");
}
    822 
    823 
// Debug-code check: aborts unless |object| is a number (smi or heap
// number). No code is emitted in release builds.
void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);  // Smis are numbers.
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandNotANumber);
    bind(&ok);
  }
}
    834 
// Debug-code check: aborts if |object| is a number (smi or heap number).
// No code is emitted in release builds.
void MacroAssembler::AssertNotNumber(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsANumber);
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(not_equal, kOperandIsANumber);
  }
}
    844 
// Debug-code check: aborts unless |object| is a smi.
void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, kOperandIsNotASmi);
  }
}
    851 
    852 
// Debug-code check: aborts unless |object| is a string (non-smi with
// instance type below FIRST_NONSTRING_TYPE). |object| is preserved via
// push/pop around the map load.
void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, kOperandIsNotAString);
  }
}
    864 
    865 
// Debug-mode check: abort unless |object| is a Name (non-Smi with instance
// type <= LAST_NAME_TYPE).
void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    push(object);  // Preserve |object|; the map load below clobbers it.
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, kOperandIsNotAName);  // pop does not alter EFLAGS.
  }
}
    877 
    878 
// Debug-mode check: abort unless |object| is a JSFunction.
void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAFunction);
    Push(object);  // CmpObjectType uses |object| as its map scratch.
    CmpObjectType(object, JS_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotAFunction);
  }
}
    889 
    890 
// Debug-mode check: abort unless |object| is a JSBoundFunction.
void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotABoundFunction);
    Push(object);  // CmpObjectType uses |object| as its map scratch.
    CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotABoundFunction);
  }
}
    901 
// Debug-mode check: abort unless |object| is a JSGeneratorObject.
void MacroAssembler::AssertGeneratorObject(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAGeneratorObject);
    Push(object);  // CmpObjectType uses |object| as its map scratch.
    CmpObjectType(object, JS_GENERATOR_OBJECT_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotAGeneratorObject);
  }
}
    912 
// Debug-mode check: abort unless |object| is a JSReceiver (instance type
// >= FIRST_JS_RECEIVER_TYPE; the STATIC_ASSERT guarantees there is no
// upper bound to test).
void MacroAssembler::AssertReceiver(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAReceiver);
    Push(object);  // CmpObjectType uses |object| as its map scratch.
    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
    CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, object);
    Pop(object);
    Check(above_equal, kOperandIsNotAReceiver);
  }
}
    924 
// Debug-mode check: abort unless |object| is the undefined value or an
// AllocationSite (recognized by its map).
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    // Offset 0 is presumably HeapObject::kMapOffset — the map word.
    cmp(FieldOperand(object, 0),
        Immediate(isolate()->factory()->allocation_site_map()));
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}
    937 
    938 
// Debug-mode check: abort if |object| is a Smi.
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmi);  // Non-Smi iff a tag bit is set.
  }
}
    945 
// Build the fixed part of a stub frame: saved caller ebp, new ebp, and the
// frame type pushed as a Smi marker.
void MacroAssembler::StubPrologue(StackFrame::Type type) {
  push(ebp);  // Caller's frame pointer.
  mov(ebp, esp);
  push(Immediate(Smi::FromInt(type)));  // Frame type marker.
}
    951 
    952 
// Emit the standard JS function prologue. When |code_pre_aging| is set,
// emit a call to the code-aging builtin instead, padded with nops to the
// fixed kNoCodeAgeSequenceLength so the sequence can later be patched.
void MacroAssembler::Prologue(bool code_pre_aging) {
  // Both branches must occupy exactly kNoCodeAgeSequenceLength bytes.
  PredictableCodeSizeScope predictible_code_size_scope(this,
      kNoCodeAgeSequenceLength);
  if (code_pre_aging) {
      // Pre-age the code.
    call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
        RelocInfo::CODE_AGE_SEQUENCE);
    Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength);
  } else {
    push(ebp);  // Caller's frame pointer.
    mov(ebp, esp);
    push(esi);  // Callee's context.
    push(edi);  // Callee's JS function.
  }
}
    968 
// Load the current function's feedback vector into |vector|:
// frame function slot -> JSFunction -> feedback-vector cell -> cell value.
void MacroAssembler::EmitLoadFeedbackVector(Register vector) {
  mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  mov(vector, FieldOperand(vector, JSFunction::kFeedbackVectorOffset));
  mov(vector, FieldOperand(vector, Cell::kValueOffset));
}
    974 
    975 
// Two-argument overload exists only to satisfy the shared MacroAssembler
// interface; x87 has no out-of-line constant pool, so it must not be
// called.
void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // Out-of-line constant pool not implemented on x87.
  UNREACHABLE();
}
    981 
    982 
// Build a typed stack frame: saved ebp, Smi-encoded frame type, and for
// INTERNAL frames additionally the code object.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(Immediate(Smi::FromInt(type)));
  if (type == StackFrame::INTERNAL) {
    push(Immediate(CodeObject()));
  }
  if (emit_debug_code()) {
    // The code object slot must hold a real code object by now; undefined
    // is the unpatched placeholder (see the MacroAssembler constructor).
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}
    995 
    996 
// Tear down a frame built by EnterFrame. In debug mode first verify the
// frame's type marker matches |type|.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  leave();  // mov esp, ebp; pop ebp.
}
   1005 
// Build a builtin frame: saved ebp, then context, target and argument
// count. Mirrored exactly by LeaveBuiltinFrame.
void MacroAssembler::EnterBuiltinFrame(Register context, Register target,
                                       Register argc) {
  Push(ebp);
  Move(ebp, esp);
  Push(context);
  Push(target);
  Push(argc);
}
   1014 
// Tear down a frame built by EnterBuiltinFrame, restoring the three saved
// registers in reverse push order.
void MacroAssembler::LeaveBuiltinFrame(Register context, Register target,
                                       Register argc) {
  Pop(argc);
  Pop(target);
  Pop(context);
  leave();
}
   1022 
// Build the fixed portion of an exit frame (JS -> C++ transition): saved
// ebp, frame-type marker, saved-entry-sp slot and code object; then
// publish ebp, esi and ebx into the isolate's per-thread top slots.
void MacroAssembler::EnterExitFramePrologue(StackFrame::Type frame_type) {
  DCHECK(frame_type == StackFrame::EXIT ||
         frame_type == StackFrame::BUILTIN_EXIT);

  // Set up the frame structure on the stack.
  DCHECK_EQ(+2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
  DCHECK_EQ(+1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
  DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  push(Immediate(Smi::FromInt(frame_type)));
  DCHECK_EQ(-2 * kPointerSize, ExitFrameConstants::kSPOffset);
  push(Immediate(0));  // Saved entry sp, patched before call.
  DCHECK_EQ(-3 * kPointerSize, ExitFrameConstants::kCodeOffset);
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  ExternalReference c_function_address(Isolate::kCFunctionAddress, isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
  // NOTE(review): ebx is assumed to already hold the target C function
  // address here — confirm against the callers.
  mov(Operand::StaticVariable(c_function_address), ebx);
}
   1049 
   1050 
// Finish exit-frame setup: optionally save the full x87 FPU state (FNSAVE,
// 108 bytes), reserve |argc| argument slots, align esp to the OS
// activation-frame alignment, and record the final esp in the frame's
// saved-entry-sp slot.
void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save FPU state.
  if (save_doubles) {
    // Store FPU state to m108byte.
    int space = 108 + argc * kPointerSize;  // 108 = FNSAVE image size.
    sub(esp, Immediate(space));
    const int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    fnsave(MemOperand(ebp, offset - 108));  // Restored by LeaveExitFrame.
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
    and_(esp, -kFrameAlignment);  // Round esp down to an aligned boundary.
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
   1073 
// Enter an exit frame for calling out to C++. eax is read as the JS
// argument count: edi receives argc and esi receives argv (both
// callee-saved across the C call); then |argc| C-argument slots are
// reserved.
void MacroAssembler::EnterExitFrame(int argc, bool save_doubles,
                                    StackFrame::Type frame_type) {
  EnterExitFramePrologue(frame_type);

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));  // argv = &last argument.

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(argc, save_doubles);
}
   1086 
   1087 
// Enter an EXIT frame for an API call: same as EnterExitFrame but never
// saves FPU state and does not set up edi/esi.
void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue(StackFrame::EXIT);
  EnterExitFrameEpilogue(argc, false);
}
   1092 
   1093 
// Tear down an exit frame built by EnterExitFrame. If |save_doubles|,
// restore the FPU state saved by EnterExitFrameEpilogue. With
// |pop_arguments| the caller's arguments and receiver are popped too
// (esi must still hold argv as set up by EnterExitFrame).
void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
  // Optionally restore FPU state.
  if (save_doubles) {
    const int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    frstor(MemOperand(ebp, offset - 108));  // Matches fnsave in the prologue.
  }

  if (pop_arguments) {
    // Get the return address from the stack and restore the frame pointer.
    mov(ecx, Operand(ebp, 1 * kPointerSize));
    mov(ebp, Operand(ebp, 0 * kPointerSize));

    // Pop the arguments and the receiver from the caller stack.
    lea(esp, Operand(esi, 1 * kPointerSize));

    // Push the return address to get ready to return.
    push(ecx);
  } else {
    // Otherwise just leave the exit frame.
    leave();
  }

  LeaveExitFrameEpilogue(true);
}
   1118 
   1119 
// Final exit-frame teardown: optionally restore esi from the isolate's
// context slot, then clear the per-thread context (debug only) and
// c-entry-fp slots.
void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  if (restore_context) {
    mov(esi, Operand::StaticVariable(context_address));
  }
#ifdef DEBUG
  // Poison the slot so stale reads are caught in debug builds.
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
   1135 
   1136 
// Tear down a frame built by EnterApiExitFrame; |restore_context| selects
// whether esi is reloaded from the isolate's context slot.
void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  mov(esp, ebp);  // Discard everything below the frame pointer.
  pop(ebp);

  LeaveExitFrameEpilogue(restore_context);
}
   1143 
   1144 
// Push a new stack handler (which consists solely of the next-handler
// link) and make it the current handler in the isolate's handler slot.
void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));

  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}
   1157 
   1158 
// Unlink the current stack handler: pop the next-handler link back into
// the isolate's handler slot, then drop any remaining handler words.
void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  // No-op when kSize == kPointerSize (see PushStackHandler's assert).
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
   1165 
   1166 
// Compute the hash code from the untagged key.  This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stub-hydrogen.cc
//
// Note: r0 will contain hash code
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (serializer_enabled()) {
    // When serializing, the seed must be read from the roots array at
    // runtime rather than baked in as an immediate.
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // The following is the integer hash finalizer; each step mirrors a line
  // of ComputeIntegerHash.
  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
  // Clear the top two bits so the result fits in a Smi.
  and_(r0, 0x3fffffff);
}
   1210 
// Load the current allocation top into |result|. With RESULT_CONTAINS_TOP
// the caller already has it (debug-verified); otherwise, if |scratch| is
// valid, it is left holding the address of the top variable so
// UpdateAllocationTopHelper can reuse it.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    DCHECK(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(allocation_top));
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(allocation_top));
  } else {
    mov(scratch, Immediate(allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
   1237 
   1238 
// Store |result_end| as the new allocation top. If |scratch| is valid it
// must already hold the address of the top variable (as set up by
// LoadAllocationTopHelper); otherwise the external reference is used
// directly.
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    // The new top must be object-aligned.
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
   1257 
   1258 
// Allocate |object_size| bytes (compile-time constant, at most
// kMaxRegularHeapObjectSize) in the space selected by |flags|. On success
// |result| holds the tagged object; on failure control jumps to
// |gc_required|. |result_end| and |scratch| may be no_reg.
void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK(object_size <= kMaxRegularHeapObjectSize);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    // Inline allocation disabled: always take the runtime path.
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      // Pretenured space may have an unaligned limit, so make sure the
      // filler store below cannot run past it.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  Register top_reg = result_end.is_valid() ? result_end : result;

  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  cmp(top_reg, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
    UpdateAllocationTopHelper(top_reg, scratch, flags);
  }

  if (top_reg.is(result)) {
    // result was used as the bump pointer: back up to the object start and
    // add the heap-object tag in a single subtraction.
    sub(result, Immediate(object_size - kHeapObjectTag));
  } else {
    // Tag the result.
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }
}
   1330 
   1331 
// Allocate an array-like object of |header_size| bytes plus
// |element_count| elements scaled by |element_size|. The count register
// may hold an int32 or a Smi (|element_count_type|); for Smis the scale
// factor is reduced by one to cancel the Smi tag shift. On success
// |result| is tagged and |result_end| points just past the object; on
// failure control jumps to |gc_required|.
void MacroAssembler::Allocate(int header_size,
                              ScaleFactor element_size,
                              Register element_count,
                              RegisterValueType element_count_type,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & SIZE_IN_WORDS) == 0);
  DCHECK((flags & ALLOCATION_FOLDING_DOMINATOR) == 0);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    // Inline allocation disabled: always take the runtime path.
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      // Pretenured space may have an unaligned limit, so make sure the
      // filler store below cannot run past it.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  // We assume that element_count*element_size + header_size does not
  // overflow.
  if (element_count_type == REGISTER_VALUE_IS_SMI) {
    // A Smi is the value shifted left by one, so stepping down one scale
    // factor yields the correct byte count.
    STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
    DCHECK(element_size >= times_2);
    DCHECK(kSmiTagSize == 1);
    element_size = static_cast<ScaleFactor>(element_size - 1);
  } else {
    DCHECK(element_count_type == REGISTER_VALUE_IS_INT32);
  }
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  j(carry, gc_required);  // top + size wrapped around: out of space.
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result.
  DCHECK(kHeapObjectTag == 1);
  inc(result);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}
   1408 
// Allocate |object_size| bytes where the size is only known at runtime (in
// a register). On success |result| is tagged and |result_end| points just
// past the object; on failure control jumps to |gc_required|.
void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    // Inline allocation disabled: always take the runtime path.
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      // Pretenured space may have an unaligned limit, so make sure the
      // filler store below cannot run past it.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result.
  DCHECK(kHeapObjectTag == 1);
  inc(result);

  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
    UpdateAllocationTopHelper(result_end, scratch, flags);
  }
}
   1472 
// Bump-pointer allocate |object_size| bytes without any limit check (the
// caller is responsible for ensuring the space exists). |result| receives
// the tagged object; |result_end| is clobbered with the new top.
void MacroAssembler::FastAllocate(int object_size, Register result,
                                  Register result_end, AllocationFlags flags) {
  DCHECK(!result.is(result_end));
  // Load address of new object into result.
  LoadAllocationTopHelper(result, no_reg, flags);

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Pad with a one-word filler if top is not double-aligned.
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  lea(result_end, Operand(result, object_size));
  UpdateAllocationTopHelper(result_end, no_reg, flags);

  DCHECK(kHeapObjectTag == 1);
  inc(result);  // Tag the result.
}
   1496 
// Register-sized variant of FastAllocate: bump-pointer allocate
// |object_size| (runtime value) bytes without any limit check. |result|
// receives the tagged object; |result_end| is clobbered with the new top.
void MacroAssembler::FastAllocate(Register object_size, Register result,
                                  Register result_end, AllocationFlags flags) {
  DCHECK(!result.is(result_end));
  // Load address of new object into result.
  LoadAllocationTopHelper(result, no_reg, flags);

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Pad with a one-word filler if top is not double-aligned.
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  lea(result_end, Operand(result, object_size, times_1, 0));
  UpdateAllocationTopHelper(result_end, no_reg, flags);

  DCHECK(kHeapObjectTag == 1);
  inc(result);  // Tag the result.
}
   1520 
// Allocate a (possibly mutable) HeapNumber in new space and install its
// map. The value field is left uninitialized; the caller must fill it.
// Jumps to |gc_required| on allocation failure.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required,
                                        MutableMode mode) {
  // Allocate heap number in new space.
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  Handle<Map> map = mode == MUTABLE
      ? isolate()->factory()->mutable_heap_number_map()
      : isolate()->factory()->heap_number_map();

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset), Immediate(map));
}
   1537 
// Allocate and fully initialize a JSValue wrapper: map from the
// constructor's initial map, empty properties and elements, and |value| in
// the value slot. Clobbers |constructor| and |scratch|; jumps to
// |gc_required| on allocation failure.
void MacroAssembler::AllocateJSValue(Register result, Register constructor,
                                     Register value, Register scratch,
                                     Label* gc_required) {
  DCHECK(!result.is(constructor));
  DCHECK(!result.is(scratch));
  DCHECK(!result.is(value));

  // Allocate JSValue in new space.
  Allocate(JSValue::kSize, result, scratch, no_reg, gc_required,
           NO_ALLOCATION_FLAGS);

  // Initialize the JSValue.
  LoadGlobalFunctionInitialMap(constructor, scratch);
  mov(FieldOperand(result, HeapObject::kMapOffset), scratch);
  LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
  mov(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
  mov(FieldOperand(result, JSObject::kElementsOffset), scratch);
  mov(FieldOperand(result, JSValue::kValueOffset), value);
  // All four fields above are exactly the object's payload.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
}
   1558 
// Store |filler| into every pointer-sized slot in
// [current_address, end_address). |current_address| is clobbered
// (advanced to |end_address|).
void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
                                                Register end_address,
                                                Register filler) {
  Label loop, entry;
  jmp(&entry, Label::kNear);  // Test-first loop: handles an empty range.
  bind(&loop);
  mov(Operand(current_address, 0), filler);
  add(current_address, Immediate(kPointerSize));
  bind(&entry);
  cmp(current_address, end_address);
  j(below, &loop, Label::kNear);
}
   1571 
   1572 
   1573 void MacroAssembler::BooleanBitTest(Register object,
   1574                                     int field_offset,
   1575                                     int bit_index) {
   1576   bit_index += kSmiTagSize + kSmiShiftSize;
   1577   DCHECK(base::bits::IsPowerOfTwo32(kBitsPerByte));
   1578   int byte_index = bit_index / kBitsPerByte;
   1579   int byte_bit_index = bit_index & (kBitsPerByte - 1);
   1580   test_b(FieldOperand(object, field_offset + byte_index),
   1581          Immediate(1 << byte_bit_index));
   1582 }
   1583 
   1584 
   1585 
// Jump to |then_label| if |result| is zero while |op| is negative — i.e.
// the operation that produced |result| would have yielded -0.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok, Label::kNear);  // Non-zero result cannot be -0.
  test(op, op);
  j(sign, then_label, Label::kNear);  // Operand negative: -0 case.
  bind(&ok);
}
   1596 
   1597 
// Jump to |then_label| if |result| is zero while either |op1| or |op2| is
// negative — i.e. the operation that produced |result| would have yielded
// -0. Clobbers |scratch|.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok, Label::kNear);  // Non-zero result cannot be -0.
  mov(scratch, op1);
  or_(scratch, op2);  // Sign flag set iff op1 or op2 is negative.
  j(sign, then_label, Label::kNear);
  bind(&ok);
}
   1611 
   1612 
// Follow |map|'s constructor-or-back-pointer chain until a non-map (the
// actual constructor, or a Smi) is reached; leave it in |result|.
// Clobbers |temp|.
void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp) {
  Label done, loop;
  mov(result, FieldOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  JumpIfSmi(result, &done, Label::kNear);  // A Smi terminates the chain.
  CmpObjectType(result, MAP_TYPE, temp);
  j(not_equal, &done, Label::kNear);  // Non-map heap object: constructor.
  mov(result, FieldOperand(result, Map::kConstructorOrBackPointerOffset));
  jmp(&loop);
  bind(&done);
}
   1625 
// Call a code stub, tagging the call site with |ast_id| for type feedback.
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
   1630 
   1631 
// Tail-call a code stub: jump rather than call, so the stub returns
// directly to our caller.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
   1635 
   1636 
   1637 void MacroAssembler::StubReturn(int argc) {
   1638   DCHECK(argc >= 1 && generating_stub());
   1639   ret((argc - 1) * kPointerSize);
   1640 }
   1641 
   1642 
   1643 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
   1644   return has_frame_ || !stub->SometimesSetsUpAFrame();
   1645 }
   1646 
// Calls runtime function |f| through CEntryStub: the argument count is
// passed in eax and the C entry point in ebx.
void MacroAssembler::CallRuntime(const Runtime::Function* f, int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Move(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(isolate(), 1, save_doubles);
  CallStub(&ces);
}
   1663 
   1664 
// Calls the external function |ref| through CEntryStub using the same
// ABI as CallRuntime: argument count in eax, entry point in ebx.
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}
   1673 
   1674 
// Tail-calls the runtime function |fid|. For functions with a fixed
// argument count the count is loaded into eax here; otherwise the
// caller must already have placed it in eax (see state comment below).
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  // ----------- S t a t e -------------
  //  -- esp[0]                 : return address
  //  -- esp[8]                 : argument num_arguments - 1
  //  ...
  //  -- esp[8 * num_arguments] : argument 0 (receiver)
  //
  //  For runtime functions with variable arguments:
  //  -- eax                    : number of  arguments
  // -----------------------------------

  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    // TODO(1236192): Most runtime routines don't need the number of
    // arguments passed in because it is constant. At some point we
    // should remove this need and make the runtime routine entry code
    // smarter.
    mov(eax, Immediate(function->nargs));
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}
   1697 
// Tail-calls CEntryStub with the C entry point |ext| in ebx.
// |builtin_exit_frame| selects the kind of exit frame the stub builds.
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             bool builtin_exit_frame) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(isolate(), 1, kDontSaveFPRegs, kArgvOnStack,
                 builtin_exit_frame);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
   1706 
// Removes the current frame in preparation for a tail call: the return
// address and the callee's arguments are copied down over the caller's
// arguments, ebp is restored to the caller's frame pointer, and esp is
// left pointing at the relocated return address. On entry
// |caller_args_count_reg| holds the caller's argument count; it is
// clobbered, as are |scratch0| and |scratch1|.
// |number_of_temp_values_after_return_address| accounts for extra
// temporaries the caller pushed on top of the return address (only
// allowed when the return address is still on the stack).
// NOTE(review): this uses `#if DEBUG` while Abort() below uses
// `#ifdef DEBUG`; confirm DEBUG expands to a value in debug builds.
void MacroAssembler::PrepareForTailCall(
    const ParameterCount& callee_args_count, Register caller_args_count_reg,
    Register scratch0, Register scratch1, ReturnAddressState ra_state,
    int number_of_temp_values_after_return_address) {
#if DEBUG
  if (callee_args_count.is_reg()) {
    DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
                       scratch1));
  } else {
    DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
  }
  DCHECK(ra_state != ReturnAddressState::kNotOnStack ||
         number_of_temp_values_after_return_address == 0);
#endif

  // Calculate the destination address where we will put the return address
  // after we drop current frame.
  Register new_sp_reg = scratch0;
  if (callee_args_count.is_reg()) {
    sub(caller_args_count_reg, callee_args_count.reg());
    lea(new_sp_reg,
        Operand(ebp, caller_args_count_reg, times_pointer_size,
                StandardFrameConstants::kCallerPCOffset -
                    number_of_temp_values_after_return_address * kPointerSize));
  } else {
    lea(new_sp_reg, Operand(ebp, caller_args_count_reg, times_pointer_size,
                            StandardFrameConstants::kCallerPCOffset -
                                (callee_args_count.immediate() +
                                 number_of_temp_values_after_return_address) *
                                    kPointerSize));
  }

  if (FLAG_debug_code) {
    cmp(esp, new_sp_reg);
    Check(below, kStackAccessBelowStackPointer);
  }

  // Copy return address from caller's frame to current frame's return address
  // to avoid its trashing and let the following loop copy it to the right
  // place.
  Register tmp_reg = scratch1;
  if (ra_state == ReturnAddressState::kOnStack) {
    mov(tmp_reg, Operand(ebp, StandardFrameConstants::kCallerPCOffset));
    mov(Operand(esp, number_of_temp_values_after_return_address * kPointerSize),
        tmp_reg);
  } else {
    DCHECK(ReturnAddressState::kNotOnStack == ra_state);
    DCHECK_EQ(0, number_of_temp_values_after_return_address);
    Push(Operand(ebp, StandardFrameConstants::kCallerPCOffset));
  }

  // Restore caller's frame pointer now as it could be overwritten by
  // the copying loop.
  mov(ebp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // +2 here is to copy both receiver and return address.
  Register count_reg = caller_args_count_reg;
  if (callee_args_count.is_reg()) {
    lea(count_reg, Operand(callee_args_count.reg(),
                           2 + number_of_temp_values_after_return_address));
  } else {
    mov(count_reg, Immediate(callee_args_count.immediate() + 2 +
                             number_of_temp_values_after_return_address));
    // TODO(ishell): Unroll copying loop for small immediate values.
  }

  // Now copy callee arguments to the caller frame going backwards to avoid
  // callee arguments corruption (source and destination areas could overlap).
  Label loop, entry;
  jmp(&entry, Label::kNear);
  bind(&loop);
  dec(count_reg);
  mov(tmp_reg, Operand(esp, count_reg, times_pointer_size, 0));
  mov(Operand(new_sp_reg, count_reg, times_pointer_size, 0), tmp_reg);
  bind(&entry);
  cmp(count_reg, Immediate(0));
  j(not_equal, &loop, Label::kNear);

  // Leave current frame.
  mov(esp, new_sp_reg);
}
   1788 
// Compares the expected and actual argument counts and, on mismatch,
// calls (or tail-calls, depending on |flag|) the ArgumentsAdaptorTrampoline.
// The actual count ends up in eax and the expected count in ebx, which is
// the adaptor's ABI. When the counts provably match at compile time no
// check is emitted. *definitely_mismatches is set when they provably
// differ, in which case control never reaches |done|.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    mov(eax, actual.immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      mov(eax, actual.immediate());
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      DCHECK(expected.reg().is(ebx));
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      DCHECK(actual.reg().is(eax));
      DCHECK(expected.reg().is(ebx));
    } else {
      Move(eax, actual.reg());
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        jmp(done, done_near);
      }
    } else {
      // Tail call: the adaptor returns straight to our caller.
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
   1854 
// Calls Runtime::kDebugOnFunctionCall when the debugger's
// hook-on-function-call flag is set. Register-based expected/actual
// counts and |new_target| are Smi-tagged and pushed around the runtime
// call, then restored, so all registers are preserved for the caller.
void MacroAssembler::CheckDebugHook(Register fun, Register new_target,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual) {
  Label skip_hook;
  ExternalReference debug_hook_active =
      ExternalReference::debug_hook_on_function_call_address(isolate());
  cmpb(Operand::StaticVariable(debug_hook_active), Immediate(0));
  j(equal, &skip_hook);
  {
    // Enter an internal frame unless the caller already set one up.
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      SmiTag(expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    // |fun| is pushed twice: once as the runtime argument, once to
    // survive the call (popped back into |fun| below).
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugOnFunctionCall);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg());
    }
  }
  bind(&skip_hook);
}
   1895 
   1896 
// Invokes the code entry of |function| (must be edi) after the optional
// debug-hook check and the argument-count adaption in InvokePrologue.
// |new_target| must be edx when valid; otherwise edx is loaded with
// undefined.
void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(edi));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(edx));

  if (call_wrapper.NeedsDebugHookCheck()) {
    CheckDebugHook(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    mov(edx, isolate()->factory()->undefined_value());
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 Label::kNear, call_wrapper);
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Operand code = FieldOperand(function, JSFunction::kCodeEntryOffset);
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(code);
    }
    bind(&done);
  }
}
   1936 
   1937 
// Invokes |fun| (must be edi), loading the expected argument count from
// the function's SharedFunctionInfo into ebx and the function context
// into esi before dispatching to InvokeFunctionCode.
void MacroAssembler::InvokeFunction(Register fun, Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);  // The formal parameter count is stored as a Smi.

  ParameterCount expected(ebx);
  InvokeFunctionCode(edi, new_target, expected, actual, flag, call_wrapper);
}
   1954 
   1955 
// Invokes |fun| (must be edi) with a caller-supplied expected argument
// count and no new.target; loads the function context into esi.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  InvokeFunctionCode(edi, no_reg, expected, actual, flag, call_wrapper);
}
   1969 
   1970 
// Convenience overload: materializes |function| into edi, then invokes
// it via the register-based InvokeFunction above.
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  LoadHeapObject(edi, function);
  InvokeFunction(edi, expected, actual, flag, call_wrapper);
}
   1979 
   1980 
// Loads into |dst| the context |context_chain_length| levels up the
// context chain from the current context in esi (0 yields the current
// context itself).
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context.  Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, kVariableResolvedToWithContext);
  }
}
   2005 
   2006 
// Loads the global proxy of the current native context into |dst|.
void MacroAssembler::LoadGlobalProxy(Register dst) {
  mov(dst, NativeContextOperand());
  mov(dst, ContextOperand(dst, Context::GLOBAL_PROXY_INDEX));
}
   2011 
// Loads into |function| the native-context slot at |index|.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the native context from the current context.
  mov(function, NativeContextOperand());
  // Load the function from the native context.
  mov(function, ContextOperand(function, index));
}
   2018 
   2019 
// Loads |function|'s initial map into |map|; in debug code, verifies
// that the loaded value really is a map (global functions are expected
// to always have an initial map).
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map.  The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}
   2033 
   2034 
// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}
   2040 
   2041 
// Immediate variant: stores |src| into the safepoint slot for |dst|.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}
   2045 
   2046 
// Loads into |dst| the value saved in the safepoint slot for |src|.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}
   2050 
   2051 
// Returns the stack operand addressing |reg|'s safepoint slot,
// relative to the current esp.
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
   2055 
   2056 
   2057 int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
   2058   // The registers are pushed starting with the lowest encoding,
   2059   // which means that lowest encodings are furthest away from
   2060   // the stack pointer.
   2061   DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
   2062   return kNumSafepointRegisters - reg_code - 1;
   2063 }
   2064 
   2065 
// Materializes the heap object handle |object| into |result|.
void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  mov(result, object);
}
   2070 
   2071 
// Compares |reg| against the heap object handle |object|.
void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
  cmp(reg, object);
}
   2075 
// Pushes the heap object handle |object| onto the stack.
void MacroAssembler::PushHeapObject(Handle<HeapObject> object) { Push(object); }
   2077 
// Compares |value| against the value held by the weak cell |cell|.
// Clobbers |scratch| with the cell itself.
void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
                                  Register scratch) {
  mov(scratch, cell);
  cmp(value, FieldOperand(scratch, WeakCell::kValueOffset));
}
   2083 
   2084 
// Loads the value held by the weak cell |cell| into |value|.
void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
  mov(value, cell);
  mov(value, FieldOperand(value, WeakCell::kValueOffset));
}
   2089 
   2090 
// Loads the weak cell's value into |value| and jumps to |miss| when the
// cell has been cleared (a cleared cell yields a Smi).
void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  GetWeakValue(value, cell);
  JumpIfSmi(value, miss);
}
   2096 
   2097 
// Plain return: pops the return address and drops no arguments.
void MacroAssembler::Ret() {
  ret(0);
}
   2101 
   2102 
// Returns and drops |bytes_dropped| bytes of stack arguments. When the
// amount does not fit the 16-bit immediate of ret, the return address is
// temporarily held in |scratch| while esp is adjusted manually.
void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    add(esp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}
   2113 
   2114 
// Debug-only check that the x87 FPU register stack currently holds
// exactly |depth| items; aborts with kUnexpectedFPUStackDepthAfterInstruction
// otherwise. Preserves eax.
void MacroAssembler::VerifyX87StackDepth(uint32_t depth) {
  // Turn off the stack depth check when serializer is enabled to reduce the
  // code size.
  if (serializer_enabled()) return;
  // Make sure the floating point stack is either empty or has depth items.
  DCHECK(depth <= 7);
  // This is very expensive.
  DCHECK(FLAG_debug_code && FLAG_enable_slow_asserts);

  // The top-of-stack (tos) is 7 if there is one item pushed.
  int tos = (8 - depth) % 8;
  const int kTopMask = 0x3800;  // TOP field of the x87 status word (bits 11-13).
  push(eax);
  fwait();
  fnstsw_ax();  // Read the FPU status word into ax.
  and_(eax, kTopMask);
  shr(eax, 11);  // Shift TOP down to bits 0-2 for comparison.
  cmp(eax, Immediate(tos));
  Check(equal, kUnexpectedFPUStackDepthAfterInstruction);
  fnclex();  // Clear any pending FPU exceptions raised above.
  pop(eax);
}
   2137 
   2138 
   2139 void MacroAssembler::Drop(int stack_elements) {
   2140   if (stack_elements > 0) {
   2141     add(esp, Immediate(stack_elements * kPointerSize));
   2142   }
   2143 }
   2144 
   2145 
   2146 void MacroAssembler::Move(Register dst, Register src) {
   2147   if (!dst.is(src)) {
   2148     mov(dst, src);
   2149   }
   2150 }
   2151 
   2152 
   2153 void MacroAssembler::Move(Register dst, const Immediate& x) {
   2154   if (x.is_zero() && RelocInfo::IsNone(x.rmode_)) {
   2155     xor_(dst, dst);  // Shorter than mov of 32-bit immediate 0.
   2156   } else {
   2157     mov(dst, x);
   2158   }
   2159 }
   2160 
   2161 
// Stores the immediate |x| into the memory operand |dst|.
void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
   2165 
   2166 
// Emulates LZCNT (count leading zeros) with bsr, since the instruction
// itself is not assumed to be available. For src == 0 the result is 32.
void MacroAssembler::Lzcnt(Register dst, const Operand& src) {
  // TODO(intel): Add support for LZCNT (with ABM/BMI1).
  Label not_zero_src;
  bsr(dst, src);  // dst = index of highest set bit; ZF set when src == 0.
  j(not_zero, &not_zero_src, Label::kNear);
  Move(dst, Immediate(63));  // 63^31 == 32
  bind(&not_zero_src);
  xor_(dst, Immediate(31));  // for x in [0..31], 31^x == 31-x.
}
   2176 
   2177 
// Emulates TZCNT (count trailing zeros) with bsf, since the instruction
// itself is not assumed to be available. For src == 0 the result is 32.
void MacroAssembler::Tzcnt(Register dst, const Operand& src) {
  // TODO(intel): Add support for TZCNT (with ABM/BMI1).
  Label not_zero_src;
  bsf(dst, src);  // dst = index of lowest set bit; ZF set when src == 0.
  j(not_zero, &not_zero_src, Label::kNear);
  Move(dst, Immediate(32));  // The result of tzcnt is 32 if src = 0.
  bind(&not_zero_src);
}
   2186 
   2187 
// Population count is not implemented for this port: hits UNREACHABLE()
// if codegen ever requests it.
void MacroAssembler::Popcnt(Register dst, const Operand& src) {
  // TODO(intel): Add support for POPCNT (with POPCNT)
  // if (CpuFeatures::IsSupported(POPCNT)) {
  //   CpuFeatureScope scope(this, POPCNT);
  //   popcnt(dst, src);
  //   return;
  // }
  UNREACHABLE();
}
   2197 
   2198 
// Stores |value| into the stats counter, when native counters are enabled.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}
   2204 
   2205 
// Adds |value| (> 0) to the stats counter, when native counters are
// enabled; uses inc for the common value == 1 case.
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}
   2217 
   2218 
// Subtracts |value| (> 0) from the stats counter, when native counters
// are enabled; uses dec for the common value == 1 case.
void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}
   2230 
   2231 
// Conditionally increments the stats counter when |cc| holds. EFLAGS are
// saved/restored around the update so the counter arithmetic does not
// clobber the caller's condition codes.
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
   2245 
   2246 
// Conditionally decrements the stats counter when |cc| holds. EFLAGS are
// saved/restored around the update so the counter arithmetic does not
// clobber the caller's condition codes.
void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
   2260 
   2261 
// Debug-code-only version of Check: emits nothing in release codegen.
void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
  if (emit_debug_code()) Check(cc, reason);
}
   2265 
   2266 
// Debug-code check that |elements| has one of the fast elements backing
// store maps (fixed array, fixed double array, or copy-on-write array);
// aborts otherwise.
void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}
   2284 
   2285 
// Aborts with |reason| unless condition |cc| holds.
void MacroAssembler::Check(Condition cc, BailoutReason reason) {
  Label L;
  j(cc, &L);
  Abort(reason);
  // will not return here
  bind(&L);
}
   2293 
   2294 
// Emits a runtime check that esp is aligned to the platform's activation
// frame alignment; traps with int3 on misalignment. No code is emitted
// when the required alignment is a single pointer.
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
   2308 
   2309 
// Emits a call to the Abort builtin with |reason| (as a Smi in edx) and
// never returns. In debug builds the reason is also emitted as a code
// comment, and FLAG_trap_on_abort turns the whole thing into an int3.
void MacroAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  // Check if Abort() has already been initialized.
  DCHECK(isolate()->builtins()->Abort()->IsHeapObject());

  Move(edx, Smi::FromInt(static_cast<int>(reason)));

  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    // (The FrameScope makes this branch differ from the one below even
    // though both emit the same Call.)
    FrameScope scope(this, StackFrame::NONE);
    Call(isolate()->builtins()->Abort(), RelocInfo::CODE_TARGET);
  } else {
    Call(isolate()->builtins()->Abort(), RelocInfo::CODE_TARGET);
  }
  // will not return here
  int3();
}
   2341 
   2342 
// Loads the descriptor array of |map| into |descriptors|.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}
   2347 
   2348 
// Extracts the number-of-own-descriptors field of |map| into |dst|.
void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}
   2353 
   2354 
// Loads into |dst| the getter or setter (per |accessor|) of the
// AccessorPair stored at |accessor_index| in |holder|'s descriptors.
void MacroAssembler::LoadAccessor(Register dst, Register holder,
                                  int accessor_index,
                                  AccessorComponent accessor) {
  mov(dst, FieldOperand(holder, HeapObject::kMapOffset));
  LoadInstanceDescriptors(dst, dst);
  mov(dst, FieldOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
  int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
                                           : AccessorPair::kSetterOffset;
  mov(dst, FieldOperand(dst, offset));
}
   2365 
// Jumps to |failure| unless both |object1| and |object2| are sequential
// one-byte strings. Clobbers |scratch1| and |scratch2|.
void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register object1,
                                                           Register object2,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that both objects are not smis.
  STATIC_ASSERT(kSmiTag == 0);
  // ANDing the objects keeps the Smi tag bit clear only if both are clear,
  // so one smi check covers both values.
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat one-byte strings.
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  // Interleave bits from both instance types and compare them in one check.
  const int kShift = 8;
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << kShift));
  and_(scratch1, kFlatOneByteStringMask);
  and_(scratch2, kFlatOneByteStringMask);
  shl(scratch2, kShift);
  or_(scratch1, scratch2);
  cmp(scratch1, kFlatOneByteStringTag | (kFlatOneByteStringTag << kShift));
  j(not_equal, failure);
}
   2398 
   2399 
// Jumps to |not_unique_name| unless the instance type in |operand| is a
// unique name: an internalized string, or a symbol.
void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  // Zero in both mask bits means: a string that is internalized.
  test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  j(zero, &succeed);
  cmpb(operand, Immediate(SYMBOL_TYPE));
  j(not_equal, not_unique_name, distance);

  bind(&succeed);
}
   2412 
   2413 
// Debug check for a sequential-string character store: verifies that
// |string| is a heap object whose representation/encoding matches
// |encoding_mask| and that the untagged |index| is within bounds.
// |value| is saved and restored around its use as a scratch register;
// |index| is temporarily Smi-tagged and restored at the end.
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  JumpIfNotSmi(string, &is_object, Label::kNear);
  Abort(kNonObject);  // A smi cannot be a string.
  bind(&is_object);

  push(value);
  mov(value, FieldOperand(string, HeapObject::kMapOffset));
  movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));

  and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmp(value, Immediate(encoding_mask));
  pop(value);
  Check(equal, kUnexpectedStringType);

  // The index is assumed to be untagged coming in, tag it to compare with the
  // string length without using a temp register, it is restored at the end of
  // this function.
  SmiTag(index);
  Check(no_overflow, kIndexIsTooLarge);

  cmp(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  cmp(index, Immediate(Smi::kZero));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index
  SmiUntag(index);
}
   2447 
   2448 
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  // Reserves stack space for |num_arguments| pointer-sized argument slots and
  // aligns esp to the OS activation frame alignment.  Pairs with
  // CallCFunction, which undoes the adjustment after the call.
  int frame_alignment = base::OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(esp, -frame_alignment);
    // Save the original esp in the extra slot just past the arguments so
    // CallCFunction can restore it with a single load.
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}
   2463 
   2464 
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Materializes the C function's address and dispatches to the
  // register-based overload.
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}
   2471 
   2472 
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  // Calls the C function whose address is in |function| and pops the
  // argument area set up by PrepareCallCFunction.
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (base::OS::ActivationFrameAlignment() != 0) {
    // The frame was aligned: reload the original esp that
    // PrepareCallCFunction stored in the slot past the arguments.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    // No alignment was applied, so simply popping the arguments suffices.
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}
   2488 
   2489 
   2490 #ifdef DEBUG
   2491 bool AreAliased(Register reg1,
   2492                 Register reg2,
   2493                 Register reg3,
   2494                 Register reg4,
   2495                 Register reg5,
   2496                 Register reg6,
   2497                 Register reg7,
   2498                 Register reg8) {
   2499   int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
   2500       reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
   2501       reg7.is_valid() + reg8.is_valid();
   2502 
   2503   RegList regs = 0;
   2504   if (reg1.is_valid()) regs |= reg1.bit();
   2505   if (reg2.is_valid()) regs |= reg2.bit();
   2506   if (reg3.is_valid()) regs |= reg3.bit();
   2507   if (reg4.is_valid()) regs |= reg4.bit();
   2508   if (reg5.is_valid()) regs |= reg5.bit();
   2509   if (reg6.is_valid()) regs |= reg6.bit();
   2510   if (reg7.is_valid()) regs |= reg7.bit();
   2511   if (reg8.is_valid()) regs |= reg8.bit();
   2512   int n_of_non_aliasing_regs = NumRegs(regs);
   2513 
   2514   return n_of_valid_regs != n_of_non_aliasing_regs;
   2515 }
   2516 #endif
   2517 
   2518 
CodePatcher::CodePatcher(Isolate* isolate, byte* address, int size)
    : address_(address),
      size_(size),
      masm_(isolate, address, size + Assembler::kGap, CodeObjectRequired::kNo) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
   2528 
   2529 
CodePatcher::~CodePatcher() {
  // Flush the instruction cache over the patched region and verify that
  // exactly |size_| bytes of instructions were emitted.
  // Indicate that code has changed.
  Assembler::FlushICache(masm_.isolate(), address_, size_);

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
   2538 
   2539 
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  // Tests the flags word of the memory chunk (page) containing |object|
  // against |mask| and jumps to |condition_met| when the test satisfies |cc|.
  // Clobbers |scratch|.
  DCHECK(cc == zero || cc == not_zero);
  if (scratch.is(object)) {
    // In place: mask off the low bits to get the page start.
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Use a byte-sized test when the mask fits in one byte (shorter encoding).
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
   2561 
   2562 
void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  // Like CheckPageFlag, but for a compile-time-known map: the page's flags
  // address is baked into the code, so no scratch register is needed.
  DCHECK(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  DCHECK(!serializer_enabled());  // Serializer cannot match page_flags.
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  DCHECK(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  // Use a byte-sized test when the mask fits in one byte (shorter encoding).
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), Immediate(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
   2584 
   2585 
void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  // Jumps to |on_black| if |object|'s two mark bits match the black bit
  // pattern "11".  Clobbers both scratch registers and ecx (via HasColor).
  HasColor(object, scratch0, scratch1, on_black, on_black_near, 1,
           1);  // kBlackBitPattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
}
   2595 
   2596 
void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  // Jumps to |has_color| if the two consecutive mark bits for |object| equal
  // (first_bit, second_bit).  Clobbers both scratch registers and ecx (via
  // GetMarkBits).
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  // Test the first mark bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  // If the shift carried out, the second bit lives in the next bitmap cell.
  j(zero, &word_boundary, Label::kNear);
  // Test the second mark bit in the same bitmap cell.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  // The first bit was the top bit of its cell; the second bit is the lowest
  // bit of the following cell.
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize),
         Immediate(1));

  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}
   2624 
   2625 
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  // For the object at |addr_reg|, computes into |bitmap_reg| an address such
  // that Operand(bitmap_reg, MemoryChunk::kHeaderSize) is the object's
  // mark-bitmap cell, and into |mask_reg| a single-bit mask selecting the
  // object's first mark bit within that cell.  Clobbers ecx.
  DCHECK(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  // Start of the page containing the object.
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  // Cell-aligned byte offset of the object's bitmap cell within the page.
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  // Bit index of the object's mark bit within its cell.
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);  // mask = 1 << bit index (shift count taken from cl).
}
   2646 
   2647 
   2648 void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
   2649                                  Register mask_scratch, Label* value_is_white,
   2650                                  Label::Distance distance) {
   2651   DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
   2652   GetMarkBits(value, bitmap_scratch, mask_scratch);
   2653 
   2654   // If the value is black or grey we don't need to do anything.
   2655   DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
   2656   DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
   2657   DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
   2658   DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
   2659 
   2660   // Since both black and grey have a 1 in the first position and white does
   2661   // not have a 1 there we only need to check one bit.
   2662   test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
   2663   j(zero, value_is_white, Label::kNear);
   2664 }
   2665 
   2666 
void MacroAssembler::EnumLength(Register dst, Register map) {
  // Loads the enum-cache length from |map|'s bit field 3 into |dst| as a Smi.
  // EnumLengthBits occupy the low bits, so masking alone extracts the value.
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}
   2673 
   2674 
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  // Walks the prototype chain of the object in eax and jumps to
  // |call_runtime| unless the receiver has a valid enum cache, every other
  // object on the chain has an empty enum cache, and no object has elements.
  // Clobbers ebx, ecx and edx.
  Label next, start;
  mov(ecx, eax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(kInvalidEnumCacheSentinel)));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::kZero));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register ecx contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(equal, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  cmp(ecx, isolate()->factory()->empty_slow_element_dictionary());
  j(not_equal, call_runtime);

  bind(&no_elements);
  // Advance to the prototype; a null prototype terminates the walk.
  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}
   2715 
   2716 
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  // Checks whether an AllocationMemento immediately follows the JSArray in
  // |receiver_reg|.  Either jumps to |no_memento_found| or falls through
  // with the flags set by the final map comparison (equal => memento found).
  // Clobbers |scratch_reg|.
  Label map_check;
  Label top_check;
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  const int kMementoMapOffset = JSArray::kSize - kHeapObjectTag;
  const int kMementoLastWordOffset =
      kMementoMapOffset + AllocationMemento::kSize - kPointerSize;

  // Bail out if the object is not in new space.
  JumpIfNotInNewSpace(receiver_reg, scratch_reg, no_memento_found);
  // If the object is in new space, we need to check whether it is on the same
  // page as the current top.
  lea(scratch_reg, Operand(receiver_reg, kMementoLastWordOffset));
  xor_(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  test(scratch_reg, Immediate(~Page::kPageAlignmentMask));
  j(zero, &top_check);
  // The object is on a different page than allocation top. Bail out if the
  // object sits on the page boundary as no memento can follow and we cannot
  // touch the memory following it.
  lea(scratch_reg, Operand(receiver_reg, kMementoLastWordOffset));
  xor_(scratch_reg, receiver_reg);
  test(scratch_reg, Immediate(~Page::kPageAlignmentMask));
  j(not_zero, no_memento_found);
  // Continue with the actual map check.
  jmp(&map_check);
  // If top is on the same page as the current object, we need to check whether
  // we are below top.
  bind(&top_check);
  lea(scratch_reg, Operand(receiver_reg, kMementoLastWordOffset));
  cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  j(greater_equal, no_memento_found);
  // Memento map check.
  bind(&map_check);
  mov(scratch_reg, Operand(receiver_reg, kMementoMapOffset));
  cmp(scratch_reg, Immediate(isolate()->factory()->allocation_memento_map()));
}
   2757 
void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  // Emits code computing dividend / divisor, truncated toward zero, using
  // the signed magic-number multiplication technique (Hacker's Delight,
  // ch. 10).  The quotient is left in edx; eax is clobbered and |dividend|
  // is preserved.
  DCHECK(!dividend.is(eax));
  DCHECK(!dividend.is(edx));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  mov(eax, Immediate(mag.multiplier));
  imul(dividend);  // edx:eax = dividend * multiplier; high half ends in edx.
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  // Correction terms for when the multiplier had to be taken mod 2^32.
  if (divisor > 0 && neg) add(edx, dividend);
  if (divisor < 0 && !neg && mag.multiplier > 0) sub(edx, dividend);
  if (mag.shift > 0) sar(edx, mag.shift);
  // Add the dividend's sign bit to round the quotient toward zero.
  mov(eax, dividend);
  shr(eax, 31);
  add(edx, eax);
}
   2773 
   2774 
   2775 }  // namespace internal
   2776 }  // namespace v8
   2777 
   2778 #endif  // V8_TARGET_ARCH_X87
   2779