Home | History | Annotate | Download | only in x87
      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #if V8_TARGET_ARCH_X87
      6 
      7 #include "src/base/bits.h"
      8 #include "src/base/division-by-constant.h"
      9 #include "src/bootstrapper.h"
     10 #include "src/codegen.h"
     11 #include "src/debug/debug.h"
     12 #include "src/runtime/runtime.h"
     13 #include "src/x87/frames-x87.h"
     14 #include "src/x87/macro-assembler-x87.h"
     15 
     16 namespace v8 {
     17 namespace internal {
     18 
     19 // -------------------------------------------------------------------------
     20 // MacroAssembler implementation.
     21 
// Constructs a MacroAssembler emitting into |buffer| of |size| bytes.
// When |create_code_object| is kYes, |code_object_| is initialized to a
// fresh handle holding the undefined value.
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}
     32 
     33 
// Loads |src| into |dst|, choosing a width- and signedness-appropriate move
// for the given representation.  Doubles are not supported here.
void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    movsx_b(dst, src);  // Sign-extend 8-bit value.
  } else if (r.IsUInteger8()) {
    movzx_b(dst, src);  // Zero-extend 8-bit value.
  } else if (r.IsInteger16()) {
    movsx_w(dst, src);  // Sign-extend 16-bit value.
  } else if (r.IsUInteger16()) {
    movzx_w(dst, src);  // Zero-extend 16-bit value.
  } else {
    mov(dst, src);      // Full 32-bit move.
  }
}
     48 
     49 
// Stores |src| to |dst| with a width matching the representation.  For full
// word stores, debug builds additionally assert the smi/heap-object tag
// matches the declared representation.  Doubles are not supported here.
void MacroAssembler::Store(Register src, const Operand& dst, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    mov_b(dst, src);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    mov_w(dst, src);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    mov(dst, src);
  }
}
     65 
     66 
// Loads the root-list entry |index| into |destination|.  Constant roots are
// embedded directly; otherwise the value is read from the roots array, using
// |destination| itself as the scratch index register.
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
    mov(destination, isolate()->heap()->root_handle(index));
    return;
  }
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(destination, Immediate(index));
  mov(destination, Operand::StaticArray(destination,
                                        times_pointer_size,
                                        roots_array_start));
}
     79 
     80 
// Stores |source| into the root-list slot |index|.  Only roots that are
// legal to overwrite after initialization may be written.  |scratch| is
// clobbered (it holds the array index).
void MacroAssembler::StoreRoot(Register source,
                               Register scratch,
                               Heap::RootListIndex index) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
      source);
}
     91 
     92 
// Compares |with| against the root-list entry |index|, setting EFLAGS.
// Works for non-constant roots; |scratch| is clobbered.
void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  cmp(with, Operand::StaticArray(scratch,
                                times_pointer_size,
                                roots_array_start));
}
    103 
    104 
// Compares |with| against a constant root embedded as an immediate handle.
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  cmp(with, isolate()->heap()->root_handle(index));
}
    109 
    110 
// Memory-operand variant: compares |with| against a constant root.
void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  cmp(with, isolate()->heap()->root_handle(index));
}
    116 
    117 
// Pushes a constant root onto the stack as an immediate handle.
void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Push(isolate()->heap()->root_handle(index));
}
    122 
#define REG(Name) \
  { Register::kCode_##Name }

// Caller-saved registers spilled/restored by Push/PopCallerSaved below.
static const Register saved_regs[] = {REG(eax), REG(ecx), REG(edx)};

#undef REG

// Number of entries in saved_regs.
static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);
    131 
    132 void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
    133                                      Register exclusion1, Register exclusion2,
    134                                      Register exclusion3) {
    135   // We don't allow a GC during a store buffer overflow so there is no need to
    136   // store the registers in any particular way, but we do have to store and
    137   // restore them.
    138   for (int i = 0; i < kNumberOfSavedRegs; i++) {
    139     Register reg = saved_regs[i];
    140     if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
    141       push(reg);
    142     }
    143   }
    144   if (fp_mode == kSaveFPRegs) {
    145     // Save FPU state in m108byte.
    146     sub(esp, Immediate(108));
    147     fnsave(Operand(esp, 0));
    148   }
    149 }
    150 
    151 void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
    152                                     Register exclusion2, Register exclusion3) {
    153   if (fp_mode == kSaveFPRegs) {
    154     // Restore FPU state in m108byte.
    155     frstor(Operand(esp, 0));
    156     add(esp, Immediate(108));
    157   }
    158 
    159   for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    160     Register reg = saved_regs[i];
    161     if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
    162       pop(reg);
    163     }
    164   }
    165 }
    166 
// Jumps to |condition_met| if |object|'s page flags match (or don't match,
// per |cc|) the new-space mask (from-space or to-space bit set).
void MacroAssembler::InNewSpace(Register object, Register scratch, Condition cc,
                                Label* condition_met,
                                Label::Distance distance) {
  const int mask =
      (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
  CheckPageFlag(object, scratch, mask, cc, condition_met, distance);
}
    174 
    175 
// Records |addr| in the store buffer and, when the buffer fills up, calls the
// StoreBufferOverflowStub.  Depending on |and_then|, either returns from the
// caller's frame (kReturnAtEnd) or falls through (kFallThroughAtEnd).
// |scratch| is clobbered.
void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr, Register scratch, SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    // Only new-space objects need remembered-set entries here.
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Call stub on end of buffer.
  // Check for end of buffer.
  test(scratch, Immediate(StoreBuffer::kStoreBufferMask));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    // Zero masked bits means the top wrapped onto a buffer boundary.
    j(equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    j(not_equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
    218 
    219 
// Clamps the value at the x87 top-of-stack to the uint8 range [0, 255] and
// leaves the result in |result_reg|.  Out-of-range values saturate; if the
// fist conversion raises #IA (e.g. NaN), the value's sign decides 0 vs 255.
void MacroAssembler::ClampTOSToUint8(Register result_reg) {
  Label done, conv_failure;
  sub(esp, Immediate(kPointerSize));
  fnclex();  // Clear pending FPU exceptions so X87CheckIA sees only ours.
  fist_s(Operand(esp, 0));
  pop(result_reg);
  X87CheckIA();
  j(equal, &conv_failure, Label::kNear);
  // Any bits above the low byte mean the value is out of [0, 255].
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(sign, result_reg);        // 1 if negative, 0 if positive.
  sub(result_reg, Immediate(1));  // 0 if negative, -1 if positive.
  and_(result_reg, Immediate(255));  // 0 if negative, 255 if positive.
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  fnclex();
  fldz();
  fld(1);
  FCmp();
  setcc(below, result_reg);  // 1 if negative, 0 if positive.
  dec_b(result_reg);         // 0 if negative, 255 if positive.
  bind(&done);
}
    243 
    244 
// Clamps the integer in |reg| to [0, 255]: negatives become 0, values above
// 255 become 255, in-range values are unchanged.
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  // In range iff no bits above the low byte are set.
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
    253 
    254 
// Truncates the double at [input_reg + offset] to an int32 in |result_reg|
// by calling the DoubleToIStub (handles all values, including out-of-range).
void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}
    261 
    262 
// Truncates the x87 top-of-stack value to an int32 in |result_reg| by
// spilling it to the stack and invoking the slow-path stub.  The TOS value
// is not popped (fst_d, not fstp_d).
void MacroAssembler::TruncateX87TOSToI(Register result_reg) {
  sub(esp, Immediate(kDoubleSize));
  fst_d(MemOperand(esp, 0));
  SlowTruncateToI(result_reg, esp, 0);
  add(esp, Immediate(kDoubleSize));
}
    269 
    270 
// Converts the x87 top-of-stack value to an int32 in |result_reg|, jumping
// to |lost_precision| if the conversion is inexact, to |is_nan| for NaN, and
// (optionally) to |minus_zero| for -0.0 when FAIL_ON_MINUS_ZERO is set.
void MacroAssembler::X87TOSToI(Register result_reg,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision, Label* is_nan,
                               Label* minus_zero, Label::Distance dst) {
  Label done;
  sub(esp, Immediate(kPointerSize));
  fld(0);  // Duplicate TOS so the original survives the round trip.
  fist_s(MemOperand(esp, 0));
  fild_s(MemOperand(esp, 0));  // Reload as double to compare with original.
  pop(result_reg);
  FCmp();
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    test(result_reg, Operand(result_reg));
    j(not_zero, &done, Label::kNear);
    // To check for minus zero, we load the value again as float, and check
    // if that is still 0.
    sub(esp, Immediate(kPointerSize));
    fst_s(MemOperand(esp, 0));
    pop(result_reg);
    // A non-zero bit pattern here is the sign bit of -0.0.
    test(result_reg, Operand(result_reg));
    j(not_zero, minus_zero, dst);
  }
  bind(&done);
}
    297 
    298 
    299 void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
    300                                            Register input_reg) {
    301   Label done, slow_case;
    302 
    303   SlowTruncateToI(result_reg, input_reg);
    304   bind(&done);
    305 }
    306 
    307 
// Loads the 32-bit value at |src|, interpreted as *unsigned*, onto the x87
// stack.  fild_s treats the memory word as signed, so when the sign bit is
// set the 2^32 bias is added back to recover the unsigned value.
void MacroAssembler::LoadUint32NoSSE2(const Operand& src) {
  Label done;
  push(src);
  fild_s(Operand(esp, 0));
  cmp(src, Immediate(0));
  j(not_sign, &done, Label::kNear);
  ExternalReference uint32_bias =
        ExternalReference::address_of_uint32_bias();
  fld_d(Operand::StaticVariable(uint32_bias));
  faddp(1);
  bind(&done);
  add(esp, Immediate(kPointerSize));
}
    321 
    322 
// Write barrier for a store of |value| into the FixedArray |object| at smi
// index |index|.  Computes the slot address into |index| (clobbering it) and
// delegates to RecordWrite.  |value| and |index| are clobbered.
void MacroAssembler::RecordWriteArray(
    Register object, Register value, Register index, SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action, SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    DCHECK_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(index, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
    357 
    358 
// Write barrier for a store of |value| into the in-object field at |offset|
// of |object|.  Computes the field address into |dst| (clobbering it) and
// delegates to RecordWrite.  |value| and |dst| are clobbered.
void MacroAssembler::RecordWriteField(
    Register object, int offset, Register value, Register dst,
    SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action,
    SmiCheck smi_check, PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    // Verify the computed slot address is pointer-aligned.
    Label ok;
    test_b(dst, Immediate((1 << kPointerSizeLog2) - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(dst, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
    397 
    398 
// Write barrier for a store of |map| into |object|'s map slot.  Both scratch
// registers are clobbered.  Maps are never in new space, so a single check
// of the map's page flags suffices.
void MacroAssembler::RecordWriteForMap(Register object, Handle<Map> map,
                                       Register scratch1, Register scratch2,
                                       SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    // Verify the map slot address is pointer-aligned.
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, Immediate((1 << kPointerSizeLog2) - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (!FLAG_incremental_marking) {
    return;
  }

  // Compute the address.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set.  This optimization
  // relies on the fact that maps can never be in new space.
  DCHECK(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET,
                       save_fp);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
    456 
    457 
// Generic write barrier: records the store of |value| into the slot at
// |address| inside |object|.  Skips the barrier for smis and for pages whose
// flags show no interesting pointers.  |address| and |value| are clobbered.
void MacroAssembler::RecordWrite(
    Register object, Register address, Register value, SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action, SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // The slot must already contain |value|: the barrier runs after the store.
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
    521 
// Write barrier for storing |code_entry| into a JSFunction's code-entry
// field.  Code entries are always in old space, so no remembered-set update
// is needed; when incremental marking is on, the marker's C function is
// called directly.  |scratch| is clobbered; |js_function| and |code_entry|
// are preserved via push/pop.
void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
                                               Register code_entry,
                                               Register scratch) {
  const int offset = JSFunction::kCodeEntryOffset;

  // Since a code entry (value) is always in old space, we don't need to update
  // remembered set. If incremental marking is off, there is nothing for us to
  // do.
  if (!FLAG_incremental_marking) return;

  DCHECK(!js_function.is(code_entry));
  DCHECK(!js_function.is(scratch));
  DCHECK(!code_entry.is(scratch));
  AssertNotSmi(js_function);

  if (emit_debug_code()) {
    // The field must already contain |code_entry|.
    Label ok;
    lea(scratch, FieldOperand(js_function, offset));
    cmp(code_entry, Operand(scratch, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  CheckPageFlag(code_entry, scratch,
                MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
                Label::kNear);
  CheckPageFlag(js_function, scratch,
                MemoryChunk::kPointersFromHereAreInterestingMask, zero, &done,
                Label::kNear);

  // Save input registers.
  push(js_function);
  push(code_entry);

  const Register dst = scratch;
  lea(dst, FieldOperand(js_function, offset));

  // Save caller-saved registers.
  PushCallerSaved(kDontSaveFPRegs, js_function, code_entry);

  int argument_count = 3;
  PrepareCallCFunction(argument_count, code_entry);
  mov(Operand(esp, 0 * kPointerSize), js_function);
  mov(Operand(esp, 1 * kPointerSize), dst);  // Slot.
  mov(Operand(esp, 2 * kPointerSize),
      Immediate(ExternalReference::isolate_address(isolate())));

  {
    AllowExternalCallThatCantCauseGC scope(this);
    CallCFunction(
        ExternalReference::incremental_marking_record_write_code_entry_function(
            isolate()),
        argument_count);
  }

  // Restore caller-saved registers.
  PopCallerSaved(kDontSaveFPRegs, js_function, code_entry);

  // Restore input registers.
  pop(code_entry);
  pop(js_function);

  bind(&done);
}
    591 
// Emits a call into the runtime's debugger-statement handler via the
// CEntryStub: eax holds the argument count (0), ebx the runtime function.
void MacroAssembler::DebugBreak() {
  Move(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kHandleDebuggerStatement,
                                       isolate())));
  CEntryStub ces(isolate(), 1);
  call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}
    599 
// Shifts the 64-bit value in the register pair high:low left by the constant
// |shift| (0..63).
void MacroAssembler::ShlPair(Register high, Register low, uint8_t shift) {
  if (shift >= 32) {
    // The whole high word comes from the low word; low becomes zero.
    mov(high, low);
    shl(high, shift - 32);
    xor_(low, low);
  } else {
    // shld shifts |high| left, filling vacated bits from |low|.
    shld(high, low, shift);
    shl(low, shift);
  }
}
    610 
// Shifts the 64-bit pair high:low left by the amount in cl.  The hardware
// masks the count to 5 bits, so counts >= 32 (bit 0x20 of ecx) are fixed up
// explicitly by moving low into high and zeroing low.
void MacroAssembler::ShlPair_cl(Register high, Register low) {
  shld_cl(high, low);
  shl_cl(low);
  Label done;
  test(ecx, Immediate(0x20));
  j(equal, &done, Label::kNear);
  mov(high, low);
  xor_(low, low);
  bind(&done);
}
    621 
    622 void MacroAssembler::ShrPair(Register high, Register low, uint8_t shift) {
    623   if (shift >= 32) {
    624     mov(low, high);
    625     shr(low, shift - 32);
    626     xor_(high, high);
    627   } else {
    628     shrd(high, low, shift);
    629     shr(high, shift);
    630   }
    631 }
    632 
// Logically shifts the 64-bit pair high:low right by the amount in cl.
// The hardware masks the count to 5 bits, so counts >= 32 (bit 0x20 of ecx)
// are fixed up explicitly by moving high into low and zeroing high.
void MacroAssembler::ShrPair_cl(Register high, Register low) {
  shrd_cl(low, high);
  shr_cl(high);
  Label done;
  test(ecx, Immediate(0x20));
  j(equal, &done, Label::kNear);
  mov(low, high);
  xor_(high, high);
  bind(&done);
}
    643 
    644 void MacroAssembler::SarPair(Register high, Register low, uint8_t shift) {
    645   if (shift >= 32) {
    646     mov(low, high);
    647     sar(low, shift - 32);
    648     sar(high, 31);
    649   } else {
    650     shrd(high, low, shift);
    651     sar(high, shift);
    652   }
    653 }
    654 
// Arithmetically shifts the 64-bit pair high:low right by the amount in cl.
// The hardware masks the count to 5 bits, so counts >= 32 (bit 0x20 of ecx)
// are fixed up by moving high into low and sign-filling high.
void MacroAssembler::SarPair_cl(Register high, Register low) {
  shrd_cl(low, high);
  sar_cl(high);
  Label done;
  test(ecx, Immediate(0x20));
  j(equal, &done, Label::kNear);
  mov(low, high);
  sar(high, 31);
  bind(&done);
}
    665 
// Returns true for plain (non-relocatable) immediates wide enough to embed
// attacker-chosen byte sequences; such values are obfuscated by SafeMove/
// SafePush using the JIT cookie.
bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
  static const int kMaxImmediateBits = 17;
  if (!RelocInfo::IsNone(x.rmode_)) return false;
  return !is_intn(x.x_, kMaxImmediateBits);
}
    671 
    672 
// Moves immediate |x| into |dst|.  Unsafe immediates are stored XORed with
// the JIT cookie and then un-XORed, so the raw bytes never appear in the
// instruction stream (JIT-spray mitigation).
void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Move(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Move(dst, x);
  }
}
    681 
    682 
// Pushes immediate |x|, applying the same JIT-cookie obfuscation as SafeMove
// for unsafe immediates (un-XORed in place on the stack).
void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}
    691 
    692 
// Loads |heap_object|'s map into |map| and compares its instance type
// against |type|, setting EFLAGS for the caller's conditional jump.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
    699 
    700 
// Compares the instance-type byte of |map| against |type|, setting EFLAGS.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset), Immediate(type));
}
    704 
    705 
// Jumps to |fail| unless |map|'s elements kind is one of the fast kinds
// (smi/object, packed or holey).  Relies on the fast kinds occupying the
// lowest values of the ElementsKind enum, asserted below.
void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);
}
    717 
    718 
// Jumps to |fail| unless |map|'s elements kind is FAST_ELEMENTS or
// FAST_HOLEY_ELEMENTS, i.e. strictly between the smi kinds (below) and the
// non-fast kinds (above).
void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);
}
    733 
    734 
// Jumps to |fail| unless |map|'s elements kind is FAST_SMI_ELEMENTS or
// FAST_HOLEY_SMI_ELEMENTS (the two lowest enum values).
void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(above, fail, distance);
}
    744 
    745 
    746 void MacroAssembler::StoreNumberToDoubleElements(
    747     Register maybe_number,
    748     Register elements,
    749     Register key,
    750     Register scratch,
    751     Label* fail,
    752     int elements_offset) {
    753   Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
    754   JumpIfSmi(maybe_number, &smi_value, Label::kNear);
    755 
    756   CheckMap(maybe_number,
    757            isolate()->factory()->heap_number_map(),
    758            fail,
    759            DONT_DO_SMI_CHECK);
    760 
    761   fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
    762   jmp(&done, Label::kNear);
    763 
    764   bind(&smi_value);
    765   // Value is a smi. Convert to a double and store.
    766   // Preserve original value.
    767   mov(scratch, maybe_number);
    768   SmiUntag(scratch);
    769   push(scratch);
    770   fild_s(Operand(esp, 0));
    771   pop(scratch);
    772   bind(&done);
    773   fstp_d(FieldOperand(elements, key, times_4,
    774                       FixedDoubleArray::kHeaderSize - elements_offset));
    775 }
    776 
    777 
// Compares |obj|'s map against the handle |map|, setting EFLAGS.
void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}
    781 
    782 
// Jumps to |fail| if |obj|'s map is not |map| (and, with DO_SMI_CHECK, also
// when |obj| is a smi and therefore has no map).
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}
    794 
    795 
// Jumps to |success| if |obj|'s map equals the map held by the weak |cell|;
// otherwise falls through (smis also fall through when DO_SMI_CHECK is set).
// Both scratch registers are clobbered.
void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
                                     Register scratch2, Handle<WeakCell> cell,
                                     Handle<Code> success,
                                     SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  mov(scratch1, FieldOperand(obj, HeapObject::kMapOffset));
  CmpWeakValue(scratch1, cell, scratch2);
  j(equal, success);

  bind(&fail);
}
    810 
    811 
// Loads |heap_object|'s map and instance type, tests the string mask, and
// returns the condition (zero) under which the object is a string.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
    821 
    822 
// Loads |heap_object|'s map and instance type and returns the condition
// (below_equal LAST_NAME_TYPE) under which the object is a name.
Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, Immediate(LAST_NAME_TYPE));
  return below_equal;
}
    831 
    832 
// Compares st(0) with st(1) and pops both, transferring the x87 condition
// codes into EFLAGS so ordinary j(cc, ...) can be used afterwards.
// eax is saved/restored around fnstsw_ax, which clobbers ax.
void MacroAssembler::FCmp() {
  fucompp();
  push(eax);
  fnstsw_ax();
  sahf();  // Copy AH (C0/C2/C3 condition bits) into EFLAGS.
  pop(eax);
}
    840 
    841 
// Classifies st(0) with fxam and leaves the "equal" condition set iff the
// value is minus zero; pops st(0). eax is preserved (pop does not touch
// EFLAGS, so the cmp result survives).
void MacroAssembler::FXamMinusZero() {
  fxam();
  push(eax);
  fnstsw_ax();
  // Keep C3|C2|C1|C0 (bits 14,10,9,8 of the status word).
  and_(eax, Immediate(0x4700));
  // For minus zero, C3 == 1 && C1 == 1.
  cmp(eax, Immediate(0x4200));
  pop(eax);
  fstp(0);  // Discard the classified value.
}
    852 
    853 
// Classifies st(0) with fxam and leaves ZF clear (not_zero) iff the value is
// negative, including -0.0; pops st(0). eax is preserved; the and_ result in
// EFLAGS survives the pop and fstp.
void MacroAssembler::FXamSign() {
  fxam();
  push(eax);
  fnstsw_ax();
  // For negative value (including -0.0), C1 == 1.
  and_(eax, Immediate(0x0200));
  pop(eax);
  fstp(0);  // Discard the classified value.
}
    863 
    864 
// Tests the FPU status word for an invalid-arithmetic (#IA) exception and
// leaves the "equal" condition set iff one occurred. eax is preserved.
void MacroAssembler::X87CheckIA() {
  push(eax);
  fnstsw_ax();
  // For #IA, IE == 1 && SF == 0 (IE is bit 0, SF is bit 6).
  and_(eax, Immediate(0x0041));
  cmp(eax, Immediate(0x0001));
  pop(eax);
}
    873 
    874 
// Sets the rounding-control field of the x87 FPU control word, leaving all
// other control bits unchanged. 0xF3FF clears bits 10-11 (the RC field), so
// |rc| is or-ed in directly.
// NOTE(review): the or_ does no shifting, so |rc| is presumably already
// positioned at bits 10-11 (e.g. 0x0400 for round-down) — verify callers.
// rc=00B, round to nearest.
// rc=01B, round down.
// rc=10B, round up.
// rc=11B, round toward zero.
void MacroAssembler::X87SetRC(int rc) {
  // Use a stack slot as scratch memory for fnstcw/fldcw.
  sub(esp, Immediate(kPointerSize));
  fnstcw(MemOperand(esp, 0));
  and_(MemOperand(esp, 0), Immediate(0xF3FF));
  or_(MemOperand(esp, 0), Immediate(rc));
  fldcw(MemOperand(esp, 0));
  add(esp, Immediate(kPointerSize));
}
    887 
    888 
// Replaces the entire x87 FPU control word with |cw| (unlike X87SetRC, which
// only touches the rounding field). Uses a pushed immediate as the memory
// operand fldcw requires.
void MacroAssembler::X87SetFPUCW(int cw) {
  RecordComment("-- X87SetFPUCW start --");
  push(Immediate(cw));
  fldcw(MemOperand(esp, 0));
  add(esp, Immediate(kPointerSize));
  RecordComment("-- X87SetFPUCW end--");
}
    896 
    897 
// Debug-mode check that |object| is a number: either a smi or a HeapNumber.
// Emits nothing unless --debug-code is on; aborts with kOperandNotANumber
// on failure.
void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);  // Smis are numbers; skip the map check.
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandNotANumber);
    bind(&ok);
  }
}
    908 
// Debug-mode check that |object| is NOT a number: aborts if it is a smi or a
// HeapNumber. Emits nothing unless --debug-code is on.
void MacroAssembler::AssertNotNumber(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsANumber);  // Fail if it is a smi.
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(not_equal, kOperandIsANumber);  // Fail if it is a HeapNumber.
  }
}
    918 
// Debug-mode check that |object| is a smi; aborts with kOperandIsNotASmi
// otherwise. Emits nothing unless --debug-code is on.
void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, kOperandIsNotASmi);
  }
}
    925 
    926 
// Debug-mode check that |object| is a string (not a smi, instance type below
// FIRST_NONSTRING_TYPE). |object| is preserved via push/pop; the flags set
// by CmpInstanceType survive the pop. Emits nothing unless --debug-code.
void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    push(object);  // Save |object|; it is clobbered with its map below.
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, kOperandIsNotAString);
  }
}
    938 
    939 
// Debug-mode check that |object| is a Name (not a smi, instance type
// <= LAST_NAME_TYPE). |object| is preserved via push/pop; the comparison
// flags survive the pop. Emits nothing unless --debug-code is on.
void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    push(object);  // Save |object|; it is clobbered with its map below.
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}
    951 
    952 
// Debug-mode check that |object| is a JSFunction. |object| is preserved via
// Push/Pop (CmpObjectType clobbers it with the map). Emits nothing unless
// --debug-code is on.
void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAFunction);
    Push(object);
    CmpObjectType(object, JS_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotAFunction);
  }
}
    963 
    964 
// Debug-mode check that |object| is a JSBoundFunction. |object| is preserved
// via Push/Pop (CmpObjectType clobbers it with the map). Emits nothing
// unless --debug-code is on.
void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotABoundFunction);
    Push(object);
    CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotABoundFunction);
  }
}
    975 
// Debug-mode check that |object| is a JSGeneratorObject. |object| is
// preserved via Push/Pop (CmpObjectType clobbers it with the map). Emits
// nothing unless --debug-code is on.
void MacroAssembler::AssertGeneratorObject(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAGeneratorObject);
    Push(object);
    CmpObjectType(object, JS_GENERATOR_OBJECT_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotAGeneratorObject);
  }
}
    986 
// Debug-mode check that |object| is a JSReceiver. Because receiver types
// occupy the top of the instance-type range (LAST_TYPE ==
// LAST_JS_RECEIVER_TYPE), a single above_equal comparison against
// FIRST_JS_RECEIVER_TYPE suffices. Emits nothing unless --debug-code.
void MacroAssembler::AssertReceiver(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAReceiver);
    Push(object);  // Save |object|; CmpObjectType clobbers it with the map.
    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
    CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, object);
    Pop(object);
    Check(above_equal, kOperandIsNotAReceiver);
  }
}
    998 
// Debug-mode check that |object| is either the undefined value or an
// AllocationSite (identified by its map, read from offset 0, i.e. the
// untagged map slot). Emits nothing unless --debug-code is on.
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    // FieldOperand is not used here: |object| is compared as a raw pointer
    // above, so the map is read at offset 0 from the tagged pointer minus
    // nothing — this matches how CompareRoot left the register.
    cmp(FieldOperand(object, 0),
        Immediate(isolate()->factory()->allocation_site_map()));
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}
   1011 
   1012 
// Debug-mode check that |object| is NOT a smi; aborts with kOperandIsASmi
// otherwise. Emits nothing unless --debug-code is on.
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmi);
  }
}
   1019 
// Emits the prologue for a stub frame: saved caller fp, new fp, and the
// frame type marker as a smi (read back by LeaveFrame's debug check).
void MacroAssembler::StubPrologue(StackFrame::Type type) {
  push(ebp);  // Caller's frame pointer.
  mov(ebp, esp);
  push(Immediate(Smi::FromInt(type)));
}
   1025 
   1026 
// Emits the standard JavaScript function prologue. Both arms are padded to
// exactly kNoCodeAgeSequenceLength bytes (enforced by the
// PredictableCodeSizeScope) so the code-aging mechanism can patch one form
// into the other in place.
void MacroAssembler::Prologue(bool code_pre_aging) {
  PredictableCodeSizeScope predictible_code_size_scope(this,
      kNoCodeAgeSequenceLength);
  if (code_pre_aging) {
      // Pre-age the code.
    call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
        RelocInfo::CODE_AGE_SEQUENCE);
    Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength);
  } else {
    push(ebp);  // Caller's frame pointer.
    mov(ebp, esp);
    push(esi);  // Callee's context.
    push(edi);  // Callee's JS function.
  }
}
   1042 
   1043 
// Loads the current function's type feedback vector into |vector| by
// following function -> literals array -> feedback vector, starting from the
// JS function slot of the current frame.
void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
  mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  mov(vector, FieldOperand(vector, JSFunction::kLiteralsOffset));
  mov(vector, FieldOperand(vector, LiteralsArray::kFeedbackVectorOffset));
}
   1049 
   1050 
// Overload taking a constant-pool flag; x87 has no out-of-line constant
// pool, so this variant must never be called.
void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // Out-of-line constant pool not implemented on x87.
  UNREACHABLE();
}
   1056 
   1057 
// Builds a typed stack frame: saved fp, frame-type smi, and — for INTERNAL
// frames — the code object. The debug check verifies the pushed code object
// has been patched away from the placeholder undefined value.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(Immediate(Smi::FromInt(type)));
  if (type == StackFrame::INTERNAL) {
    push(Immediate(CodeObject()));
  }
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}
   1070 
   1071 
// Tears down a frame created by EnterFrame/StubPrologue. In debug mode,
// verifies the frame-type marker in the frame matches |type| before leaving.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  leave();  // mov esp, ebp; pop ebp.
}
   1080 
   1081 
// Builds the fixed part of an exit frame (the frame used when calling out to
// C++) and publishes fp, context, and C function address into the isolate's
// top-level slots so the stack can be walked from C++.
// NOTE(review): assumes ebx already holds the target C function address —
// caller contract; verify at call sites.
void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  DCHECK_EQ(+2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
  DCHECK_EQ(+1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
  DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  push(Immediate(Smi::FromInt(StackFrame::EXIT)));
  DCHECK_EQ(-2 * kPointerSize, ExitFrameConstants::kSPOffset);
  push(Immediate(0));  // Saved entry sp, patched before call.
  DCHECK_EQ(-3 * kPointerSize, ExitFrameConstants::kCodeOffset);
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  ExternalReference c_function_address(Isolate::kCFunctionAddress, isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
  mov(Operand::StaticVariable(c_function_address), ebx);
}
   1105 
   1106 
// Finishes exit-frame setup: optionally saves the full 108-byte x87 FPU
// state (fnsave) below the fixed frame, reserves |argc| argument slots,
// aligns esp to the OS frame alignment, and patches the saved entry sp slot
// so the stack walker sees the final esp.
void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save FPU state.
  if (save_doubles) {
    // Store FPU state to m108byte.
    int space = 108 + argc * kPointerSize;
    sub(esp, Immediate(space));
    const int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    fnsave(MemOperand(ebp, offset - 108));
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
    and_(esp, -kFrameAlignment);  // Round esp down to the alignment.
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
   1129 
   1130 
// Enters a full exit frame for a runtime call, materializing the C calling
// convention: edi receives the argument count (taken from eax) and esi the
// pointer to the first argument on the caller's stack.
// NOTE(review): assumes eax holds the JS argument count on entry — caller
// contract; verify at call sites.
void MacroAssembler::EnterExitFrame(int argc, bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(argc, save_doubles);
}
   1142 
   1143 
// Enters an exit frame for an API callback: same fixed frame as
// EnterExitFrame but without argc/argv setup and without FPU state saving.
void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}
   1148 
   1149 
// Tears down an exit frame. Restores the FPU state saved by
// EnterExitFrameEpilogue when |save_doubles| is set. With |pop_arguments|,
// also drops the JS arguments and receiver using esi (which still points at
// the argument area from EnterExitFrame); otherwise just leaves the frame.
void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
  // Optionally restore FPU state.
  if (save_doubles) {
    const int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    frstor(MemOperand(ebp, offset - 108));
  }

  if (pop_arguments) {
    // Get the return address from the stack and restore the frame pointer.
    mov(ecx, Operand(ebp, 1 * kPointerSize));
    mov(ebp, Operand(ebp, 0 * kPointerSize));

    // Pop the arguments and the receiver from the caller stack.
    lea(esp, Operand(esi, 1 * kPointerSize));

    // Push the return address to get ready to return.
    push(ecx);
  } else {
    // Otherwise just leave the exit frame.
    leave();
  }

  LeaveExitFrameEpilogue(true);
}
   1174 
   1175 
// Clears the isolate's top-frame bookkeeping after leaving an exit frame:
// optionally restores esi from the saved context slot, poisons the context
// slot in debug builds, and zeroes the C entry fp slot.
void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  if (restore_context) {
    mov(esi, Operand::StaticVariable(context_address));
  }
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
   1191 
   1192 
// Tears down an API exit frame (counterpart of EnterApiExitFrame):
// collapses the frame and clears the isolate's top-frame slots, optionally
// restoring the context register.
void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue(restore_context);
}
   1199 
   1200 
// Pushes a new stack handler (for exception dispatch) onto the machine stack
// and links it as the head of the isolate's handler chain. The handler is a
// single word: the pointer to the previous handler.
void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));

  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}
   1213 
   1214 
// Unlinks the topmost stack handler pushed by PushStackHandler: restores the
// previous handler as current and drops the rest of the handler record
// (a no-op add when kSize is one word).
void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
   1221 
   1222 
// Security check for access to a JSGlobalProxy held in |holder_reg|:
// jumps to |miss| unless the current lexical context and the holder's native
// context are the same, or their security tokens match. Walks up the frame
// chain past STUB frames (whose context slot holds a smi frame-type marker)
// to find the active StandardFrame's context. Clobbers scratch1/scratch2;
// |holder_reg| is preserved.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch1));
  DCHECK(!holder_reg.is(scratch2));
  DCHECK(!scratch1.is(scratch2));

  // Load current lexical context from the active StandardFrame, which
  // may require crawling past STUB frames.
  Label load_context;
  Label has_context;
  mov(scratch2, ebp);
  bind(&load_context);
  mov(scratch1,
      MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
  // A smi in the context slot is a frame-type marker (STUB frame): keep
  // walking up via the caller fp until a real (heap object) context appears.
  JumpIfNotSmi(scratch1, &has_context);
  mov(scratch2, MemOperand(scratch2, CommonFrameConstants::kCallerFPOffset));
  jmp(&load_context);
  bind(&has_context);

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch1, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  mov(scratch1, ContextOperand(scratch1, Context::NATIVE_CONTEXT_INDEX));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Read the first word and compare to native_context_map.
    cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch2,
      FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    cmp(scratch2, isolate()->factory()->null_value());
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map(),
    cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, token_offset));
  cmp(scratch1, FieldOperand(scratch2, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}
   1294 
   1295 
// Compute the hash code from the untagged key.  This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stub-hydrogen.cc
//
// Note: r0 holds the untagged key on entry and the hash code on exit;
// |scratch| is clobbered.
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (serializer_enabled()) {
    // Snapshot builds cannot embed the seed as an immediate: load it from the
    // roots array at runtime instead (it is stored as a smi).
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));  // r0 * 5 == r0 + (r0 << 2).
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
  and_(r0, 0x3fffffff);  // Clamp to a positive smi-sized range.
}
   1339 
   1340 
   1341 
// Looks up a smi |key| in a SeededNumberDictionary held in |elements| using
// an unrolled open-addressing probe sequence (kNumberDictionaryProbes
// attempts). On success falls through with the value in |result|; on any
// failure (key absent after all probes, or the entry is not a DATA property)
// jumps to |miss|.
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key      - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask.
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);  // Capacity is a power of two, so capacity-1 is the mask.

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kNumberDictionaryProbes - 1)) {
      j(equal, &done);
    } else {
      // Last probe: a mismatch here means the key is not present.
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a field property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  DCHECK_EQ(DATA, 0);
  // The details word is a smi; the type field must be zero (DATA).
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
   1414 
   1415 
// Loads the current allocation top (for the space selected by |flags|) into
// |result|. With RESULT_CONTAINS_TOP, |result| already holds the top and is
// only debug-verified (scratch must then be no_reg). If |scratch| is
// provided it ends up holding the address of the top variable, which
// UpdateAllocationTopHelper can reuse.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    DCHECK(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(allocation_top));
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(allocation_top));
  } else {
    mov(scratch, Immediate(allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
   1442 
   1443 
// Stores |result_end| as the new allocation top. If |scratch| is valid it is
// assumed to still hold the address of the top variable (as left by
// LoadAllocationTopHelper); otherwise the static variable is addressed
// directly. Debug builds verify the new top is object-aligned.
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
   1462 
   1463 
// Inline-allocates a fixed-size object of |object_size| bytes. On success,
// |result| holds the tagged object and execution falls through; on failure
// (space exhausted, or inline allocation disabled) jumps to |gc_required|.
// |result_end| (if valid) receives the untagged end address; |scratch| (if
// valid) is used for allocation-top bookkeeping. With DOUBLE_ALIGNMENT the
// start is 8-byte aligned by inserting a one-word filler when needed. Under
// ALLOCATION_FOLDING_DOMINATOR the top pointer is intentionally not bumped.
void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      // Old space is not guaranteed to have aligned-limit headroom: check
      // before writing the filler.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  Register top_reg = result_end.is_valid() ? result_end : result;

  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  cmp(top_reg, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
    UpdateAllocationTopHelper(top_reg, scratch, flags);
  }

  if (top_reg.is(result)) {
    // result currently holds the end; step back to the start and tag.
    sub(result, Immediate(object_size - kHeapObjectTag));
  } else {
    // Tag the result.
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }
}
   1535 
   1536 
// Inline-allocates a variable-size object of header_size +
// element_count * element_size bytes. |element_count| may be a raw int32 or
// a smi (per |element_count_type|); for a smi the scale factor is halved to
// compensate for the tag shift. On success |result| holds the tagged object
// and |result_end| the end address; jumps to |gc_required| on failure.
// |element_count| itself is never modified.
void MacroAssembler::Allocate(int header_size,
                              ScaleFactor element_size,
                              Register element_count,
                              RegisterValueType element_count_type,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & SIZE_IN_WORDS) == 0);
  DCHECK((flags & ALLOCATION_FOLDING_DOMINATOR) == 0);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  // We assume that element_count*element_size + header_size does not
  // overflow.
  if (element_count_type == REGISTER_VALUE_IS_SMI) {
    // A smi is the value shifted left by kSmiTagSize; halving the scale
    // factor yields the correct byte count without untagging.
    STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
    DCHECK(element_size >= times_2);
    DCHECK(kSmiTagSize == 1);
    element_size = static_cast<ScaleFactor>(element_size - 1);
  } else {
    DCHECK(element_count_type == REGISTER_VALUE_IS_INT32);
  }
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  j(carry, gc_required);  // Address arithmetic wrapped: cannot fit.
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result.
  DCHECK(kHeapObjectTag == 1);
  inc(result);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}
   1613 
// Inline-allocates an object whose byte size is held in register
// |object_size| (left unchanged unless it aliases |result_end|). On success
// |result| holds the tagged object and |result_end| the end address; jumps
// to |gc_required| on failure. Under ALLOCATION_FOLDING_DOMINATOR the top
// pointer is intentionally not bumped.
void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result.
  DCHECK(kHeapObjectTag == 1);
  inc(result);

  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
    UpdateAllocationTopHelper(result_end, scratch, flags);
  }
}
   1677 
// Allocate |object_size| bytes (a compile-time constant) in new space
// WITHOUT a limit check: the caller must already have ensured enough space
// is available (e.g. via a preceding ALLOCATION_FOLDING_DOMINATOR
// allocation).  |result| receives the tagged object address; |result_end|
// is clobbered.
void MacroAssembler::FastAllocate(int object_size, Register result,
                                  Register result_end, AllocationFlags flags) {
  DCHECK(!result.is(result_end));
  // Load address of new object into result.
  LoadAllocationTopHelper(result, no_reg, flags);

  // Align the next allocation by storing a one-pointer filler into any gap;
  // safe without a limit check because the caller guarantees space.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Compute the new top and publish it (no limit check, see above).
  lea(result_end, Operand(result, object_size));
  UpdateAllocationTopHelper(result_end, no_reg, flags);

  // Tag result as a heap object.
  DCHECK(kHeapObjectTag == 1);
  inc(result);
}
   1701 
// Register-sized variant of FastAllocate: allocates |object_size| bytes
// (runtime value) in new space WITHOUT a limit check — the caller must have
// ensured the space is available.  |result| receives the tagged object
// address; |result_end| is clobbered; object_size is preserved.
void MacroAssembler::FastAllocate(Register object_size, Register result,
                                  Register result_end, AllocationFlags flags) {
  DCHECK(!result.is(result_end));
  // Load address of new object into result.
  LoadAllocationTopHelper(result, no_reg, flags);

  // Align the next allocation by storing a one-pointer filler into any gap;
  // safe without a limit check because the caller guarantees space.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Compute the new top and publish it (no limit check, see above).
  lea(result_end, Operand(result, object_size, times_1, 0));
  UpdateAllocationTopHelper(result_end, no_reg, flags);

  // Tag result as a heap object.
  DCHECK(kHeapObjectTag == 1);
  inc(result);
}
   1725 
   1726 void MacroAssembler::AllocateHeapNumber(Register result,
   1727                                         Register scratch1,
   1728                                         Register scratch2,
   1729                                         Label* gc_required,
   1730                                         MutableMode mode) {
   1731   // Allocate heap number in new space.
   1732   Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
   1733            NO_ALLOCATION_FLAGS);
   1734 
   1735   Handle<Map> map = mode == MUTABLE
   1736       ? isolate()->factory()->mutable_heap_number_map()
   1737       : isolate()->factory()->heap_number_map();
   1738 
   1739   // Set the map.
   1740   mov(FieldOperand(result, HeapObject::kMapOffset), Immediate(map));
   1741 }
   1742 
   1743 
// Allocates a SeqTwoByteString with |length| characters (untagged int32) in
// new space.  Sets map, length (as smi) and an empty hash field; the
// character payload is left uninitialized.  scratch1-3 are clobbered;
// jumps to |gc_required| on failure.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  DCHECK(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  // (length + length == length * 2, computed in one lea.)
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  // Round down to an object-aligned payload size.
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  Allocate(SeqTwoByteString::kHeaderSize, times_1, scratch1,
           REGISTER_VALUE_IS_INT32, result, scratch2, scratch3, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
   1772 
   1773 
// Allocates a SeqOneByteString with |length| characters (untagged int32) in
// new space.  Sets map, length (as smi) and an empty hash field; the
// character payload is left uninitialized.  scratch1-3 are clobbered;
// jumps to |gc_required| on failure.
void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  DCHECK(kCharSize == 1);
  // Round the byte count up to object alignment.
  add(scratch1, Immediate(kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::kHeaderSize, times_1, scratch1,
           REGISTER_VALUE_IS_INT32, result, scratch2, scratch3, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->one_byte_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
   1800 
   1801 
// Allocates a SeqOneByteString with a compile-time-constant |length| in new
// space.  Sets map, smi length and an empty hash field; the character
// payload is left uninitialized.  Jumps to |gc_required| on failure.
void MacroAssembler::AllocateOneByteString(Register result, int length,
                                           Register scratch1, Register scratch2,
                                           Label* gc_required) {
  DCHECK(length > 0);

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
           gc_required, NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->one_byte_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
   1819 
   1820 
// Allocates a (two-byte) ConsString in new space and installs its map.
// First/second/length/hash fields are left uninitialized for the caller.
// Jumps to |gc_required| on failure; scratch1/scratch2 are clobbered.
void MacroAssembler::AllocateTwoByteConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate the cons string object in new space.
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}
   1833 
   1834 
// Allocates a one-byte ConsString in new space and installs its map.
// First/second/length/hash fields are left uninitialized for the caller.
// Jumps to |gc_required| on failure; scratch1/scratch2 are clobbered.
void MacroAssembler::AllocateOneByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_one_byte_string_map()));
}
   1846 
   1847 
// Allocates a (two-byte) SlicedString in new space and installs its map.
// Parent/offset/length/hash fields are left uninitialized for the caller.
// Jumps to |gc_required| on failure; scratch1/scratch2 are clobbered.
void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                          Register scratch1,
                                          Register scratch2,
                                          Label* gc_required) {
  // Allocate the sliced string object in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}
   1860 
   1861 
// Allocates a one-byte SlicedString in new space and installs its map.
// Parent/offset/length/hash fields are left uninitialized for the caller.
// Jumps to |gc_required| on failure; scratch1/scratch2 are clobbered.
void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate the sliced string object in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_one_byte_string_map()));
}
   1874 
   1875 
// Allocates a JSValue wrapper in new space and fully initializes it: map
// from |constructor|'s initial map, empty properties and elements arrays,
// and |value| in the value slot.  |scratch| is clobbered; jumps to
// |gc_required| on allocation failure.
void MacroAssembler::AllocateJSValue(Register result, Register constructor,
                                     Register value, Register scratch,
                                     Label* gc_required) {
  DCHECK(!result.is(constructor));
  DCHECK(!result.is(scratch));
  DCHECK(!result.is(value));

  // Allocate JSValue in new space.
  Allocate(JSValue::kSize, result, scratch, no_reg, gc_required,
           NO_ALLOCATION_FLAGS);

  // Initialize the JSValue.
  LoadGlobalFunctionInitialMap(constructor, scratch);
  mov(FieldOperand(result, HeapObject::kMapOffset), scratch);
  LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
  mov(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
  mov(FieldOperand(result, JSObject::kElementsOffset), scratch);
  mov(FieldOperand(result, JSValue::kValueOffset), value);
  // All four in-object slots are initialized above; no fields remain.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
}
   1896 
   1897 
// Copy memory, byte-by-byte, from source to destination.  Not optimized for
// long or aligned copies.  The contents of scratch and length are destroyed.
// Source and destination are incremented by length.
// Many variants of movsb, loop unrolling, word moves, and indexed operands
// have been tried here already, and this is fastest.
// A simpler loop is faster on small copies, but 30% slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag(),
// before calling this function.
// Register constraints: source == esi, destination == edi, length == ecx,
// because rep_movs implicitly uses those registers.
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label short_loop, len4, len8, len12, done, short_string;
  DCHECK(source.is(esi));
  DCHECK(destination.is(edi));
  DCHECK(length.is(ecx));
  cmp(length, Immediate(4));
  j(below, &short_string, Label::kNear);

  // Because source is 4-byte aligned in our uses of this function,
  // we keep source aligned for the rep_movs call by copying the odd bytes
  // at the end of the ranges.
  // Copy the last 4 bytes first; any bytes also covered by the word copies
  // below are simply written twice with the same data.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);

  // Dispatch on length: small word counts are unrolled below.
  cmp(length, Immediate(8));
  j(below_equal, &len4, Label::kNear);
  cmp(length, Immediate(12));
  j(below_equal, &len8, Label::kNear);
  cmp(length, Immediate(16));
  j(below_equal, &len12, Label::kNear);

  // length > 16: bulk copy length/4 dwords with rep movs, then account for
  // the 0-3 trailing bytes already copied above.
  mov(scratch, ecx);
  shr(ecx, 2);
  rep_movs();
  and_(scratch, Immediate(0x3));
  add(destination, scratch);
  jmp(&done, Label::kNear);

  // Unrolled word copies; fall-through chains len12 -> len8 -> len4.
  bind(&len12);
  mov(scratch, Operand(source, 8));
  mov(Operand(destination, 8), scratch);
  bind(&len8);
  mov(scratch, Operand(source, 4));
  mov(Operand(destination, 4), scratch);
  bind(&len4);
  mov(scratch, Operand(source, 0));
  mov(Operand(destination, 0), scratch);
  add(destination, length);
  jmp(&done, Label::kNear);

  // length < 4: plain byte loop (also handles length == 0).
  bind(&short_string);
  test(length, length);
  j(zero, &done, Label::kNear);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
   1963 
   1964 
   1965 void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
   1966                                                 Register end_address,
   1967                                                 Register filler) {
   1968   Label loop, entry;
   1969   jmp(&entry, Label::kNear);
   1970   bind(&loop);
   1971   mov(Operand(current_address, 0), filler);
   1972   add(current_address, Immediate(kPointerSize));
   1973   bind(&entry);
   1974   cmp(current_address, end_address);
   1975   j(below, &loop, Label::kNear);
   1976 }
   1977 
   1978 
   1979 void MacroAssembler::BooleanBitTest(Register object,
   1980                                     int field_offset,
   1981                                     int bit_index) {
   1982   bit_index += kSmiTagSize + kSmiShiftSize;
   1983   DCHECK(base::bits::IsPowerOfTwo32(kBitsPerByte));
   1984   int byte_index = bit_index / kBitsPerByte;
   1985   int byte_bit_index = bit_index & (kBitsPerByte - 1);
   1986   test_b(FieldOperand(object, field_offset + byte_index),
   1987          Immediate(1 << byte_bit_index));
   1988 }
   1989 
   1990 
   1991 
   1992 void MacroAssembler::NegativeZeroTest(Register result,
   1993                                       Register op,
   1994                                       Label* then_label) {
   1995   Label ok;
   1996   test(result, result);
   1997   j(not_zero, &ok, Label::kNear);
   1998   test(op, op);
   1999   j(sign, then_label, Label::kNear);
   2000   bind(&ok);
   2001 }
   2002 
   2003 
   2004 void MacroAssembler::NegativeZeroTest(Register result,
   2005                                       Register op1,
   2006                                       Register op2,
   2007                                       Register scratch,
   2008                                       Label* then_label) {
   2009   Label ok;
   2010   test(result, result);
   2011   j(not_zero, &ok, Label::kNear);
   2012   mov(scratch, op1);
   2013   or_(scratch, op2);
   2014   j(sign, then_label, Label::kNear);
   2015   bind(&ok);
   2016 }
   2017 
   2018 
// Loads the constructor of |map| into |result|.  The constructor-or-back-
// pointer slot may hold a chain of back-pointer maps; follow it until the
// first value that is not a Map (a smi or any other heap object).  |temp|
// is clobbered by the type check.
void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp) {
  Label done, loop;
  mov(result, FieldOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  // A smi cannot be a map, so the chain ends here.
  JumpIfSmi(result, &done, Label::kNear);
  CmpObjectType(result, MAP_TYPE, temp);
  j(not_equal, &done, Label::kNear);
  // Still a map: keep following the back-pointer chain.
  mov(result, FieldOperand(result, Map::kConstructorOrBackPointerOffset));
  jmp(&loop);
  bind(&done);
}
   2031 
   2032 
// Loads the prototype of JSFunction |function| into |result|, resolving
// through the initial map when present.  Jumps to |miss| when the slot
// holds the hole (prototype not yet materialized).  |scratch| is clobbered
// by the type check.
void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
                                             Register scratch, Label* miss) {
  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done, Label::kNear);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));

  // All done.
  bind(&done);
}
   2056 
   2057 
// Calls a code stub, recording |ast_id| for type feedback.  Stubs that may
// set up a frame can only be called when a frame is present (see
// AllowThisStubCall).
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
   2062 
   2063 
// Tail-calls a code stub: jumps to its entry so the stub returns directly
// to this function's caller.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
   2067 
   2068 
   2069 void MacroAssembler::StubReturn(int argc) {
   2070   DCHECK(argc >= 1 && generating_stub());
   2071   ret((argc - 1) * kPointerSize);
   2072 }
   2073 
   2074 
   2075 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
   2076   return has_frame_ || !stub->SometimesSetsUpAFrame();
   2077 }
   2078 
   2079 
// Extracts the cached array index from a string hash-field value in |hash|
// and leaves it as a smi in |index|.  |hash| is preserved unless it aliases
// |index|.
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  if (!index.is(hash)) {
    mov(index, hash);
  }
  // Shift out the non-index bits and smi-tag the result in one step.
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
}
   2091 
   2092 
// Calls runtime function |f| with |num_arguments| arguments already on the
// stack, going through the CEntry stub.  eax receives the argument count
// and ebx the function's entry point before the call.
void MacroAssembler::CallRuntime(const Runtime::Function* f, int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Move(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(isolate(), 1, save_doubles);
  CallStub(&ces);
}
   2109 
   2110 
// Calls an external (C) function at |ref| via the CEntry stub.  eax
// receives the argument count and ebx the target address before the call.
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}
   2119 
   2120 
// Tail-calls runtime function |fid|: loads the fixed argument count (if
// any) into eax and jumps to the CEntry stub, which returns directly to
// this function's caller.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  // ----------- S t a t e -------------
  //  -- esp[0]                 : return address
  //  -- esp[4]                 : argument num_arguments - 1
  //  ...
  //  -- esp[4 * num_arguments] : argument 0 (receiver)
  //  (stack slots are kPointerSize == 4 bytes on this 32-bit target)
  //
  //  For runtime functions with variable arguments:
  //  -- eax                    : number of  arguments
  // -----------------------------------

  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    // TODO(1236192): Most runtime routines don't need the number of
    // arguments passed in because it is constant. At some point we
    // should remove this need and make the runtime routine entry code
    // smarter.
    mov(eax, Immediate(function->nargs));
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}
   2143 
   2144 
// Tail-calls the C entry runtime stub with |ext| as the target entry point
// (passed in ebx, as CEntryStub expects).
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(isolate(), 1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
   2151 
// Drops the current frame and moves the return address plus the callee's
// arguments into the caller's frame, in preparation for a tail call.
// |caller_args_count_reg| holds the caller's argument count on entry and is
// clobbered; scratch0/scratch1 are clobbered; ebp and esp are rewritten to
// the caller's frame.  |number_of_temp_values_after_return_address| extra
// slots above the return address are discarded (only legal when the return
// address is on the stack).
void MacroAssembler::PrepareForTailCall(
    const ParameterCount& callee_args_count, Register caller_args_count_reg,
    Register scratch0, Register scratch1, ReturnAddressState ra_state,
    int number_of_temp_values_after_return_address) {
#if DEBUG
  if (callee_args_count.is_reg()) {
    DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
                       scratch1));
  } else {
    DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
  }
  DCHECK(ra_state != ReturnAddressState::kNotOnStack ||
         number_of_temp_values_after_return_address == 0);
#endif

  // Calculate the destination address where we will put the return address
  // after we drop current frame.
  Register new_sp_reg = scratch0;
  if (callee_args_count.is_reg()) {
    sub(caller_args_count_reg, callee_args_count.reg());
    lea(new_sp_reg,
        Operand(ebp, caller_args_count_reg, times_pointer_size,
                StandardFrameConstants::kCallerPCOffset -
                    number_of_temp_values_after_return_address * kPointerSize));
  } else {
    lea(new_sp_reg, Operand(ebp, caller_args_count_reg, times_pointer_size,
                            StandardFrameConstants::kCallerPCOffset -
                                (callee_args_count.immediate() +
                                 number_of_temp_values_after_return_address) *
                                    kPointerSize));
  }

  if (FLAG_debug_code) {
    // The new stack pointer must lie above the current one.
    cmp(esp, new_sp_reg);
    Check(below, kStackAccessBelowStackPointer);
  }

  // Copy return address from caller's frame to current frame's return address
  // to avoid its trashing and let the following loop copy it to the right
  // place.
  Register tmp_reg = scratch1;
  if (ra_state == ReturnAddressState::kOnStack) {
    mov(tmp_reg, Operand(ebp, StandardFrameConstants::kCallerPCOffset));
    mov(Operand(esp, number_of_temp_values_after_return_address * kPointerSize),
        tmp_reg);
  } else {
    DCHECK(ReturnAddressState::kNotOnStack == ra_state);
    DCHECK_EQ(0, number_of_temp_values_after_return_address);
    Push(Operand(ebp, StandardFrameConstants::kCallerPCOffset));
  }

  // Restore caller's frame pointer now as it could be overwritten by
  // the copying loop.
  mov(ebp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // +2 here is to copy both receiver and return address.
  Register count_reg = caller_args_count_reg;
  if (callee_args_count.is_reg()) {
    lea(count_reg, Operand(callee_args_count.reg(),
                           2 + number_of_temp_values_after_return_address));
  } else {
    mov(count_reg, Immediate(callee_args_count.immediate() + 2 +
                             number_of_temp_values_after_return_address));
    // TODO(ishell): Unroll copying loop for small immediate values.
  }

  // Now copy callee arguments to the caller frame going backwards to avoid
  // callee arguments corruption (source and destination areas could overlap).
  Label loop, entry;
  jmp(&entry, Label::kNear);
  bind(&loop);
  dec(count_reg);
  mov(tmp_reg, Operand(esp, count_reg, times_pointer_size, 0));
  mov(Operand(new_sp_reg, count_reg, times_pointer_size, 0), tmp_reg);
  bind(&entry);
  cmp(count_reg, Immediate(0));
  j(not_equal, &loop, Label::kNear);

  // Leave current frame.
  mov(esp, new_sp_reg);
}
   2233 
// Emits the argument-count check preceding a function invocation.  When the
// actual count matches the expected count (or adaptation is disabled via
// the sentinel), control falls through with eax holding the actual count.
// Otherwise the ArgumentsAdaptorTrampoline is called (flag == CALL_FUNCTION)
// or jumped to, with eax = actual and ebx = expected.
// *definitely_mismatches is set when the mismatch is known at compile time,
// in which case no fall-through code should be emitted by the caller.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    mov(eax, actual.immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      mov(eax, actual.immediate());
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      DCHECK(expected.reg().is(ebx));
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      DCHECK(actual.reg().is(eax));
      DCHECK(expected.reg().is(ebx));
    } else {
      // Expected and actual alias: trivially a match.
      Move(eax, actual.reg());
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        // Adaptor was taken on a dynamic mismatch; skip the direct invoke.
        jmp(done, done_near);
      }
    } else {
      // Tail-call position: the adaptor returns to our caller.
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
   2299 
   2300 
// If the debugger is stepping (last step action >= StepIn), calls the
// runtime to flood |fun| with one-shot breakpoints.  All live registers
// holding untagged values are smi-tagged and pushed around the runtime call
// so the GC sees only valid tagged values, then restored afterwards.
void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
                                             const ParameterCount& expected,
                                             const ParameterCount& actual) {
  Label skip_flooding;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(isolate());
  // The comparison below relies on this ordering of the step actions.
  STATIC_ASSERT(StepFrame > StepIn);
  cmpb(Operand::StaticVariable(last_step_action), Immediate(StepIn));
  j(less, &skip_flooding);
  {
    // Set up an internal frame only if we are not already inside one.
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      SmiTag(expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    // fun is pushed twice: once as the runtime argument, once to survive
    // the call (popped back below).
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    // Restore registers in reverse push order, untagging as we go.
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg());
    }
  }
  bind(&skip_flooding);
}
   2342 
   2343 
// Invokes the code of |function| (must be edi) with the given argument
// counts, after the debugger step check and argument adaptation.  The call
// goes indirectly through the function's code-entry field so recompilation
// takes effect without patching call sites.  new.target (edx) is cleared to
// undefined when not supplied.
void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(edi));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(edx));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    mov(edx, isolate()->factory()->undefined_value());
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 Label::kNear, call_wrapper);
  // On a compile-time-known mismatch the adaptor handles everything and the
  // code below would be unreachable.
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Operand code = FieldOperand(function, JSFunction::kCodeEntryOffset);
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(code);
    }
    bind(&done);
  }
}
   2383 
   2384 
// Invokes |fun| (in edi) with |new_target|, reading the expected argument
// count from the function's SharedFunctionInfo.  Clobbers ebx and loads the
// function's context into esi.
void MacroAssembler::InvokeFunction(Register fun, Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  // The formal parameter count is stored as a Smi; untag it so it can be
  // used as a raw expected-argument count.
  mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeFunctionCode(edi, new_target, expected, actual, flag, call_wrapper);
}
   2401 
   2402 
// Invokes |fun| (in edi) with an explicitly provided expected argument
// count and no new.target.  Loads the function's context into esi.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  InvokeFunctionCode(edi, no_reg, expected, actual, flag, call_wrapper);
}
   2416 
   2417 
// Handle-based convenience overload: materializes |function| into edi
// (indirectly through a cell when it lives in new space) and forwards to
// the register-based InvokeFunction above.
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  LoadHeapObject(edi, function);
  InvokeFunction(edi, expected, actual, flag, call_wrapper);
}
   2426 
   2427 
// Loads into |dst| the context |context_chain_length| hops up the chain of
// previous contexts, starting from the current context in esi.  A length of
// zero yields the current context itself (copied into |dst|).
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context.  Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, kVariableResolvedToWithContext);
  }
}
   2452 
   2453 
// Loads the global proxy object into |dst| via the native context.
void MacroAssembler::LoadGlobalProxy(Register dst) {
  mov(dst, NativeContextOperand());
  mov(dst, ContextOperand(dst, Context::GLOBAL_PROXY_INDEX));
}
   2458 
   2459 
// If |map_in_out| is the native context's cached array map for
// |expected_kind|, replaces it with the cached map for |transitioned_kind|;
// otherwise jumps to |no_map_match| with |map_in_out| unchanged.
// |scratch| is clobbered with the native context.
void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  DCHECK(IsFastElementsKind(expected_kind));
  DCHECK(IsFastElementsKind(transitioned_kind));

  // Check that the function's map is the same as the expected cached map.
  mov(scratch, NativeContextOperand());
  cmp(map_in_out,
      ContextOperand(scratch, Context::ArrayMapIndex(expected_kind)));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  mov(map_in_out,
      ContextOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
}
   2479 
   2480 
// Loads the global function at slot |index| of the native context into
// |function|.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the native context from the current context.
  mov(function, NativeContextOperand());
  // Load the function from the native context.
  mov(function, ContextOperand(function, index));
}
   2487 
   2488 
// Loads |function|'s initial map into |map|.  In debug code, verifies the
// loaded value really is a map (its own map is the meta map) and aborts
// otherwise.
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map.  The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}
   2502 
   2503 
// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}
   2509 
   2510 
// Store the immediate src in the safepoint register stack slot for dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}
   2514 
   2515 
// Load into dst the value saved in the safepoint register slot for src.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}
   2519 
   2520 
// Returns an operand addressing the stack slot where |reg| was saved by
// the safepoint register push sequence.
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
   2524 
   2525 
   2526 int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
   2527   // The registers are pushed starting with the lowest encoding,
   2528   // which means that lowest encodings are furthest away from
   2529   // the stack pointer.
   2530   DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
   2531   return kNumSafepointRegisters - reg_code - 1;
   2532 }
   2533 
   2534 
// Materializes |object| into |result|.  Objects in new space are loaded
// indirectly through a freshly allocated cell (presumably so the embedded
// reference stays valid if the object moves — the cell is the relocatable
// anchor); other objects are embedded directly in the instruction stream.
void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  AllowDeferredHandleDereference embedding_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    mov(result, Operand::ForCell(cell));
  } else {
    mov(result, object);
  }
}
   2545 
   2546 
// Compares |reg| against |object|, using the same cell indirection as
// LoadHeapObject for new-space objects.
void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    cmp(reg, Operand::ForCell(cell));
  } else {
    cmp(reg, object);
  }
}
   2556 
   2557 
// Pushes |object| onto the stack, using the same cell indirection as
// LoadHeapObject for new-space objects.
void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    push(Operand::ForCell(cell));
  } else {
    Push(object);
  }
}
   2567 
   2568 
// Compares |value| with the value held by weak |cell|.  |scratch| is
// clobbered with the cell itself.
void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
                                  Register scratch) {
  mov(scratch, cell);
  cmp(value, FieldOperand(scratch, WeakCell::kValueOffset));
}
   2574 
   2575 
// Loads the value held by weak |cell| into |value|.
void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
  mov(value, cell);
  mov(value, FieldOperand(value, WeakCell::kValueOffset));
}
   2580 
   2581 
// Loads the value held by weak |cell| into |value| and jumps to |miss| if
// the cell has been cleared (a cleared cell holds a Smi).
void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  GetWeakValue(value, cell);
  JumpIfSmi(value, miss);
}
   2587 
   2588 
// Return to the caller without popping any arguments.
void MacroAssembler::Ret() {
  ret(0);
}
   2592 
   2593 
   2594 void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
   2595   if (is_uint16(bytes_dropped)) {
   2596     ret(bytes_dropped);
   2597   } else {
   2598     pop(scratch);
   2599     add(esp, Immediate(bytes_dropped));
   2600     push(scratch);
   2601     ret(0);
   2602   }
   2603 }
   2604 
   2605 
// Debug helper: asserts that the x87 FPU register stack currently holds
// exactly |depth| items, by reading the TOP field of the FPU status word.
// Preserves eax; clears pending FPU exceptions afterwards.
void MacroAssembler::VerifyX87StackDepth(uint32_t depth) {
  // Turn off the stack depth check when serializer is enabled to reduce the
  // code size.
  if (serializer_enabled()) return;
  // Make sure the floating point stack is either empty or has depth items.
  DCHECK(depth <= 7);
  // This is very expensive.
  DCHECK(FLAG_debug_code && FLAG_enable_slow_asserts);

  // The top-of-stack (tos) is 7 if there is one item pushed.
  int tos = (8 - depth) % 8;
  // Bits 11..13 of the status word form the TOP field.
  const int kTopMask = 0x3800;
  push(eax);
  fwait();
  fnstsw_ax();
  and_(eax, kTopMask);
  shr(eax, 11);  // Normalize TOP down to a value in [0, 7].
  cmp(eax, Immediate(tos));
  Check(equal, kUnexpectedFPUStackDepthAfterInstruction);
  fnclex();
  pop(eax);
}
   2628 
   2629 
   2630 void MacroAssembler::Drop(int stack_elements) {
   2631   if (stack_elements > 0) {
   2632     add(esp, Immediate(stack_elements * kPointerSize));
   2633   }
   2634 }
   2635 
   2636 
   2637 void MacroAssembler::Move(Register dst, Register src) {
   2638   if (!dst.is(src)) {
   2639     mov(dst, src);
   2640   }
   2641 }
   2642 
   2643 
   2644 void MacroAssembler::Move(Register dst, const Immediate& x) {
   2645   if (x.is_zero() && RelocInfo::IsNone(x.rmode_)) {
   2646     xor_(dst, dst);  // Shorter than mov of 32-bit immediate 0.
   2647   } else {
   2648     mov(dst, x);
   2649   }
   2650 }
   2651 
   2652 
// Stores an immediate directly to memory; no shorter form exists for a
// memory destination, so this is a plain mov.
void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
   2656 
   2657 
// Emulates LZCNT (count leading zeros) with BSR.  BSR yields the index of
// the highest set bit and leaves dst undefined for zero input, so zero is
// special-cased; the final xor with 31 converts the bit index into the
// leading-zero count.
void MacroAssembler::Lzcnt(Register dst, const Operand& src) {
  // TODO(intel): Add support for LZCNT (with ABM/BMI1).
  Label not_zero_src;
  bsr(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Move(dst, Immediate(63));  // 63^31 == 32
  bind(&not_zero_src);
  xor_(dst, Immediate(31));  // for x in [0..31], 31^x == 31-x.
}
   2667 
   2668 
// Emulates TZCNT (count trailing zeros) with BSF.  BSF already yields the
// trailing-zero count for non-zero input but leaves dst undefined for
// zero, so zero is special-cased to 32.
void MacroAssembler::Tzcnt(Register dst, const Operand& src) {
  // TODO(intel): Add support for TZCNT (with ABM/BMI1).
  Label not_zero_src;
  bsf(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Move(dst, Immediate(32));  // The result of tzcnt is 32 if src = 0.
  bind(&not_zero_src);
}
   2677 
   2678 
// Population count is not implemented for this port; callers must not
// reach here until POPCNT support is added (see commented sketch below).
void MacroAssembler::Popcnt(Register dst, const Operand& src) {
  // TODO(intel): Add support for POPCNT (with POPCNT)
  // if (CpuFeatures::IsSupported(POPCNT)) {
  //   CpuFeatureScope scope(this, POPCNT);
  //   popcnt(dst, src);
  //   return;
  // }
  UNREACHABLE();
}
   2688 
   2689 
   2690 void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
   2691   if (FLAG_native_code_counters && counter->Enabled()) {
   2692     mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
   2693   }
   2694 }
   2695 
   2696 
   2697 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
   2698   DCHECK(value > 0);
   2699   if (FLAG_native_code_counters && counter->Enabled()) {
   2700     Operand operand = Operand::StaticVariable(ExternalReference(counter));
   2701     if (value == 1) {
   2702       inc(operand);
   2703     } else {
   2704       add(operand, Immediate(value));
   2705     }
   2706   }
   2707 }
   2708 
   2709 
   2710 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
   2711   DCHECK(value > 0);
   2712   if (FLAG_native_code_counters && counter->Enabled()) {
   2713     Operand operand = Operand::StaticVariable(ExternalReference(counter));
   2714     if (value == 1) {
   2715       dec(operand);
   2716     } else {
   2717       sub(operand, Immediate(value));
   2718     }
   2719   }
   2720 }
   2721 
   2722 
// Conditionally increments |counter| when condition |cc| holds.  The
// EFLAGS are saved/restored around the update (inc/add clobber flags) so
// callers can keep testing the original condition afterwards.
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
   2736 
   2737 
// Conditionally decrements |counter| when condition |cc| holds.  The
// EFLAGS are saved/restored around the update (dec/sub clobber flags) so
// callers can keep testing the original condition afterwards.
void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
   2751 
   2752 
   2753 void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
   2754   if (emit_debug_code()) Check(cc, reason);
   2755 }
   2756 
   2757 
// Debug check that |elements| is a fast-elements backing store: its map
// must be one of fixed array, fixed double array, or fixed COW array.
// Aborts otherwise.  Emits nothing in release code.
void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}
   2775 
   2776 
// Emits code that aborts with |reason| unless condition |cc| holds.
void MacroAssembler::Check(Condition cc, BailoutReason reason) {
  Label L;
  j(cc, &L);
  Abort(reason);
  // will not return here
  bind(&L);
}
   2784 
   2785 
// Emits a runtime check that esp satisfies the OS activation frame
// alignment; traps with int3 when misaligned.  No code is emitted when the
// required alignment is a single pointer (always satisfied).
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
   2799 
   2800 
// Emits code that aborts execution with |reason|: pushes the reason as a
// Smi and calls Runtime::kAbort.  In DEBUG builds the reason text is
// recorded as an assembler comment, and FLAG_trap_on_abort turns the whole
// thing into a bare int3.  Never returns to the emitted call site.
void MacroAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(reason))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort);
  } else {
    CallRuntime(Runtime::kAbort);
  }
  // will not return here
  int3();
}
   2828 
   2829 
// Loads |map|'s descriptor array into |descriptors|.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}
   2834 
   2835 
// Extracts |map|'s own-descriptor count from its bit field 3 into |dst|.
void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}
   2840 
   2841 
// Loads the getter or setter (per |accessor|) of the AccessorPair stored
// at |accessor_index| in |holder|'s descriptor array into |dst|.
void MacroAssembler::LoadAccessor(Register dst, Register holder,
                                  int accessor_index,
                                  AccessorComponent accessor) {
  mov(dst, FieldOperand(holder, HeapObject::kMapOffset));
  LoadInstanceDescriptors(dst, dst);
  // dst now holds the AccessorPair for the descriptor entry.
  mov(dst, FieldOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
  int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
                                           : AccessorPair::kSetterOffset;
  mov(dst, FieldOperand(dst, offset));
}
   2852 
   2853 
// Jumps to |failure| unless |instance_type| describes a sequential
// one-byte string.  |scratch| is clobbered (it may alias instance_type).
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
    Register instance_type, Register scratch, Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  // Keep only the string-ness, representation, and encoding bits, then
  // require the exact sequential/one-byte/string combination.
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
  j(not_equal, failure);
}
   2864 
   2865 
// Jumps to |failure| unless both |object1| and |object2| are sequential
// one-byte strings.  Both scratch registers are clobbered.
void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register object1,
                                                           Register object2,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that both objects are not smis.  ANDing the two values produces
  // a Smi tag bit of zero only if at least one of them is a Smi.
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat one-byte strings.
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  // Interleave bits from both instance types and compare them in one check.
  // The lea shifts scratch2 left by 3 and adds scratch1, which is only
  // collision-free because the mask fits in the low 3 bits (asserted).
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  and_(scratch1, kFlatOneByteStringMask);
  and_(scratch2, kFlatOneByteStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatOneByteStringTag | (kFlatOneByteStringTag << 3));
  j(not_equal, failure);
}
   2896 
   2897 
// Jumps to |not_unique_name| unless |operand| (an instance type) denotes a
// unique name: either an internalized string or a symbol.
void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  // Internalized strings have both the string and internalized bits clear.
  test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  j(zero, &succeed);
  // Otherwise only a symbol qualifies as a unique name.
  cmpb(operand, Immediate(SYMBOL_TYPE));
  j(not_equal, not_unique_name, distance);

  bind(&succeed);
}
   2910 
   2911 
// Debug checks for a sequential-string character store: verifies |string|
// is a heap object with the expected representation/encoding, and that the
// untagged |index| is within the string's length.  |value| and |index| are
// preserved (saved/restored around the checks).
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  JumpIfNotSmi(string, &is_object, Label::kNear);
  Abort(kNonObject);
  bind(&is_object);

  // Borrow |value| as a scratch register for the type check.
  push(value);
  mov(value, FieldOperand(string, HeapObject::kMapOffset));
  movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));

  and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmp(value, Immediate(encoding_mask));
  pop(value);
  Check(equal, kUnexpectedStringType);

  // The index is assumed to be untagged coming in, tag it to compare with the
  // string length without using a temp register, it is restored at the end of
  // this function.
  // SmiTag overflows when the index does not fit in a Smi, hence the
  // no_overflow check immediately after.
  SmiTag(index);
  Check(no_overflow, kIndexIsTooLarge);

  cmp(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  cmp(index, Immediate(Smi::FromInt(0)));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index
  SmiUntag(index);
}
   2945 
   2946 
// Reserves stack space for |num_arguments| C-call arguments.  When the OS
// requires frame alignment, esp is aligned down and the original esp is
// saved in the slot just above the arguments so CallCFunction can restore
// it.  |scratch| is clobbered in that case.
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}
   2961 
   2962 
// Calls the C function at |function|; the address is loaded into eax,
// which is safe to trash because it will hold the return value anyway.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}
   2969 
   2970 
// Calls the C function whose address is in |function| and unwinds the
// argument area set up by PrepareCallCFunction: either by reloading the
// saved esp (aligned case) or by popping the argument words.
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (base::OS::ActivationFrameAlignment() != 0) {
    // Restore the esp saved by PrepareCallCFunction above the arguments.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}
   2986 
   2987 
   2988 #ifdef DEBUG
   2989 bool AreAliased(Register reg1,
   2990                 Register reg2,
   2991                 Register reg3,
   2992                 Register reg4,
   2993                 Register reg5,
   2994                 Register reg6,
   2995                 Register reg7,
   2996                 Register reg8) {
   2997   int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
   2998       reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
   2999       reg7.is_valid() + reg8.is_valid();
   3000 
   3001   RegList regs = 0;
   3002   if (reg1.is_valid()) regs |= reg1.bit();
   3003   if (reg2.is_valid()) regs |= reg2.bit();
   3004   if (reg3.is_valid()) regs |= reg3.bit();
   3005   if (reg4.is_valid()) regs |= reg4.bit();
   3006   if (reg5.is_valid()) regs |= reg5.bit();
   3007   if (reg6.is_valid()) regs |= reg6.bit();
   3008   if (reg7.is_valid()) regs |= reg7.bit();
   3009   if (reg8.is_valid()) regs |= reg8.bit();
   3010   int n_of_non_aliasing_regs = NumRegs(regs);
   3011 
   3012   return n_of_valid_regs != n_of_non_aliasing_regs;
   3013 }
   3014 #endif
   3015 
   3016 
// Sets up a patching assembler over |size| bytes of existing code at
// |address|.
CodePatcher::CodePatcher(Isolate* isolate, byte* address, int size)
    : address_(address),
      size_(size),
      masm_(isolate, address, size + Assembler::kGap, CodeObjectRequired::kNo) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
   3026 
   3027 
// Flushes the instruction cache over the patched region and verifies that
// exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  Assembler::FlushICache(masm_.isolate(), address_, size_);

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
   3036 
   3037 
// Tests |mask| against the flags word of the memory page containing
// |object| and jumps to |condition_met| when |cc| (zero/not_zero) holds.
// |scratch| is clobbered with the page base address.
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  // Compute the page base: clear the low (page-offset) bits of the address.
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Use the shorter byte-sized test when the mask fits in one byte.
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
   3059 
   3060 
// Like CheckPageFlag, but for a statically known |map|: the page's flags
// address is computed at assembly time and tested via a static operand,
// which is only valid because maps are never compacted (asserted below).
void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  DCHECK(!serializer_enabled());  // Serializer cannot match page_flags.
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  DCHECK(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  // Use the shorter byte-sized test when the mask fits in one byte.
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), Immediate(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
   3082 
   3083 
// Jumps to |on_black| when |object|'s mark bits are "11" (black).  Both
// scratch registers are clobbered; ecx is implicitly used by HasColor.
void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1, on_black, on_black_near, 1,
           1);  // kBlackBitPattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
}
   3093 
   3094 
// Jumps to |has_color| when |object|'s two mark bits match
// (first_bit, second_bit).  The second bit may live in the first byte of
// the next bitmap cell, which the word-boundary path handles.  Clobbers
// both scratch registers and ecx (via GetMarkBits).
void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  // Test the first mark bit against the expected value.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  j(zero, &word_boundary, Label::kNear);  // Mask overflowed out of the cell.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  // The second bit is bit 0 of the following bitmap cell.
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize),
         Immediate(1));

  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}
   3122 
   3123 
// Computes the marking-bitmap cell address for |addr_reg| into
// |bitmap_reg| and a single-bit mask selecting the object's first mark bit
// into |mask_reg|.  Clobbers ecx (used for the variable shift count).
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  // bitmap_reg = page base of the address.
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  // Derive the byte offset of the bitmap cell within the page.
  mov(ecx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  // The bit index within the cell is the pointer index modulo cell size.
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);  // mask_reg = 1 << bit index (shift count in cl).
}
   3144 
   3145 
// Jumps to |value_is_white| when |value|'s mark bits denote white ("00").
// Clobbers both scratch registers and ecx (via GetMarkBits).
void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
                                 Register mask_scratch, Label* value_is_white,
                                 Label::Distance distance) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(zero, value_is_white, Label::kNear);
}
   3163 
   3164 
// Loads the EnumLength field of |map|'s bit_field3 into |dst| as a Smi.
void MacroAssembler::EnumLength(Register dst, Register map) {
  // EnumLengthBits must occupy the low bits so masking alone decodes it.
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Map::EnumLengthBits::kMask));
  // Tag as a Smi so it can be compared against Smi constants by callers.
  SmiTag(dst);
}
   3171 
   3172 
// Walks the prototype chain of the object in eax and jumps to |call_runtime|
// unless every object on the chain has a usable (and, past the receiver,
// empty) enum cache and no elements. Clobbers ebx, ecx, edx.
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  mov(ecx, eax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(kInvalidEnumCacheSentinel)));
  j(equal, call_runtime);

  // The receiver itself may have a non-zero enum length; skip the
  // empty-cache check for it and go straight to the elements check.
  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(0)));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register ecx contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(equal, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  cmp(ecx, isolate()->factory()->empty_slow_element_dictionary());
  j(not_equal, call_runtime);

  bind(&no_elements);
  // Advance to the prototype; a null prototype terminates the walk.
  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}
   3213 
   3214 
// Tests whether an AllocationMemento directly follows the JSArray in
// |receiver_reg|. Falls through with the flags set by the final map
// comparison, so the caller must branch on equal/not_equal; jumps to
// |no_memento_found| when a memento provably cannot be present.
// Clobbers |scratch_reg|.
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  Label map_check;
  Label top_check;
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  const int kMementoMapOffset = JSArray::kSize - kHeapObjectTag;
  const int kMementoEndOffset = kMementoMapOffset + AllocationMemento::kSize;

  // Bail out if the object is not in new space.
  JumpIfNotInNewSpace(receiver_reg, scratch_reg, no_memento_found);
  // If the object is in new space, we need to check whether it is on the same
  // page as the current top.
  lea(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
  // XOR of two addresses on the same page leaves only low (in-page) bits set.
  xor_(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  test(scratch_reg, Immediate(~Page::kPageAlignmentMask));
  j(zero, &top_check);
  // The object is on a different page than allocation top. Bail out if the
  // object sits on the page boundary as no memento can follow and we cannot
  // touch the memory following it.
  lea(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
  xor_(scratch_reg, receiver_reg);
  test(scratch_reg, Immediate(~Page::kPageAlignmentMask));
  j(not_zero, no_memento_found);
  // Continue with the actual map check.
  jmp(&map_check);
  // If top is on the same page as the current object, we need to check whether
  // we are below top.
  bind(&top_check);
  lea(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
  cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  // NOTE(review): signed comparison of addresses; presumably safe because
  // both lie on the same new-space page -- confirm against other ports.
  j(greater, no_memento_found);
  // Memento map check.
  bind(&map_check);
  mov(scratch_reg, Operand(receiver_reg, kMementoMapOffset));
  cmp(scratch_reg, Immediate(isolate()->factory()->allocation_memento_map()));
}
   3254 
   3255 
// Jumps to |found| if any object in |object|'s prototype chain is a special
// receiver type (instance type below JS_OBJECT_TYPE) or has dictionary-mode
// elements. Clobbers |scratch0| and |scratch1|; |object| is preserved.
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  // |current| walks the chain, starting at |object|'s first prototype.
  Register current = scratch0;
  Label loop_again, end;

  // scratch contained elements pointer.
  mov(current, object);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(equal, &end);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  // Proxies and value wrappers sort below JS_OBJECT_TYPE, so a single
  // unsigned below-check catches all special receiver types.
  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  CmpInstanceType(current, JS_OBJECT_TYPE);
  j(below, found);
  // Check the map's elements kind for dictionary-mode elements.
  mov(scratch1, FieldOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  // Advance to the next prototype; null terminates the walk.
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(not_equal, &loop_again);

  bind(&end);
}
   3290 
   3291 
// Emits code computing |dividend| / |divisor| truncated toward zero, using
// the magic-number multiplication technique for division by a constant
// (Granlund & Montgomery; Hacker's Delight ch. 10). The quotient is left in
// edx; eax is clobbered. |dividend| must therefore not alias eax or edx.
void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  DCHECK(!dividend.is(eax));
  DCHECK(!dividend.is(edx));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  // edx:eax = magic_multiplier * dividend; high half lands in edx.
  mov(eax, Immediate(mag.multiplier));
  imul(dividend);
  // Correct for the multiplier's sign bit (it is used as an unsigned value
  // but imul treats it as signed).
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  if (divisor > 0 && neg) add(edx, dividend);
  if (divisor < 0 && !neg && mag.multiplier > 0) sub(edx, dividend);
  if (mag.shift > 0) sar(edx, mag.shift);
  // Add the dividend's sign bit to round the quotient toward zero.
  mov(eax, dividend);
  shr(eax, 31);
  add(edx, eax);
}
   3307 
   3308 
   3309 }  // namespace internal
   3310 }  // namespace v8
   3311 
   3312 #endif  // V8_TARGET_ARCH_X87
   3313