// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"
#include "heap.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      root_array_available_(true) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) {
  Address roots_register_value = kRootRegisterBias +
      reinterpret_cast<Address>(isolate->heap()->roots_address());
  intptr_t delta = other.address() - roots_register_value;
  return delta;
}
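
// A sketch of the root-register addressing used throughout this file:
// kRootRegister (r13) holds isolate->heap()->roots_address() plus
// kRootRegisterBias, so an external reference sitting, say, 40 bytes past
// roots_address() is reachable as
//   Operand(kRootRegister, 40 - kRootRegisterBias)
// whenever the delta computed above fits in 32 bits (the is_int32 checks
// below).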


Operand MacroAssembler::ExternalOperand(ExternalReference target,
                                        Register scratch) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(target, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      return Operand(kRootRegister, static_cast<int32_t>(delta));
    }
  }
  movq(scratch, target);
  return Operand(scratch, 0);
}


void MacroAssembler::Load(Register destination, ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  if (destination.is(rax)) {
    load_rax(source);
  } else {
    movq(kScratchRegister, source);
    movq(destination, Operand(kScratchRegister, 0));
  }
}


void MacroAssembler::Store(ExternalReference destination, Register source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(destination, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
      return;
    }
  }
  // Safe code.
  if (source.is(rax)) {
    store_rax(destination);
  } else {
    movq(kScratchRegister, destination);
    movq(Operand(kScratchRegister, 0), source);
  }
}


void MacroAssembler::LoadAddress(Register destination,
                                 ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  movq(destination, source);
}


int MacroAssembler::LoadAddressSize(ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    // This calculation depends on the internals of LoadAddress.
    // Its correctness is ensured by the checks in the Call methods below.
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      // The instruction measured here is
      // lea(destination, Operand(kRootRegister, delta));
      // Encoding: REX.W 8D ModRM Disp8/Disp32, i.e. 4 or 7 bytes.
      int size = 4;
      if (!is_int8(static_cast<int32_t>(delta))) {
        size += 3;  // Need full four-byte displacement in lea.
      }
      return size;
    }
  }
  // Size of movq(destination, src); a movq with a 64-bit immediate.
  return 10;
}
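
// Byte accounting, for reference: the lea above encodes as REX.W (1 byte) +
// opcode 8D (1) + ModRM (1) + a one-byte or four-byte displacement, giving
// 4 or 7 bytes, while the fallback movq with a 64-bit immediate is
// REX.W + B8+r + imm64, i.e. 10 bytes. These totals must match what
// LoadAddress actually emits, or the CHECK_EQs in the Call methods fire.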


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  movq(destination, Operand(kRootRegister,
                            (index << kPointerSizeLog2) - kRootRegisterBias));
}
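
// The offset computation above is shared by LoadRoot, LoadRootIndexed,
// StoreRoot, PushRoot and CompareRoot below: kRootRegister points
// kRootRegisterBias bytes past the start of the root list, so entry 'index'
// lives at (index * kPointerSize) - kRootRegisterBias relative to it. For
// example, with 8-byte pointers, root index 3 is at displacement
// 24 - kRootRegisterBias.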


void MacroAssembler::LoadRootIndexed(Register destination,
                                     Register variable_offset,
                                     int fixed_offset) {
  ASSERT(root_array_available_);
  movq(destination,
       Operand(kRootRegister,
               variable_offset, times_pointer_size,
               (fixed_offset << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  movq(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
       source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  cmpq(with, Operand(kRootRegister,
                     (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  ASSERT(!with.AddressUsesRegister(kScratchRegister));
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (emit_debug_code()) {
    // Check that the object is not in new space.
    NearLabel not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, Immediate(~Page::kPageAlignmentMask));

  // Compute the number of the region covering addr. See
  // Page::GetRegionNumberForAddress for more details.
  shrl(addr, Immediate(Page::kRegionSizeLog2));
  andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2));

  // Set dirty mark for region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}
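
// Sketch of the dirty-marking scheme above (the region size is whatever
// 1 << Page::kRegionSizeLog2 is for this build): each page is divided into
// equally sized regions, each with one bit in the page's dirty-marks bitmap
// at Page::kDirtyFlagOffset. Masking the object pointer with
// ~Page::kPageAlignmentMask yields the page start; shifting the store
// address right by kRegionSizeLog2 and masking yields the region number,
// which bts then uses as the bit index to set.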


void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (emit_debug_code()) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;
  JumpIfSmi(value, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register index) {
  Label done;

  if (emit_debug_code()) {
    NearLabel okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);

    if (offset == 0) {
      // index must be int32.
      Register tmp = index.is(rax) ? rbx : rax;
      push(tmp);
      movl(tmp, index);
      cmpq(tmp, index);
      Check(equal, "Index register for RecordWrite must be untagged int32.");
      pop(tmp);
    }
  }

  // Test that the object address is not in the new space. We cannot
  // update page dirty marks for new space pages.
  InNewSpace(object, scratch, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = index;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric.
    lea(dst, FieldOperand(object,
                          index,
                          times_pointer_size,
                          FixedArray::kHeaderSize));
  }
  RecordWriteHelper(object, dst, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}

void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    NearLabel ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  NearLabel L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    NearLabel alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  NearLabel ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  AllowStubCallsScope allow_scope(this, true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
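
// Worked example of the encoding above: if msg happens to live at 0x100005
// (low bit set), then p0 = 0x100004, which has a zero low bit and therefore
// a valid smi tag, and p1 - p0 = 1. The runtime receives the smi-tagged
// 0x100004 plus Smi::FromInt(1) and adds them back together to recover the
// original char pointer.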


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  MaybeObject* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
         RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  MaybeObject* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
        RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. Even if we subsequently go to
  // the slow case, converting the key to a smi is always valid.
  // key: string key
  // hash: key's hash field, including its array index value.
  and_(hash, Immediate(String::kArrayIndexValueMask));
  shr(hash, Immediate(String::kHashShift));
  // Here we actually clobber the key which will be used if calling into
  // runtime later. However, as the new key is the numeric value of a
  // string key, there is no difference in using either key.
  Integer32ToSmi(index, hash);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  Set(rax, function->nargs);
  LoadAddress(rbx, ExternalReference(function, isolate()));
  CEntryStub ces(1);
  ces.SaveDoubles();
  CallStub(&ces);
}


MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                            int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f,
                                            int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return HEAP->undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(f->result_size);
  return TryCallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  LoadAddress(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


MaybeObject* MacroAssembler::TryTailCallExternalReference(
    const ExternalReference& ext, int num_arguments, int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  return TryJumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}


MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
                                                int num_arguments,
                                                int result_size) {
  return TryTailCallExternalReference(ExternalReference(fid, isolate()),
                                      num_arguments,
                                      result_size);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that the offset fits into an int.
  ASSERT(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}


void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
#ifdef _WIN64
  // We need to prepare a slot for the result handle on the stack and put
  // a pointer to it into the first argument register.
  EnterApiExitFrame(arg_stack_space + 1);

  // rcx must be used to pass the pointer to the return value slot.
  lea(rcx, StackSpaceOperand(arg_stack_space));
#else
  EnterApiExitFrame(arg_stack_space);
#endif
}


MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
    ApiFunction* function, int stack_space) {
  Label empty_result;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label write_back;

  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(),
      next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());

  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  movq(base_reg, next_address);
  movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
  movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  addl(Operand(base_reg, kLevelOffset), Immediate(1));
  // Call the api function!
  movq(rax,
       reinterpret_cast<int64_t>(function->address()),
       RelocInfo::RUNTIME_ENTRY);
  call(rax);

#ifdef _WIN64
  // rax keeps a pointer to v8::Handle, unpack it.
  movq(rax, Operand(rax, 0));
#endif
  // Check if the result handle holds 0.
  testq(rax, rax);
  j(zero, &empty_result);
  // It was non-zero.  Dereference to get the result value.
  movq(rax, Operand(rax, 0));
  bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  subl(Operand(base_reg, kLevelOffset), Immediate(1));
  movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
  cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  movq(rsi, scheduled_exception_address);
  Cmp(Operand(rsi, 0), FACTORY->the_hole_value());
  j(not_equal, &promote_scheduled_exception);

  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException,
                                           0, 1);
  if (result->IsFailure()) {
    return result;
  }

  bind(&empty_result);
  // It was zero; the result is undefined.
  Move(rax, FACTORY->undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
  movq(prev_limit_reg, rax);
#ifdef _WIN64
  LoadAddress(rcx, ExternalReference::isolate_address());
#else
  LoadAddress(rdi, ExternalReference::isolate_address());
#endif
  LoadAddress(rax,
              ExternalReference::delete_handle_scope_extensions(isolate()));
  call(rax);
  movq(rax, prev_limit_reg);
  jmp(&leave_exit_frame);

  return result;
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  LoadAddress(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryJumpToExternalReference(
    const ExternalReference& ext, int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  LoadAddress(rbx, ext);
  CEntryStub ces(result_size);
  return TryTailCallStub(&ces);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   CallWrapper* call_wrapper) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag, call_wrapper);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  movq(target, FieldOperand(target,
                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(rdi, id);
  movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}
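
// Encoding choice in Set, for reference: xorl is the shortest way to zero a
// register (writing the 32-bit half clears the upper 32 bits on x64), movl
// zero-extends a 32-bit immediate, movq with a 32-bit immediate
// sign-extends, and only values that need more than 32 bits pay for the
// full 10-byte movq with a 64-bit immediate.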

void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    Set(kScratchRegister, x);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}

void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (emit_debug_code()) {
    movq(dst,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
    cmpq(dst, kSmiConstantRegister);
    if (allow_stub_calls()) {
      Assert(equal, "Uninitialized kSmiConstantRegister");
    } else {
      NearLabel ok;
      j(equal, &ok);
      int3();
      bind(&ok);
    }
  }
  int value = source->value();
  if (value == 0) {
    xorl(dst, dst);
    return;
  }
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

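  // The cases below synthesize small smi constants from
  // kSmiConstantRegister, which holds Smi::FromInt(1), instead of
  // materializing a 64-bit immediate. For example, for uvalue == 9 the lea
  // computes kSmiConstantRegister + kSmiConstantRegister * 8, i.e.
  // 9 * Smi::FromInt(1) == Smi::FromInt(9); negative constants are produced
  // by negating afterwards.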
  switch (uvalue) {
    case 9:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 4:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 5:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movq(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();
      return;
    default:
      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
      return;
  }
  if (negative) {
    neg(dst);
  }
}


void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    testb(dst, Immediate(0x01));
    NearLabel ok;
    j(zero, &ok);
    if (allow_stub_calls()) {
      Abort("Integer32ToSmiField writing to non-smi location");
    } else {
      int3();
    }
    bind(&ok);
  }
  ASSERT(kSmiShift % kBitsPerByte == 0);
  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
}
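
// On x64, kSmiShift is 32: a smi keeps its 32-bit payload in the upper half
// of the word and zeros in the lower half. Integer32ToSmiField exploits this
// by writing the value directly into the upper four bytes (offset
// kSmiShift / kBitsPerByte == 4), leaving the already-zero lower half, tag
// included, untouched. The same offset trick appears in the
// SmiToInteger32/SmiToInteger64 Operand overloads below.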


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addl(dst, Immediate(constant));
  } else {
    leal(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
  if (emit_debug_code()) {
    AbortIfNotSmi(smi1);
    AbortIfNotSmi(smi2);
  }
  cmpq(smi1, smi2);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
  }
  Cmp(dst, src);
}


void MacroAssembler::Cmp(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpq(dst, constant_reg);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
    AbortIfNotSmi(src);
  }
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
    AbortIfNotSmi(src);
  }
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
  }
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}


void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
  // The Operand cannot use the smi register.
  Register smi_reg = GetSmiConstant(src);
  ASSERT(!dst.AddressUsesRegister(smi_reg));
  cmpq(dst, smi_reg);
}


void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  ASSERT((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shr(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckSmi(const Operand& src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  // Test that both bits of the mask 0x8000000000000001 are zero.
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}
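
// The rotate trick above: rol by one moves the sign bit (bit 63) into bit 0
// and the smi tag bit (bit 0) into bit 1, so a single testb against 3 checks
// "is a smi" and "is non-negative" at once. CheckBothNonNegativeSmi below
// applies the same trick to the or of two candidate smis.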


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  leal(kScratchRegister, Operand(first, second, times_1, 0));
  testb(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                  Register second) {
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);
  }
  movq(kScratchRegister, first);
  or_(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(!src.is(kScratchRegister));
  // If we overflow by subtracting one, it's the minimal smi value.
  cmpq(src, kSmiConstantRegister);
  return overflow;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testl(src, src);
  return positive;
}


void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
  if (dst.is(src)) {
    andl(dst, Immediate(kSmiTagMask));
  } else {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  }
}


void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
  if (!(src.AddressUsesRegister(dst))) {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  } else {
    movl(dst, src);
    andl(dst, Immediate(kSmiTagMask));
  }
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    switch (constant->value()) {
      case 1:
        addq(dst, kSmiConstantRegister);
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        Register constant_reg = GetSmiConstant(constant);
        addq(dst, constant_reg);
        return;
    }
  } else {
    switch (constant->value()) {
      case 1:
        lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        LoadSmiConstant(dst, constant);
        addq(dst, src);
        return;
    }
  }
}


void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subq(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible.
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  addq(dst, src2);
  Assert(no_overflow, "Smi addition overflow");
}


void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  subq(dst, src2);
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  subq(dst, src2);
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    Set(dst, 0);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    and_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    ASSERT(!src1.is(src2));
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    or_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    ASSERT(!src1.is(src2));
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    xor_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    xor_(dst, src);
  }
}


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2) {
  ASSERT(!dst.is(rcx));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  // Untag the shift amount.
  SmiToInteger32(rcx, src2);
  // The shift amount is given by the lower 5 bits (per JavaScript shift
  // semantics), not the six bits a 64-bit shl would consult.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}


void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift by 32 + (original rcx & 0x1f).
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}
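
// Worked example of the or-trick above: the untagged shift amount n is in
// the range 0..31, so orl with kSmiShift (32) produces exactly 32 + n.
// Arithmetic-shifting the tagged word right by 32 + n untags the smi and
// shifts its value by n in one instruction; the final shl by 32 re-tags
// the result.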


SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  ASSERT(is_uint6(shift));
  // There is a possible optimization if shift is in the range 60-63, but that
  // will (and must) never happen.
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}

   1450 SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
   1451                                             Register src,
   1452                                             int shift) {
   1453   // Register src holds a positive smi.
   1454   ASSERT(is_uint6(shift));
   1455   if (!dst.is(src)) {
   1456     movq(dst, src);
   1457   }
   1458   neg(dst);
   1459   if (shift < kSmiShift) {
   1460     sar(dst, Immediate(kSmiShift - shift));
   1461   } else {
   1462     shl(dst, Immediate(shift - kSmiShift));
   1463   }
   1464   return SmiIndex(dst, times_1);
   1465 }
   1466 
   1467 
   1468 void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
   1469   ASSERT_EQ(0, kSmiShift % kBitsPerByte);
   1470   addl(dst, Operand(src, kSmiShift / kBitsPerByte));
   1471 }
   1472 
   1473 
   1474 
   1475 void MacroAssembler::Move(Register dst, Register src) {
   1476   if (!dst.is(src)) {
   1477     movq(dst, src);
   1478   }
   1479 }
   1480 
   1481 
   1482 void MacroAssembler::Move(Register dst, Handle<Object> source) {
   1483   ASSERT(!source->IsFailure());
   1484   if (source->IsSmi()) {
   1485     Move(dst, Smi::cast(*source));
   1486   } else {
   1487     movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
   1488   }
   1489 }
   1490 
   1491 
   1492 void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
   1493   ASSERT(!source->IsFailure());
   1494   if (source->IsSmi()) {
   1495     Move(dst, Smi::cast(*source));
   1496   } else {
   1497     movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
   1498     movq(dst, kScratchRegister);
   1499   }
   1500 }
   1501 
   1502 
   1503 void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
   1504   if (source->IsSmi()) {
   1505     Cmp(dst, Smi::cast(*source));
   1506   } else {
   1507     Move(kScratchRegister, source);
   1508     cmpq(dst, kScratchRegister);
   1509   }
   1510 }
   1511 
   1512 
   1513 void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
   1514   if (source->IsSmi()) {
   1515     Cmp(dst, Smi::cast(*source));
   1516   } else {
   1517     ASSERT(source->IsHeapObject());
   1518     movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
   1519     cmpq(dst, kScratchRegister);
   1520   }
   1521 }
   1522 
   1523 
   1524 void MacroAssembler::Push(Handle<Object> source) {
   1525   if (source->IsSmi()) {
   1526     Push(Smi::cast(*source));
   1527   } else {
   1528     ASSERT(source->IsHeapObject());
   1529     movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
   1530     push(kScratchRegister);
   1531   }
   1532 }
   1533 
   1534 
   1535 void MacroAssembler::Push(Smi* source) {
   1536   intptr_t smi = reinterpret_cast<intptr_t>(source);
   1537   if (is_int32(smi)) {
   1538     push(Immediate(static_cast<int32_t>(smi)));
   1539   } else {
   1540     Register constant = GetSmiConstant(source);
   1541     push(constant);
   1542   }
   1543 }
   1544 
   1545 
   1546 void MacroAssembler::Drop(int stack_elements) {
   1547   if (stack_elements > 0) {
   1548     addq(rsp, Immediate(stack_elements * kPointerSize));
   1549   }
   1550 }
   1551 
   1552 
   1553 void MacroAssembler::Test(const Operand& src, Smi* source) {
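  // With kSmiShift == 32 the smi payload occupies the high 32 bits of the
  // word, so test only those bits (byte offset kIntSize == 4 on little-endian
  // x64).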
   1554   testl(Operand(src, kIntSize), Immediate(source->value()));
   1555 }
   1556 
   1557 
   1558 void MacroAssembler::Jump(ExternalReference ext) {
   1559   LoadAddress(kScratchRegister, ext);
   1560   jmp(kScratchRegister);
   1561 }
   1562 
   1563 
   1564 void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
   1565   movq(kScratchRegister, destination, rmode);
   1566   jmp(kScratchRegister);
   1567 }
   1568 
   1569 
   1570 void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
   1571   // TODO(X64): Inline this
   1572   jmp(code_object, rmode);
   1573 }
   1574 
   1575 
   1576 int MacroAssembler::CallSize(ExternalReference ext) {
  // Opcode for call kScratchRegister (r10) is: Rex.B FF D2 (three bytes).
   1578   const int kCallInstructionSize = 3;
   1579   return LoadAddressSize(ext) + kCallInstructionSize;
   1580 }
   1581 
   1582 
   1583 void MacroAssembler::Call(ExternalReference ext) {
   1584 #ifdef DEBUG
   1585   int end_position = pc_offset() + CallSize(ext);
   1586 #endif
   1587   LoadAddress(kScratchRegister, ext);
   1588   call(kScratchRegister);
   1589 #ifdef DEBUG
   1590   CHECK_EQ(end_position, pc_offset());
   1591 #endif
   1592 }
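
// The DEBUG bracket in Call above verifies that the emitted code matches the
// size reported by CallSize; callers such as CallWrapper::BeforeCall (see
// InvokeCode below) depend on that size being exact.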
   1593 
   1594 
   1595 void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
   1596 #ifdef DEBUG
   1597   int end_position = pc_offset() + CallSize(destination, rmode);
   1598 #endif
   1599   movq(kScratchRegister, destination, rmode);
   1600   call(kScratchRegister);
   1601 #ifdef DEBUG
   1602   CHECK_EQ(pc_offset(), end_position);
   1603 #endif
   1604 }
   1605 
   1606 
   1607 void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
   1608 #ifdef DEBUG
   1609   int end_position = pc_offset() + CallSize(code_object);
   1610 #endif
   1611   ASSERT(RelocInfo::IsCodeTarget(rmode));
   1612   call(code_object, rmode);
   1613 #ifdef DEBUG
   1614   CHECK_EQ(end_position, pc_offset());
   1615 #endif
   1616 }
   1617 
   1618 
   1619 void MacroAssembler::Pushad() {
   1620   push(rax);
   1621   push(rcx);
   1622   push(rdx);
   1623   push(rbx);
   1624   // Not pushing rsp or rbp.
   1625   push(rsi);
   1626   push(rdi);
   1627   push(r8);
   1628   push(r9);
   1629   // r10 is kScratchRegister.
   1630   push(r11);
   1631   // r12 is kSmiConstantRegister.
   1632   // r13 is kRootRegister.
   1633   push(r14);
   1634   push(r15);
   1635   STATIC_ASSERT(11 == kNumSafepointSavedRegisters);
   1636   // Use lea for symmetry with Popad.
   1637   int sp_delta =
   1638       (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
   1639   lea(rsp, Operand(rsp, -sp_delta));
   1640 }
   1641 
   1642 
   1643 void MacroAssembler::Popad() {
   1644   // Popad must not change the flags, so use lea instead of addq.
   1645   int sp_delta =
   1646       (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
   1647   lea(rsp, Operand(rsp, sp_delta));
   1648   pop(r15);
   1649   pop(r14);
   1650   pop(r11);
   1651   pop(r9);
   1652   pop(r8);
   1653   pop(rdi);
   1654   pop(rsi);
   1655   pop(rbx);
   1656   pop(rdx);
   1657   pop(rcx);
   1658   pop(rax);
   1659 }
   1660 
   1661 
   1662 void MacroAssembler::Dropad() {
   1663   addq(rsp, Immediate(kNumSafepointRegisters * kPointerSize));
   1664 }
   1665 
   1666 
// The order in which general registers are pushed by Pushad:
   1668 // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
   1669 int MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
   1670     0,
   1671     1,
   1672     2,
   1673     3,
   1674     -1,
   1675     -1,
   1676     4,
   1677     5,
   1678     6,
   1679     7,
   1680     -1,
   1681     8,
   1682     -1,
   1683     -1,
   1684     9,
   1685     10
   1686 };
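
// Example of the mapping (illustrative): register codes index this table, so
// rax (code 0) maps to safepoint slot 0, rbx (code 3) to slot 3 and r11
// (code 11) to slot 8, while rsp, rbp, r10 (kScratchRegister), r12
// (kSmiConstantRegister) and r13 (kRootRegister) are not saved and map to -1.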
   1687 
   1688 
   1689 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
   1690   movq(SafepointRegisterSlot(dst), src);
   1691 }
   1692 
   1693 
   1694 void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
   1695   movq(dst, SafepointRegisterSlot(src));
   1696 }
   1697 
   1698 
   1699 Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
   1700   return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
   1701 }
   1702 
   1703 
   1704 void MacroAssembler::PushTryHandler(CodeLocation try_location,
   1705                                     HandlerType type) {
  // The handler is assumed below to occupy four pointer-sized words; adjust
  // this code if that changes.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
   1708 
   1709   // The pc (return address) is already on TOS.  This code pushes state,
   1710   // frame pointer and current handler.  Check that they are expected
   1711   // next on the stack, in that order.
   1712   ASSERT_EQ(StackHandlerConstants::kStateOffset,
   1713             StackHandlerConstants::kPCOffset - kPointerSize);
   1714   ASSERT_EQ(StackHandlerConstants::kFPOffset,
   1715             StackHandlerConstants::kStateOffset - kPointerSize);
   1716   ASSERT_EQ(StackHandlerConstants::kNextOffset,
   1717             StackHandlerConstants::kFPOffset - kPointerSize);
   1718 
   1719   if (try_location == IN_JAVASCRIPT) {
   1720     if (type == TRY_CATCH_HANDLER) {
   1721       push(Immediate(StackHandler::TRY_CATCH));
   1722     } else {
   1723       push(Immediate(StackHandler::TRY_FINALLY));
   1724     }
   1725     push(rbp);
   1726   } else {
   1727     ASSERT(try_location == IN_JS_ENTRY);
   1728     // The frame pointer does not point to a JS frame so we save NULL
   1729     // for rbp. We expect the code throwing an exception to check rbp
   1730     // before dereferencing it to restore the context.
   1731     push(Immediate(StackHandler::ENTRY));
   1732     push(Immediate(0));  // NULL frame pointer.
   1733   }
   1734   // Save the current handler.
   1735   Operand handler_operand =
   1736       ExternalOperand(ExternalReference(Isolate::k_handler_address, isolate()));
   1737   push(handler_operand);
   1738   // Link this handler.
   1739   movq(handler_operand, rsp);
   1740 }
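
// Sketch of the handler frame PushTryHandler leaves on the stack (offsets
// from rsp, matching the StackHandlerConstants asserts above):
//   rsp + 24 : return address (kPCOffset, already pushed by the caller)
//   rsp + 16 : state (kStateOffset: TRY_CATCH, TRY_FINALLY or ENTRY)
//   rsp +  8 : frame pointer (kFPOffset: rbp, or NULL for JS entry frames)
//   rsp +  0 : next handler (kNextOffset: the previous chain top)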
   1741 
   1742 
   1743 void MacroAssembler::PopTryHandler() {
   1744   ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
   1745   // Unlink this handler.
   1746   Operand handler_operand =
   1747       ExternalOperand(ExternalReference(Isolate::k_handler_address, isolate()));
   1748   pop(handler_operand);
   1749   // Remove the remaining fields.
   1750   addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
   1751 }
   1752 
   1753 
   1754 void MacroAssembler::Throw(Register value) {
  // Check that the stack contains the next handler, frame pointer, state and
  // return address, in that order.
   1757   STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
   1758             StackHandlerConstants::kStateOffset);
   1759   STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
   1760             StackHandlerConstants::kPCOffset);
   1761   // Keep thrown value in rax.
   1762   if (!value.is(rax)) {
   1763     movq(rax, value);
   1764   }
   1765 
   1766   ExternalReference handler_address(Isolate::k_handler_address, isolate());
   1767   Operand handler_operand = ExternalOperand(handler_address);
   1768   movq(rsp, handler_operand);
  // Get the next handler in the chain.
  pop(handler_operand);
  pop(rbp);  // Pop the frame pointer.
  pop(rdx);  // Remove the state.

  // Before returning we restore the context from the frame pointer if not NULL.
  // The frame pointer is NULL in the exception handler of a JS entry frame.
  Set(rsi, 0);  // Tentatively set context pointer to NULL.
   1777   NearLabel skip;
   1778   cmpq(rbp, Immediate(0));
   1779   j(equal, &skip);
   1780   movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
   1781   bind(&skip);
   1782   ret(0);
   1783 }
   1784 
   1785 
   1786 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
   1787                                       Register value) {
   1788   // Keep thrown value in rax.
   1789   if (!value.is(rax)) {
   1790     movq(rax, value);
   1791   }
   1792   // Fetch top stack handler.
   1793   ExternalReference handler_address(Isolate::k_handler_address, isolate());
   1794   Load(rsp, handler_address);
   1795 
   1796   // Unwind the handlers until the ENTRY handler is found.
   1797   NearLabel loop, done;
   1798   bind(&loop);
   1799   // Load the type of the current stack handler.
   1800   const int kStateOffset = StackHandlerConstants::kStateOffset;
   1801   cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
   1802   j(equal, &done);
   1803   // Fetch the next handler in the list.
   1804   const int kNextOffset = StackHandlerConstants::kNextOffset;
   1805   movq(rsp, Operand(rsp, kNextOffset));
   1806   jmp(&loop);
   1807   bind(&done);
   1808 
   1809   // Set the top handler address to next handler past the current ENTRY handler.
   1810   Operand handler_operand = ExternalOperand(handler_address);
   1811   pop(handler_operand);
   1812 
   1813   if (type == OUT_OF_MEMORY) {
   1814     // Set external caught exception to false.
   1815     ExternalReference external_caught(
   1816         Isolate::k_external_caught_exception_address, isolate());
   1817     Set(rax, static_cast<int64_t>(false));
   1818     Store(external_caught, rax);
   1819 
   1820     // Set pending exception and rax to out of memory exception.
   1821     ExternalReference pending_exception(Isolate::k_pending_exception_address,
   1822                                         isolate());
   1823     movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
   1824     Store(pending_exception, rax);
   1825   }
   1826 
   1827   // Clear the context pointer.
   1828   Set(rsi, 0);
   1829 
   1830   // Restore registers from handler.
   1831   STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize ==
   1832                 StackHandlerConstants::kFPOffset);
   1833   pop(rbp);  // FP
   1834   STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
   1835                 StackHandlerConstants::kStateOffset);
   1836   pop(rdx);  // State
   1837 
   1838   STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
   1839                 StackHandlerConstants::kPCOffset);
   1840   ret(0);
   1841 }
   1842 
   1843 
   1844 void MacroAssembler::Ret() {
   1845   ret(0);
   1846 }
   1847 
   1848 
   1849 void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
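  // The ret instruction encodes only a 16-bit immediate, so larger
  // adjustments pop the return address, bump rsp manually and push the
  // return address back before returning.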
   1850   if (is_uint16(bytes_dropped)) {
   1851     ret(bytes_dropped);
   1852   } else {
   1853     pop(scratch);
   1854     addq(rsp, Immediate(bytes_dropped));
   1855     push(scratch);
   1856     ret(0);
   1857   }
   1858 }
   1859 
   1860 
   1861 void MacroAssembler::FCmp() {
   1862   fucomip();
   1863   fstp(0);
   1864 }
   1865 
   1866 
   1867 void MacroAssembler::CmpObjectType(Register heap_object,
   1868                                    InstanceType type,
   1869                                    Register map) {
   1870   movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
   1871   CmpInstanceType(map, type);
   1872 }
   1873 
   1874 
   1875 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
   1876   cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
   1877        Immediate(static_cast<int8_t>(type)));
   1878 }
   1879 
   1880 
   1881 void MacroAssembler::CheckMap(Register obj,
   1882                               Handle<Map> map,
   1883                               Label* fail,
   1884                               bool is_heap_object) {
   1885   if (!is_heap_object) {
   1886     JumpIfSmi(obj, fail);
   1887   }
   1888   Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
   1889   j(not_equal, fail);
   1890 }
   1891 
   1892 
   1893 void MacroAssembler::AbortIfNotNumber(Register object) {
   1894   NearLabel ok;
   1895   Condition is_smi = CheckSmi(object);
   1896   j(is_smi, &ok);
   1897   Cmp(FieldOperand(object, HeapObject::kMapOffset),
   1898       FACTORY->heap_number_map());
   1899   Assert(equal, "Operand not a number");
   1900   bind(&ok);
   1901 }
   1902 
   1903 
   1904 void MacroAssembler::AbortIfSmi(Register object) {
   1906   Condition is_smi = CheckSmi(object);
   1907   Assert(NegateCondition(is_smi), "Operand is a smi");
   1908 }
   1909 
   1910 
   1911 void MacroAssembler::AbortIfNotSmi(Register object) {
   1912   Condition is_smi = CheckSmi(object);
   1913   Assert(is_smi, "Operand is not a smi");
   1914 }
   1915 
   1916 
   1917 void MacroAssembler::AbortIfNotSmi(const Operand& object) {
   1918   Condition is_smi = CheckSmi(object);
   1919   Assert(is_smi, "Operand is not a smi");
   1920 }
   1921 
   1922 
   1923 void MacroAssembler::AbortIfNotString(Register object) {
   1924   testb(object, Immediate(kSmiTagMask));
   1925   Assert(not_equal, "Operand is not a string");
   1926   push(object);
   1927   movq(object, FieldOperand(object, HeapObject::kMapOffset));
   1928   CmpInstanceType(object, FIRST_NONSTRING_TYPE);
   1929   pop(object);
   1930   Assert(below, "Operand is not a string");
   1931 }
   1932 
   1933 
   1934 void MacroAssembler::AbortIfNotRootValue(Register src,
   1935                                          Heap::RootListIndex root_value_index,
   1936                                          const char* message) {
   1937   ASSERT(!src.is(kScratchRegister));
   1938   LoadRoot(kScratchRegister, root_value_index);
   1939   cmpq(src, kScratchRegister);
   1940   Check(equal, message);
   1941 }


   1945 Condition MacroAssembler::IsObjectStringType(Register heap_object,
   1946                                              Register map,
   1947                                              Register instance_type) {
   1948   movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
   1949   movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
   1950   ASSERT(kNotStringTag != 0);
   1951   testb(instance_type, Immediate(kIsNotStringMask));
   1952   return zero;
   1953 }
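
// Usage sketch (illustrative, not from the original source):
//   Label on_string;
//   Condition is_string = IsObjectStringType(rax, rbx, rcx);
//   j(is_string, &on_string);  // The zero flag is set for string types.
//   // Fall through: not a string.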
   1954 
   1955 
   1956 void MacroAssembler::TryGetFunctionPrototype(Register function,
   1957                                              Register result,
   1958                                              Label* miss) {
   1959   // Check that the receiver isn't a smi.
   1960   testl(function, Immediate(kSmiTagMask));
   1961   j(zero, miss);
   1962 
   1963   // Check that the function really is a function.
   1964   CmpObjectType(function, JS_FUNCTION_TYPE, result);
   1965   j(not_equal, miss);
   1966 
   1967   // Make sure that the function has an instance prototype.
   1968   NearLabel non_instance;
   1969   testb(FieldOperand(result, Map::kBitFieldOffset),
   1970         Immediate(1 << Map::kHasNonInstancePrototype));
   1971   j(not_zero, &non_instance);
   1972 
   1973   // Get the prototype or initial map from the function.
   1974   movq(result,
   1975        FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
   1976 
   1977   // If the prototype or initial map is the hole, don't return it and
   1978   // simply miss the cache instead. This will allow us to allocate a
   1979   // prototype object on-demand in the runtime system.
   1980   CompareRoot(result, Heap::kTheHoleValueRootIndex);
   1981   j(equal, miss);
   1982 
   1983   // If the function does not have an initial map, we're done.
   1984   NearLabel done;
   1985   CmpObjectType(result, MAP_TYPE, kScratchRegister);
   1986   j(not_equal, &done);
   1987 
   1988   // Get the prototype from the initial map.
   1989   movq(result, FieldOperand(result, Map::kPrototypeOffset));
   1990   jmp(&done);
   1991 
   1992   // Non-instance prototype: Fetch prototype from constructor field
   1993   // in initial map.
   1994   bind(&non_instance);
   1995   movq(result, FieldOperand(result, Map::kConstructorOffset));
   1996 
   1997   // All done.
   1998   bind(&done);
   1999 }
   2000 
   2001 
   2002 void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
   2003   if (FLAG_native_code_counters && counter->Enabled()) {
   2004     Operand counter_operand = ExternalOperand(ExternalReference(counter));
   2005     movl(counter_operand, Immediate(value));
   2006   }
   2007 }
   2008 
   2009 
   2010 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
   2011   ASSERT(value > 0);
   2012   if (FLAG_native_code_counters && counter->Enabled()) {
   2013     Operand counter_operand = ExternalOperand(ExternalReference(counter));
   2014     if (value == 1) {
   2015       incl(counter_operand);
   2016     } else {
   2017       addl(counter_operand, Immediate(value));
   2018     }
   2019   }
   2020 }
   2021 
   2022 
   2023 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
   2024   ASSERT(value > 0);
   2025   if (FLAG_native_code_counters && counter->Enabled()) {
   2026     Operand counter_operand = ExternalOperand(ExternalReference(counter));
   2027     if (value == 1) {
   2028       decl(counter_operand);
   2029     } else {
   2030       subl(counter_operand, Immediate(value));
   2031     }
   2032   }
   2033 }
   2034 
   2035 
   2036 #ifdef ENABLE_DEBUGGER_SUPPORT
   2037 void MacroAssembler::DebugBreak() {
   2038   ASSERT(allow_stub_calls());
   2039   Set(rax, 0);  // No arguments.
   2040   LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
   2041   CEntryStub ces(1);
   2042   Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
   2043 }
   2044 #endif  // ENABLE_DEBUGGER_SUPPORT
   2045 
   2046 
   2047 void MacroAssembler::InvokeCode(Register code,
   2048                                 const ParameterCount& expected,
   2049                                 const ParameterCount& actual,
   2050                                 InvokeFlag flag,
   2051                                 CallWrapper* call_wrapper) {
   2052   NearLabel done;
   2053   InvokePrologue(expected,
   2054                  actual,
   2055                  Handle<Code>::null(),
   2056                  code,
   2057                  &done,
   2058                  flag,
   2059                  call_wrapper);
   2060   if (flag == CALL_FUNCTION) {
   2061     if (call_wrapper != NULL) call_wrapper->BeforeCall(CallSize(code));
   2062     call(code);
   2063     if (call_wrapper != NULL) call_wrapper->AfterCall();
   2064   } else {
   2065     ASSERT(flag == JUMP_FUNCTION);
   2066     jmp(code);
   2067   }
   2068   bind(&done);
   2069 }
   2070 
   2071 
   2072 void MacroAssembler::InvokeCode(Handle<Code> code,
   2073                                 const ParameterCount& expected,
   2074                                 const ParameterCount& actual,
   2075                                 RelocInfo::Mode rmode,
   2076                                 InvokeFlag flag,
   2077                                 CallWrapper* call_wrapper) {
   2078   NearLabel done;
   2079   Register dummy = rax;
   2080   InvokePrologue(expected,
   2081                  actual,
   2082                  code,
   2083                  dummy,
   2084                  &done,
   2085                  flag,
   2086                  call_wrapper);
   2087   if (flag == CALL_FUNCTION) {
   2088     if (call_wrapper != NULL) call_wrapper->BeforeCall(CallSize(code));
   2089     Call(code, rmode);
   2090     if (call_wrapper != NULL) call_wrapper->AfterCall();
   2091   } else {
   2092     ASSERT(flag == JUMP_FUNCTION);
   2093     Jump(code, rmode);
   2094   }
   2095   bind(&done);
   2096 }
   2097 
   2098 
   2099 void MacroAssembler::InvokeFunction(Register function,
   2100                                     const ParameterCount& actual,
   2101                                     InvokeFlag flag,
   2102                                     CallWrapper* call_wrapper) {
   2103   ASSERT(function.is(rdi));
   2104   movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
   2105   movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
   2106   movsxlq(rbx,
   2107           FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
   2108   // Advances rdx to the end of the Code object header, to the start of
   2109   // the executable code.
   2110   movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
   2111 
   2112   ParameterCount expected(rbx);
   2113   InvokeCode(rdx, expected, actual, flag, call_wrapper);
   2114 }
   2115 
   2116 
   2117 void MacroAssembler::InvokeFunction(JSFunction* function,
   2118                                     const ParameterCount& actual,
   2119                                     InvokeFlag flag,
   2120                                     CallWrapper* call_wrapper) {
   2121   ASSERT(function->is_compiled());
  // Get the function and set up the context.
   2123   Move(rdi, Handle<JSFunction>(function));
   2124   movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
   2125 
   2126   if (V8::UseCrankshaft()) {
   2127     // Since Crankshaft can recompile a function, we need to load
   2128     // the Code object every time we call the function.
   2129     movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
   2130     ParameterCount expected(function->shared()->formal_parameter_count());
   2131     InvokeCode(rdx, expected, actual, flag, call_wrapper);
   2132   } else {
   2133     // Invoke the cached code.
   2134     Handle<Code> code(function->code());
   2135     ParameterCount expected(function->shared()->formal_parameter_count());
   2136     InvokeCode(code,
   2137                expected,
   2138                actual,
   2139                RelocInfo::CODE_TARGET,
   2140                flag,
   2141                call_wrapper);
   2142   }
   2143 }
   2144 
   2145 
   2146 void MacroAssembler::EnterFrame(StackFrame::Type type) {
   2147   push(rbp);
   2148   movq(rbp, rsp);
   2149   push(rsi);  // Context.
   2150   Push(Smi::FromInt(type));
   2151   movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
   2152   push(kScratchRegister);
   2153   if (emit_debug_code()) {
   2154     movq(kScratchRegister,
   2155          FACTORY->undefined_value(),
   2156          RelocInfo::EMBEDDED_OBJECT);
   2157     cmpq(Operand(rsp, 0), kScratchRegister);
   2158     Check(not_equal, "code object not properly patched");
   2159   }
   2160 }
   2161 
   2162 
   2163 void MacroAssembler::LeaveFrame(StackFrame::Type type) {
   2164   if (emit_debug_code()) {
   2165     Move(kScratchRegister, Smi::FromInt(type));
   2166     cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
   2167     Check(equal, "stack frame types must match");
   2168   }
   2169   movq(rsp, rbp);
   2170   pop(rbp);
   2171 }
   2172 
   2173 
   2174 void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  // Set up the frame structure on the stack.
   2176   // All constants are relative to the frame pointer of the exit frame.
   2177   ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
   2178   ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
   2179   ASSERT(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
   2180   push(rbp);
   2181   movq(rbp, rsp);
   2182 
   2183   // Reserve room for entry stack pointer and push the code object.
   2184   ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
   2185   push(Immediate(0));  // Saved entry sp, patched before call.
   2186   movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);  // Accessed from ExitFrame::code_slot.
   2188 
   2189   // Save the frame pointer and the context in top.
   2190   if (save_rax) {
   2191     movq(r14, rax);  // Backup rax in callee-save register.
   2192   }
   2193 
   2194   Store(ExternalReference(Isolate::k_c_entry_fp_address, isolate()), rbp);
   2195   Store(ExternalReference(Isolate::k_context_address, isolate()), rsi);
   2196 }
   2197 
   2198 
   2199 void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
   2200                                             bool save_doubles) {
   2201 #ifdef _WIN64
   2202   const int kShadowSpace = 4;
   2203   arg_stack_space += kShadowSpace;
   2204 #endif
   2205   // Optionally save all XMM registers.
   2206   if (save_doubles) {
   2207     int space = XMMRegister::kNumRegisters * kDoubleSize +
   2208         arg_stack_space * kPointerSize;
   2209     subq(rsp, Immediate(space));
   2210     int offset = -2 * kPointerSize;
   2211     for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
   2212       XMMRegister reg = XMMRegister::FromAllocationIndex(i);
   2213       movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
   2214     }
   2215   } else if (arg_stack_space > 0) {
   2216     subq(rsp, Immediate(arg_stack_space * kPointerSize));
   2217   }
   2218 
   2219   // Get the required frame alignment for the OS.
   2220   const int kFrameAlignment = OS::ActivationFrameAlignment();
   2221   if (kFrameAlignment > 0) {
   2222     ASSERT(IsPowerOf2(kFrameAlignment));
   2223     ASSERT(is_int8(kFrameAlignment));
   2224     and_(rsp, Immediate(-kFrameAlignment));
   2225   }
   2226 
   2227   // Patch the saved entry sp.
   2228   movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
   2229 }
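
// Sketch of the resulting exit frame (offsets relative to rbp; illustrative):
//   rbp + 16 : caller SP (kCallerSPDisplacement)
//   rbp +  8 : return address (kCallerPCOffset)
//   rbp +  0 : saved rbp (kCallerFPOffset)
//   rbp -  8 : saved entry sp slot (kSPOffset, patched above)
//   rbp - 16 : code object
//   below    : optional XMM register saves and C argument slots, with rsp
//              aligned to OS::ActivationFrameAlignment().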
   2230 
   2231 
   2232 void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
   2233   EnterExitFramePrologue(true);
   2234 
  // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
   2236   // so it must be retained across the C-call.
   2237   int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
   2238   lea(r15, Operand(rbp, r14, times_pointer_size, offset));
   2239 
   2240   EnterExitFrameEpilogue(arg_stack_space, save_doubles);
   2241 }
   2242 
   2243 
   2244 void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
   2245   EnterExitFramePrologue(false);
   2246   EnterExitFrameEpilogue(arg_stack_space, false);
   2247 }
   2248 
   2249 
   2250 void MacroAssembler::LeaveExitFrame(bool save_doubles) {
   2251   // Registers:
   2252   // r15 : argv
   2253   if (save_doubles) {
   2254     int offset = -2 * kPointerSize;
   2255     for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
   2256       XMMRegister reg = XMMRegister::FromAllocationIndex(i);
   2257       movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
   2258     }
   2259   }
   2260   // Get the return address from the stack and restore the frame pointer.
   2261   movq(rcx, Operand(rbp, 1 * kPointerSize));
   2262   movq(rbp, Operand(rbp, 0 * kPointerSize));
   2263 
   2264   // Drop everything up to and including the arguments and the receiver
   2265   // from the caller stack.
   2266   lea(rsp, Operand(r15, 1 * kPointerSize));
   2267 
   2268   // Push the return address to get ready to return.
   2269   push(rcx);
   2270 
   2271   LeaveExitFrameEpilogue();
   2272 }
   2273 
   2274 
   2275 void MacroAssembler::LeaveApiExitFrame() {
   2276   movq(rsp, rbp);
   2277   pop(rbp);
   2278 
   2279   LeaveExitFrameEpilogue();
   2280 }
   2281 
   2282 
   2283 void MacroAssembler::LeaveExitFrameEpilogue() {
   2284   // Restore current context from top and clear it in debug mode.
   2285   ExternalReference context_address(Isolate::k_context_address, isolate());
   2286   Operand context_operand = ExternalOperand(context_address);
   2287   movq(rsi, context_operand);
   2288 #ifdef DEBUG
   2289   movq(context_operand, Immediate(0));
   2290 #endif
   2291 
   2292   // Clear the top frame.
   2293   ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
   2294                                        isolate());
   2295   Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
   2296   movq(c_entry_fp_operand, Immediate(0));
   2297 }
   2298 
   2299 
   2300 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
   2301                                             Register scratch,
   2302                                             Label* miss) {
   2303   Label same_contexts;
   2304 
   2305   ASSERT(!holder_reg.is(scratch));
   2306   ASSERT(!scratch.is(kScratchRegister));
   2307   // Load current lexical context from the stack frame.
   2308   movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
   2309 
   2310   // When generating debug code, make sure the lexical context is set.
   2311   if (emit_debug_code()) {
   2312     cmpq(scratch, Immediate(0));
   2313     Check(not_equal, "we should not have an empty lexical context");
   2314   }
   2315   // Load the global context of the current context.
   2316   int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
   2317   movq(scratch, FieldOperand(scratch, offset));
   2318   movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
   2319 
   2320   // Check the context is a global context.
   2321   if (emit_debug_code()) {
   2322     Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
   2323         FACTORY->global_context_map());
   2324     Check(equal, "JSGlobalObject::global_context should be a global context.");
   2325   }
   2326 
   2327   // Check if both contexts are the same.
   2328   cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
   2329   j(equal, &same_contexts);
   2330 
   2331   // Compare security tokens.
   2332   // Check that the security token in the calling global object is
   2333   // compatible with the security token in the receiving global
   2334   // object.
   2335 
   2336   // Check the context is a global context.
   2337   if (emit_debug_code()) {
   2338     // Preserve original value of holder_reg.
   2339     push(holder_reg);
   2340     movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
   2341     CompareRoot(holder_reg, Heap::kNullValueRootIndex);
   2342     Check(not_equal, "JSGlobalProxy::context() should not be null.");
   2343 
    // Read the first word and compare to global_context_map().
   2345     movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
   2346     CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
   2347     Check(equal, "JSGlobalObject::global_context should be a global context.");
   2348     pop(holder_reg);
   2349   }
   2350 
   2351   movq(kScratchRegister,
   2352        FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
   2353   int token_offset =
   2354       Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
   2355   movq(scratch, FieldOperand(scratch, token_offset));
   2356   cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
   2357   j(not_equal, miss);
   2358 
   2359   bind(&same_contexts);
   2360 }
   2361 
   2362 
   2363 void MacroAssembler::LoadAllocationTopHelper(Register result,
   2364                                              Register scratch,
   2365                                              AllocationFlags flags) {
   2366   ExternalReference new_space_allocation_top =
   2367       ExternalReference::new_space_allocation_top_address(isolate());
   2368 
   2369   // Just return if allocation top is already known.
   2370   if ((flags & RESULT_CONTAINS_TOP) != 0) {
   2371     // No use of scratch if allocation top is provided.
   2372     ASSERT(!scratch.is_valid());
   2373 #ifdef DEBUG
   2374     // Assert that result actually contains top on entry.
   2375     Operand top_operand = ExternalOperand(new_space_allocation_top);
   2376     cmpq(result, top_operand);
   2377     Check(equal, "Unexpected allocation top");
   2378 #endif
   2379     return;
   2380   }
   2381 
   2382   // Move address of new object to result. Use scratch register if available,
   2383   // and keep address in scratch until call to UpdateAllocationTopHelper.
   2384   if (scratch.is_valid()) {
   2385     LoadAddress(scratch, new_space_allocation_top);
   2386     movq(result, Operand(scratch, 0));
   2387   } else {
   2388     Load(result, new_space_allocation_top);
   2389   }
   2390 }
   2391 
   2392 
   2393 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
   2394                                                Register scratch) {
   2395   if (emit_debug_code()) {
   2396     testq(result_end, Immediate(kObjectAlignmentMask));
   2397     Check(zero, "Unaligned allocation in new space");
   2398   }
   2399 
   2400   ExternalReference new_space_allocation_top =
   2401       ExternalReference::new_space_allocation_top_address(isolate());
   2402 
   2403   // Update new top.
   2404   if (scratch.is_valid()) {
   2405     // Scratch already contains address of allocation top.
   2406     movq(Operand(scratch, 0), result_end);
   2407   } else {
   2408     Store(new_space_allocation_top, result_end);
   2409   }
   2410 }
   2411 
   2412 
   2413 void MacroAssembler::AllocateInNewSpace(int object_size,
   2414                                         Register result,
   2415                                         Register result_end,
   2416                                         Register scratch,
   2417                                         Label* gc_required,
   2418                                         AllocationFlags flags) {
   2419   if (!FLAG_inline_new) {
   2420     if (emit_debug_code()) {
   2421       // Trash the registers to simulate an allocation failure.
   2422       movl(result, Immediate(0x7091));
   2423       if (result_end.is_valid()) {
   2424         movl(result_end, Immediate(0x7191));
   2425       }
   2426       if (scratch.is_valid()) {
   2427         movl(scratch, Immediate(0x7291));
   2428       }
   2429     }
   2430     jmp(gc_required);
   2431     return;
   2432   }
   2433   ASSERT(!result.is(result_end));
   2434 
   2435   // Load address of new object into result.
   2436   LoadAllocationTopHelper(result, scratch, flags);
   2437 
   2438   // Calculate new top and bail out if new space is exhausted.
   2439   ExternalReference new_space_allocation_limit =
   2440       ExternalReference::new_space_allocation_limit_address(isolate());
   2441 
   2442   Register top_reg = result_end.is_valid() ? result_end : result;
   2443 
   2444   if (!top_reg.is(result)) {
   2445     movq(top_reg, result);
   2446   }
   2447   addq(top_reg, Immediate(object_size));
   2448   j(carry, gc_required);
   2449   Operand limit_operand = ExternalOperand(new_space_allocation_limit);
   2450   cmpq(top_reg, limit_operand);
   2451   j(above, gc_required);
   2452 
   2453   // Update allocation top.
   2454   UpdateAllocationTopHelper(top_reg, scratch);
   2455 
   2456   if (top_reg.is(result)) {
   2457     if ((flags & TAG_OBJECT) != 0) {
   2458       subq(result, Immediate(object_size - kHeapObjectTag));
   2459     } else {
   2460       subq(result, Immediate(object_size));
   2461     }
   2462   } else if ((flags & TAG_OBJECT) != 0) {
   2463     // Tag the result if requested.
   2464     addq(result, Immediate(kHeapObjectTag));
   2465   }
   2466 }
   2467 
   2468 
   2469 void MacroAssembler::AllocateInNewSpace(int header_size,
   2470                                         ScaleFactor element_size,
   2471                                         Register element_count,
   2472                                         Register result,
   2473                                         Register result_end,
   2474                                         Register scratch,
   2475                                         Label* gc_required,
   2476                                         AllocationFlags flags) {
   2477   if (!FLAG_inline_new) {
   2478     if (emit_debug_code()) {
   2479       // Trash the registers to simulate an allocation failure.
   2480       movl(result, Immediate(0x7091));
   2481       movl(result_end, Immediate(0x7191));
   2482       if (scratch.is_valid()) {
   2483         movl(scratch, Immediate(0x7291));
   2484       }
   2485       // Register element_count is not modified by the function.
   2486     }
   2487     jmp(gc_required);
   2488     return;
   2489   }
   2490   ASSERT(!result.is(result_end));
   2491 
   2492   // Load address of new object into result.
   2493   LoadAllocationTopHelper(result, scratch, flags);
   2494 
   2495   // Calculate new top and bail out if new space is exhausted.
   2496   ExternalReference new_space_allocation_limit =
   2497       ExternalReference::new_space_allocation_limit_address(isolate());
   2498 
   2499   // We assume that element_count*element_size + header_size does not
   2500   // overflow.
   2501   lea(result_end, Operand(element_count, element_size, header_size));
   2502   addq(result_end, result);
   2503   j(carry, gc_required);
   2504   Operand limit_operand = ExternalOperand(new_space_allocation_limit);
   2505   cmpq(result_end, limit_operand);
   2506   j(above, gc_required);
   2507 
   2508   // Update allocation top.
   2509   UpdateAllocationTopHelper(result_end, scratch);
   2510 
   2511   // Tag the result if requested.
   2512   if ((flags & TAG_OBJECT) != 0) {
   2513     addq(result, Immediate(kHeapObjectTag));
   2514   }
   2515 }
   2516 
   2517 
   2518 void MacroAssembler::AllocateInNewSpace(Register object_size,
   2519                                         Register result,
   2520                                         Register result_end,
   2521                                         Register scratch,
   2522                                         Label* gc_required,
   2523                                         AllocationFlags flags) {
   2524   if (!FLAG_inline_new) {
   2525     if (emit_debug_code()) {
   2526       // Trash the registers to simulate an allocation failure.
   2527       movl(result, Immediate(0x7091));
   2528       movl(result_end, Immediate(0x7191));
   2529       if (scratch.is_valid()) {
   2530         movl(scratch, Immediate(0x7291));
   2531       }
   2532       // object_size is left unchanged by this function.
   2533     }
   2534     jmp(gc_required);
   2535     return;
   2536   }
   2537   ASSERT(!result.is(result_end));
   2538 
   2539   // Load address of new object into result.
   2540   LoadAllocationTopHelper(result, scratch, flags);
   2541 
   2542   // Calculate new top and bail out if new space is exhausted.
   2543   ExternalReference new_space_allocation_limit =
   2544       ExternalReference::new_space_allocation_limit_address(isolate());
   2545   if (!object_size.is(result_end)) {
   2546     movq(result_end, object_size);
   2547   }
   2548   addq(result_end, result);
   2549   j(carry, gc_required);
   2550   Operand limit_operand = ExternalOperand(new_space_allocation_limit);
   2551   cmpq(result_end, limit_operand);
   2552   j(above, gc_required);
   2553 
   2554   // Update allocation top.
   2555   UpdateAllocationTopHelper(result_end, scratch);
   2556 
   2557   // Tag the result if requested.
   2558   if ((flags & TAG_OBJECT) != 0) {
   2559     addq(result, Immediate(kHeapObjectTag));
   2560   }
   2561 }
   2562 
   2563 
   2564 void MacroAssembler::UndoAllocationInNewSpace(Register object) {
   2565   ExternalReference new_space_allocation_top =
   2566       ExternalReference::new_space_allocation_top_address(isolate());
   2567 
   2568   // Make sure the object has no tag before resetting top.
   2569   and_(object, Immediate(~kHeapObjectTagMask));
   2570   Operand top_operand = ExternalOperand(new_space_allocation_top);
   2571 #ifdef DEBUG
   2572   cmpq(object, top_operand);
   2573   Check(below, "Undo allocation of non allocated memory");
   2574 #endif
   2575   movq(top_operand, object);
   2576 }
   2577 
   2578 
   2579 void MacroAssembler::AllocateHeapNumber(Register result,
   2580                                         Register scratch,
   2581                                         Label* gc_required) {
   2582   // Allocate heap number in new space.
   2583   AllocateInNewSpace(HeapNumber::kSize,
   2584                      result,
   2585                      scratch,
   2586                      no_reg,
   2587                      gc_required,
   2588                      TAG_OBJECT);
   2589 
   2590   // Set the map.
   2591   LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
   2592   movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
   2593 }
   2594 
   2595 
   2596 void MacroAssembler::AllocateTwoByteString(Register result,
   2597                                            Register length,
   2598                                            Register scratch1,
   2599                                            Register scratch2,
   2600                                            Register scratch3,
   2601                                            Label* gc_required) {
   2602   // Calculate the number of bytes needed for the characters in the string while
   2603   // observing object alignment.
   2604   const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
   2605                                kObjectAlignmentMask;
   2606   ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask + kHeaderAlignment.
   2608   lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
   2609                 kHeaderAlignment));
   2610   and_(scratch1, Immediate(~kObjectAlignmentMask));
   2611   if (kHeaderAlignment > 0) {
   2612     subq(scratch1, Immediate(kHeaderAlignment));
   2613   }
   2614 
   2615   // Allocate two byte string in new space.
   2616   AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
   2617                      times_1,
   2618                      scratch1,
   2619                      result,
   2620                      scratch2,
   2621                      scratch3,
   2622                      gc_required,
   2623                      TAG_OBJECT);
   2624 
   2625   // Set the map, length and hash field.
   2626   LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
   2627   movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
   2628   Integer32ToSmi(scratch1, length);
   2629   movq(FieldOperand(result, String::kLengthOffset), scratch1);
   2630   movq(FieldOperand(result, String::kHashFieldOffset),
   2631        Immediate(String::kEmptyHashField));
   2632 }
   2633 
   2634 
   2635 void MacroAssembler::AllocateAsciiString(Register result,
   2636                                          Register length,
   2637                                          Register scratch1,
   2638                                          Register scratch2,
   2639                                          Register scratch3,
   2640                                          Label* gc_required) {
   2641   // Calculate the number of bytes needed for the characters in the string while
   2642   // observing object alignment.
   2643   const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
   2644                                kObjectAlignmentMask;
   2645   movl(scratch1, length);
   2646   ASSERT(kCharSize == 1);
   2647   addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
   2648   and_(scratch1, Immediate(~kObjectAlignmentMask));
   2649   if (kHeaderAlignment > 0) {
   2650     subq(scratch1, Immediate(kHeaderAlignment));
   2651   }
   2652 
   2653   // Allocate ascii string in new space.
   2654   AllocateInNewSpace(SeqAsciiString::kHeaderSize,
   2655                      times_1,
   2656                      scratch1,
   2657                      result,
   2658                      scratch2,
   2659                      scratch3,
   2660                      gc_required,
   2661                      TAG_OBJECT);
   2662 
   2663   // Set the map, length and hash field.
   2664   LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
   2665   movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
   2666   Integer32ToSmi(scratch1, length);
   2667   movq(FieldOperand(result, String::kLengthOffset), scratch1);
   2668   movq(FieldOperand(result, String::kHashFieldOffset),
   2669        Immediate(String::kEmptyHashField));
   2670 }
   2671 
   2672 
   2673 void MacroAssembler::AllocateConsString(Register result,
   2674                                         Register scratch1,
   2675                                         Register scratch2,
   2676                                         Label* gc_required) {
  // Allocate cons string object in new space.
   2678   AllocateInNewSpace(ConsString::kSize,
   2679                      result,
   2680                      scratch1,
   2681                      scratch2,
   2682                      gc_required,
   2683                      TAG_OBJECT);
   2684 
   2685   // Set the map. The other fields are left uninitialized.
   2686   LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
   2687   movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
   2688 }
   2689 
   2690 
   2691 void MacroAssembler::AllocateAsciiConsString(Register result,
   2692                                              Register scratch1,
   2693                                              Register scratch2,
   2694                                              Label* gc_required) {
   2695   // Allocate heap number in new space.
   2696   AllocateInNewSpace(ConsString::kSize,
   2697                      result,
   2698                      scratch1,
   2699                      scratch2,
   2700                      gc_required,
   2701                      TAG_OBJECT);
   2702 
   2703   // Set the map. The other fields are left uninitialized.
   2704   LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
   2705   movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
   2706 }
   2707 
   2708 
// Copy memory from source to destination.  Long copies use rep movsq on
// 8-byte units; short copies (at most kLongStringLimit bytes) use a simple
// byte-by-byte loop, which is faster for small lengths but slower for large
// ones.  Destination is incremented by length; source, length and scratch
// are clobbered.
// The cld() instruction must have been emitted, to clear the direction flag,
// before calling this function.
   2716 void MacroAssembler::CopyBytes(Register destination,
   2717                                Register source,
   2718                                Register length,
   2719                                int min_length,
   2720                                Register scratch) {
   2721   ASSERT(min_length >= 0);
   2722   if (FLAG_debug_code) {
   2723     cmpl(length, Immediate(min_length));
   2724     Assert(greater_equal, "Invalid min_length");
   2725   }
   2726   Label loop, done, short_string, short_loop;
   2727 
   2728   const int kLongStringLimit = 20;
   2729   if (min_length <= kLongStringLimit) {
   2730     cmpl(length, Immediate(kLongStringLimit));
   2731     j(less_equal, &short_string);
   2732   }
   2733 
   2734   ASSERT(source.is(rsi));
   2735   ASSERT(destination.is(rdi));
   2736   ASSERT(length.is(rcx));
   2737 
   2738   // Because source is 8-byte aligned in our uses of this function,
   2739   // we keep source aligned for the rep movs operation by copying the odd bytes
   2740   // at the end of the ranges.
   2741   movq(scratch, length);
   2742   shrl(length, Immediate(3));
   2743   repmovsq();
   2744   // Move remaining bytes of length.
   2745   andl(scratch, Immediate(0x7));
   2746   movq(length, Operand(source, scratch, times_1, -8));
   2747   movq(Operand(destination, scratch, times_1, -8), length);
   2748   addq(destination, scratch);
   2749 
   2750   if (min_length <= kLongStringLimit) {
   2751     jmp(&done);
   2752 
   2753     bind(&short_string);
   2754     if (min_length == 0) {
   2755       testl(length, length);
   2756       j(zero, &done);
   2757     }
   2758     lea(scratch, Operand(destination, length, times_1, 0));
   2759 
   2760     bind(&short_loop);
   2761     movb(length, Operand(source, 0));
   2762     movb(Operand(destination, 0), length);
   2763     incq(source);
   2764     incq(destination);
   2765     cmpq(destination, scratch);
   2766     j(not_equal, &short_loop);
   2767 
   2768     bind(&done);
   2769   }
   2770 }
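
// Usage sketch (illustrative, not from the original source): copy rcx bytes
// from rsi to rdi, clobbering rbx as scratch:
//   cld();                             // Required before the rep movs above.
//   CopyBytes(rdi, rsi, rcx, 0, rbx);  // min_length 0: length may be zero.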
   2771 
   2772 
   2773 void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
   2774   if (context_chain_length > 0) {
   2775     // Move up the chain of contexts to the context containing the slot.
   2776     movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
   2777     // Load the function context (which is the incoming, outer context).
   2778     movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
   2779     for (int i = 1; i < context_chain_length; i++) {
   2780       movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
   2781       movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
   2782     }
   2783     // The context may be an intermediate context, not a function context.
   2784     movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
   2785   } else {
   2786     // Slot is in the current function context.  Move it into the
   2787     // destination register in case we store into it (the write barrier
   2788     // cannot be allowed to destroy the context in rsi).
   2789     movq(dst, rsi);
   2790   }
   2791 
   2792   // We should not have found a 'with' context by walking the context chain
   2793   // (i.e., the static scope chain and runtime context chain do not agree).
   2794   // A variable occurring in such a scope should have slot type LOOKUP and
   2795   // not CONTEXT.
   2796   if (emit_debug_code()) {
   2797     cmpq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
   2798     Check(equal, "Yo dawg, I heard you liked function contexts "
   2799                  "so I put function contexts in all your contexts");
   2800   }
   2801 }
   2802 
   2803 #ifdef _WIN64
   2804 static const int kRegisterPassedArguments = 4;
   2805 #else
   2806 static const int kRegisterPassedArguments = 6;
   2807 #endif
   2808 
   2809 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
   2810   // Load the global or builtins object from the current context.
   2811   movq(function, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
   2812   // Load the global context from the global or builtins object.
   2813   movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
   2814   // Load the function from the global context.
   2815   movq(function, Operand(function, Context::SlotOffset(index)));
   2816 }
   2817 
   2818 
   2819 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
   2820                                                   Register map) {
   2821   // Load the initial map.  The global functions all have initial maps.
   2822   movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
   2823   if (emit_debug_code()) {
   2824     Label ok, fail;
   2825     CheckMap(map, FACTORY->meta_map(), &fail, false);
   2826     jmp(&ok);
   2827     bind(&fail);
   2828     Abort("Global functions must have initial map");
   2829     bind(&ok);
   2830   }
   2831 }
   2832 
   2833 
   2834 int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
   2835   // On Windows 64 stack slots are reserved by the caller for all arguments
   2836   // including the ones passed in registers, and space is always allocated for
   2837   // the four register arguments even if the function takes fewer than four
   2838   // arguments.
   2839   // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
   2840   // and the caller does not reserve stack slots for them.
   2841   ASSERT(num_arguments >= 0);
   2842 #ifdef _WIN64
   2843   const int kMinimumStackSlots = kRegisterPassedArguments;
   2844   if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
   2845   return num_arguments;
   2846 #else
   2847   if (num_arguments < kRegisterPassedArguments) return 0;
   2848   return num_arguments - kRegisterPassedArguments;
   2849 #endif
   2850 }
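
// Worked example (illustrative): for a call taking 2 arguments this returns
// 4 on Windows (the full shadow space is always reserved) and 0 on the AMD64
// ABI; for 8 arguments it returns 8 on Windows and 8 - 6 = 2 elsewhere.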
   2851 
   2852 
   2853 void MacroAssembler::PrepareCallCFunction(int num_arguments) {
   2854   int frame_alignment = OS::ActivationFrameAlignment();
   2855   ASSERT(frame_alignment != 0);
   2856   ASSERT(num_arguments >= 0);
   2857 
  // Align the stack end and allocate space for the arguments and the old rsp.
   2859   movq(kScratchRegister, rsp);
   2860   ASSERT(IsPowerOf2(frame_alignment));
   2861   int argument_slots_on_stack =
   2862       ArgumentStackSlotsForCFunctionCall(num_arguments);
   2863   subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
   2864   and_(rsp, Immediate(-frame_alignment));
   2865   movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
   2866 }
   2867 
   2868 
   2869 void MacroAssembler::CallCFunction(ExternalReference function,
   2870                                    int num_arguments) {
   2871   LoadAddress(rax, function);
   2872   CallCFunction(rax, num_arguments);
   2873 }
   2874 
   2875 
   2876 void MacroAssembler::CallCFunction(Register function, int num_arguments) {
   2877   // Check stack alignment.
   2878   if (emit_debug_code()) {
   2879     CheckStackAlignment();
   2880   }
   2881 
   2882   call(function);
   2883   ASSERT(OS::ActivationFrameAlignment() != 0);
   2884   ASSERT(num_arguments >= 0);
   2885   int argument_slots_on_stack =
   2886       ArgumentStackSlotsForCFunctionCall(num_arguments);
   2887   movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
   2888 }
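
// Typical use (a sketch; registers shown assume the AMD64 ABI, and fn_ref is
// a hypothetical ExternalReference to the C function being called):
//   PrepareCallCFunction(2);
//   movq(rdi, rax);  // First C argument register on AMD64.
//   movq(rsi, rbx);  // Second C argument.
//   CallCFunction(fn_ref, 2);
// PrepareCallCFunction aligns rsp and stores the old rsp above the argument
// slots; CallCFunction reloads it from there after the call returns.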
   2889 
   2890 
   2891 CodePatcher::CodePatcher(byte* address, int size)
   2892     : address_(address),
   2893       size_(size),
   2894       masm_(Isolate::Current(), address, size + Assembler::kGap) {
   2895   // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate
  // size bytes of instructions without failing with buffer size constraints.
   2898   ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
   2899 }
   2900 
   2901 
   2902 CodePatcher::~CodePatcher() {
   2903   // Indicate that code has changed.
   2904   CPU::FlushICache(address_, size_);
   2905 
   2906   // Check that the code was patched as expected.
   2907   ASSERT(masm_.pc_ == address_ + size_);
   2908   ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
   2909 }
   2910 
   2911 } }  // namespace v8::internal
   2912 
   2913 #endif  // V8_TARGET_ARCH_X64
   2914