// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_X64_ASSEMBLER_X64_INL_H_
#define V8_X64_ASSEMBLER_X64_INL_H_

#include "x64/assembler-x64.h"

#include "cpu.h"
#include "debug.h"
#include "v8memory.h"

namespace v8 {
namespace internal {


// -----------------------------------------------------------------------------
// Implementation of Assembler


static const byte kCallOpcode = 0xE8;
static const int kNoCodeAgeSequenceLength = 6;


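// Raw emit helpers: each writes an unformatted value at the current assembly
// position (pc_) and advances pc_ past it. emitp additionally records
// relocation information when a relocation mode is given.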
void Assembler::emitl(uint32_t x) {
  Memory::uint32_at(pc_) = x;
  pc_ += sizeof(uint32_t);
}


void Assembler::emitp(void* x, RelocInfo::Mode rmode) {
  uintptr_t value = reinterpret_cast<uintptr_t>(x);
  Memory::uintptr_at(pc_) = value;
  if (!RelocInfo::IsNone(rmode)) {
    RecordRelocInfo(rmode, value);
  }
  pc_ += sizeof(uintptr_t);
}


void Assembler::emitq(uint64_t x) {
  Memory::uint64_at(pc_) = x;
  pc_ += sizeof(uint64_t);
}


void Assembler::emitw(uint16_t x) {
  Memory::uint16_at(pc_) = x;
  pc_ += sizeof(uint16_t);
}


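// Code targets are not emitted as raw addresses. The Handle<Code> is kept in
// code_targets_ and its index is emitted as a 32-bit value, with relocation
// information recorded so the index can be resolved later. Consecutive
// emissions of the same target reuse the previous index.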
void Assembler::emit_code_target(Handle<Code> target,
                                 RelocInfo::Mode rmode,
                                 TypeFeedbackId ast_id) {
  ASSERT(RelocInfo::IsCodeTarget(rmode) ||
      rmode == RelocInfo::CODE_AGE_SEQUENCE);
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, ast_id.ToInt());
  } else {
    RecordRelocInfo(rmode);
  }
  int current = code_targets_.length();
  if (current > 0 && code_targets_.last().is_identical_to(target)) {
    // Optimization if we keep jumping to the same code target.
    emitl(current - 1);
  } else {
    code_targets_.Add(target);
    emitl(current);
  }
}


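// Runtime entries are emitted as an unsigned 32-bit offset from the start of
// the isolate's code range rather than as an absolute address, so the code
// range must exist.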
void Assembler::emit_runtime_entry(Address entry, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
  ASSERT(isolate()->code_range()->exists());
  RecordRelocInfo(rmode);
  emitl(static_cast<uint32_t>(entry - isolate()->code_range()->start()));
}


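// REX prefix layout: 0100WRXB. The emit_rex_64 variants always set REX.W
// (0x48) to select a 64-bit operand size; the register arguments contribute
// REX.R (bit 2, the high bit of the ModRM reg field) and REX.B (bit 0, the
// high bit of the ModRM rm or SIB base field), while Operand::rex_ carries
// the REX.X and REX.B bits collected when the operand was built.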
void Assembler::emit_rex_64(Register reg, Register rm_reg) {
  emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, const Operand& op) {
  emit(0x48 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_64(XMMRegister reg, const Operand& op) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_);
}


void Assembler::emit_rex_64(Register rm_reg) {
  ASSERT_EQ(rm_reg.code() & 0xf, rm_reg.code());
  emit(0x48 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(const Operand& op) {
  emit(0x48 | op.rex_);
}


void Assembler::emit_rex_32(Register reg, Register rm_reg) {
  emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(Register reg, const Operand& op) {
  emit(0x40 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_32(Register rm_reg) {
  emit(0x40 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(const Operand& op) {
  emit(0x40 | op.rex_);
}


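// The emit_optional_rex_32 variants emit a REX prefix only when one of the
// operands uses an extended register (r8-r15 or xmm8-xmm15); otherwise no
// prefix byte is needed.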
void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
  byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, const Operand& op) {
  byte rex_bits = reg.high_bit() << 2 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, const Operand& op) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
}


void Assembler::emit_optional_rex_32(const Operand& op) {
  if (op.rex_ != 0) emit(0x40 | op.rex_);
}


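// Call and jump targets are stored as a signed 32-bit displacement relative
// to the end of the 4-byte displacement field (pc + 4), so reading or writing
// a target converts between that relative form and an absolute address.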
Address Assembler::target_address_at(Address pc) {
  return Memory::int32_at(pc) + pc + 4;
}


void Assembler::set_target_address_at(Address pc, Address target) {
  Memory::int32_at(pc) = static_cast<int32_t>(target - pc - 4);
  CPU::FlushICache(pc, sizeof(int32_t));
}


Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}


Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
  return code_targets_[Memory::int32_at(pc)];
}


Address Assembler::runtime_entry_at(Address pc) {
  ASSERT(isolate()->code_range()->exists());
  return Memory::int32_at(pc) + isolate()->code_range()->start();
}

// -----------------------------------------------------------------------------
// Implementation of RelocInfo

// The modes possibly affected by apply must be in kApplyMask.
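// Absolute internal references point into the code object itself and move
// with it, so they are adjusted by +delta. PC-relative code targets and
// runtime entries must keep referring to the same absolute address, so their
// displacements are adjusted by -delta when the code containing them moves
// by delta.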
void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    Memory::Address_at(pc_) += static_cast<int32_t>(delta);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
    Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
    CPU::FlushICache(pc_, sizeof(int32_t));
  } else if (rmode_ == CODE_AGE_SEQUENCE) {
    if (*pc_ == kCallOpcode) {
      int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
      *p -= static_cast<int32_t>(delta);  // Relocate entry.
      CPU::FlushICache(p, sizeof(uint32_t));
    }
  }
}


Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_);
}


Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}


int RelocInfo::target_address_size() {
  if (IsCodedSpecially()) {
    return Assembler::kSpecialTargetSize;
  } else {
    return kPointerSize;
  }
}


void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, target);
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(pc_);
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  if (rmode_ == EMBEDDED_OBJECT) {
    return Memory::Object_Handle_at(pc_);
  } else {
    return origin->code_target_object_handle_at(pc_);
  }
}


Address RelocInfo::target_reference() {
  ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return Memory::Address_at(pc_);
}


void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  ASSERT(!target->IsConsString());
  Memory::Object_at(pc_) = target;
  CPU::FlushICache(pc_, sizeof(Address));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  ASSERT(IsRuntimeEntry(rmode_));
  return origin->runtime_entry_at(pc_);
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode mode) {
  ASSERT(IsRuntimeEntry(rmode_));
  if (target_address() != target) set_target_address(target, mode);
}


Handle<Cell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode mode) {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  CPU::FlushICache(pc_, sizeof(Address));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because a cell can never be on
    // an evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}


void RelocInfo::WipeOut() {
  if (IsEmbeddedObject(rmode_) || IsExternalReference(rmode_)) {
    Memory::Address_at(pc_) = NULL;
  } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
    // Effectively write zero into the relocation.
    Assembler::set_target_address_at(pc_, pc_ + sizeof(int32_t));
  } else {
    UNREACHABLE();
  }
}


bool RelocInfo::IsPatchedReturnSequence() {
  // The recognized call sequence is:
  //  movq(kScratchRegister, address); call(kScratchRegister);
  // It only needs to be distinguished from a return sequence
  //  movq(rsp, rbp); pop(rbp); ret(n); int3 *6
  // The 11th byte is int3 (0xCC) in the return sequence and
  // REX.WB (0x48+register bit) for the call sequence.
#ifdef ENABLE_DEBUGGER_SUPPORT
  return pc_[Assembler::kMoveAddressIntoScratchRegisterInstructionLength] !=
         0xCC;
#else
  return false;
#endif
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  return !Assembler::IsNop(pc());
}


Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  ASSERT(*pc_ == kCallOpcode);
  return origin->code_target_object_handle_at(pc_ + 1);
}


Code* RelocInfo::code_age_stub() {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  ASSERT(*pc_ == kCallOpcode);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + 1));
}


void RelocInfo::set_code_age_stub(Code* stub) {
  ASSERT(*pc_ == kCallOpcode);
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(pc_ + 1, stub->instruction_start());
}


Address RelocInfo::call_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(
      pc_ + Assembler::kRealPatchReturnSequenceAddressOffset);
}


void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset) =
      target;
  CPU::FlushICache(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset,
                   sizeof(Address));
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::call_object() {
  return *call_object_address();
}


void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}


Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(
      pc_ + Assembler::kPatchReturnSequenceAddressOffset);
}


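// Dispatch to the visitor matching this relocation's mode. The instruction
// cache is flushed after visiting embedded pointers and external references
// because the visitor may have rewritten the value embedded in the
// instruction stream (e.g. when the garbage collector moves the referenced
// object).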
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             isolate->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


// -----------------------------------------------------------------------------
// Implementation of Operand

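// ModRM byte layout: mod in bits 7:6, reg in bits 5:3, rm in bits 2:0. The
// Operand only fills in mod and rm; the reg field is OR-ed in later when the
// instruction using the operand is emitted. Register bits that do not fit in
// three bits are accumulated in rex_ for the eventual REX prefix.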
void Operand::set_modrm(int mod, Register rm_reg) {
  ASSERT(is_uint2(mod));
  buf_[0] = mod << 6 | rm_reg.low_bits();
  // Set REX.B to the high bit of rm.code().
  rex_ |= rm_reg.high_bit();
}


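// SIB byte layout: scale in bits 7:6, index in bits 5:3, base in bits 2:0.
// The high bits of index and base go to REX.X and REX.B respectively.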
void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
  ASSERT(len_ == 1);
  ASSERT(is_uint2(scale));
  // Use SIB with no index register only for base rsp or r12. Otherwise we
  // would skip the SIB byte entirely.
  ASSERT(!index.is(rsp) || base.is(rsp) || base.is(r12));
  buf_[1] = (scale << 6) | (index.low_bits() << 3) | base.low_bits();
  rex_ |= index.high_bit() << 1 | base.high_bit();
  len_ = 2;
}

void Operand::set_disp8(int disp) {
  ASSERT(is_int8(disp));
  ASSERT(len_ == 1 || len_ == 2);
  int8_t* p = reinterpret_cast<int8_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int8_t);
}

void Operand::set_disp32(int disp) {
  ASSERT(len_ == 1 || len_ == 2);
  int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int32_t);
}


} }  // namespace v8::internal

#endif  // V8_X64_ASSEMBLER_X64_INL_H_