// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_X64_ASSEMBLER_X64_INL_H_
#define V8_X64_ASSEMBLER_X64_INL_H_

#include "x64/assembler-x64.h"

#include "cpu.h"
#include "debug.h"
#include "v8memory.h"

namespace v8 {
namespace internal {


// -----------------------------------------------------------------------------
// Implementation of Assembler


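// 0xE8 is the opcode of the call instruction with a 32-bit relative
// displacement; it is used below to recognize the code aging call sequence.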
static const byte kCallOpcode = 0xE8;


void Assembler::emitl(uint32_t x) {
  Memory::uint32_at(pc_) = x;
  pc_ += sizeof(uint32_t);
}


void Assembler::emitp(void* x, RelocInfo::Mode rmode) {
  uintptr_t value = reinterpret_cast<uintptr_t>(x);
  Memory::uintptr_at(pc_) = value;
  if (!RelocInfo::IsNone(rmode)) {
    RecordRelocInfo(rmode, value);
  }
  pc_ += sizeof(uintptr_t);
}


void Assembler::emitq(uint64_t x, RelocInfo::Mode rmode) {
  Memory::uint64_at(pc_) = x;
  if (!RelocInfo::IsNone(rmode)) {
    RecordRelocInfo(rmode, x);
  }
  pc_ += sizeof(uint64_t);
}


void Assembler::emitw(uint16_t x) {
  Memory::uint16_at(pc_) = x;
  pc_ += sizeof(uint16_t);
}


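// Code targets are emitted as indices into the code_targets_ list rather than
// as absolute addresses; code_target_object_handle_at() below maps an emitted
// index back to the corresponding handle.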
void Assembler::emit_code_target(Handle<Code> target,
                                 RelocInfo::Mode rmode,
                                 TypeFeedbackId ast_id) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, ast_id.ToInt());
  } else {
    RecordRelocInfo(rmode);
  }
  int current = code_targets_.length();
  if (current > 0 && code_targets_.last().is_identical_to(target)) {
    // Optimization: if we keep jumping to the same code target, reuse the
    // existing list entry instead of adding a duplicate.
    emitl(current - 1);
  } else {
    code_targets_.Add(target);
    emitl(current);
  }
}


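// Runtime entries are emitted as 32-bit offsets from the start of the
// isolate's code range; runtime_entry_at() below performs the inverse
// translation.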
void Assembler::emit_runtime_entry(Address entry, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
  ASSERT(isolate()->code_range()->exists());
  RecordRelocInfo(rmode);
  emitl(static_cast<uint32_t>(entry - isolate()->code_range()->start()));
}


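// A REX prefix has the bit layout 0100WRXB: W selects a 64-bit operand size,
// R extends the ModRM reg field, X extends the SIB index field, and B extends
// the ModRM rm (or SIB base) field. Hence 0x48 below is REX.W and 0x40 is a
// REX prefix with no extension bits set.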
void Assembler::emit_rex_64(Register reg, Register rm_reg) {
  emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, const Operand& op) {
  emit(0x48 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_64(XMMRegister reg, const Operand& op) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_);
}


void Assembler::emit_rex_64(Register rm_reg) {
  ASSERT_EQ(rm_reg.code() & 0xf, rm_reg.code());
  emit(0x48 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(const Operand& op) {
  emit(0x48 | op.rex_);
}


void Assembler::emit_rex_32(Register reg, Register rm_reg) {
  emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(Register reg, const Operand& op) {
  emit(0x40 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_32(Register rm_reg) {
  emit(0x40 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(const Operand& op) {
  emit(0x40 | op.rex_);
}


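// The emit_optional_rex_32 variants emit a REX prefix only if at least one
// extension bit is needed; for 32-bit operands on the first eight registers
// the prefix can be omitted entirely.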
void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
  byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, const Operand& op) {
  byte rex_bits = reg.high_bit() << 2 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, const Operand& op) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
}


void Assembler::emit_optional_rex_32(const Operand& op) {
  if (op.rex_ != 0) emit(0x40 | op.rex_);
}


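// On x64, call and jump targets are encoded as 32-bit displacements relative
// to the end of the displacement field, so the absolute target is the loaded
// displacement plus pc + 4.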
Address Assembler::target_address_at(Address pc) {
  return Memory::int32_at(pc) + pc + 4;
}


void Assembler::set_target_address_at(Address pc, Address target) {
  Memory::int32_at(pc) = static_cast<int32_t>(target - pc - 4);
  CPU::FlushICache(pc, sizeof(int32_t));
}


Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}


Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
  return code_targets_[Memory::int32_at(pc)];
}


Address Assembler::runtime_entry_at(Address pc) {
  ASSERT(isolate()->code_range()->exists());
  return Memory::int32_at(pc) + isolate()->code_range()->start();
}

// -----------------------------------------------------------------------------
// Implementation of RelocInfo

// The modes possibly affected by apply must be in kApplyMask.
void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    Memory::Address_at(pc_) += static_cast<int32_t>(delta);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
    Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
    CPU::FlushICache(pc_, sizeof(int32_t));
  } else if (rmode_ == CODE_AGE_SEQUENCE) {
    if (*pc_ == kCallOpcode) {
      int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
      *p -= static_cast<int32_t>(delta);  // Relocate entry.
      CPU::FlushICache(p, sizeof(uint32_t));
    }
  }
}


Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_);
}


Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}


int RelocInfo::target_address_size() {
  if (IsCodedSpecially()) {
    return Assembler::kSpecialTargetSize;
  } else {
    return kPointerSize;
  }
}


void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, target);
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(pc_);
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  if (rmode_ == EMBEDDED_OBJECT) {
    return Memory::Object_Handle_at(pc_);
  } else {
    return origin->code_target_object_handle_at(pc_);
  }
}


Object** RelocInfo::target_object_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object**>(pc_);
}


Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return reinterpret_cast<Address*>(pc_);
}


void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  ASSERT(!target->IsConsString());
  Memory::Object_at(pc_) = target;
  CPU::FlushICache(pc_, sizeof(Address));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  ASSERT(IsRuntimeEntry(rmode_));
  return origin->runtime_entry_at(pc_);
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode mode) {
  ASSERT(IsRuntimeEntry(rmode_));
  if (target_address() != target) set_target_address(target, mode);
}


Handle<Cell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode mode) {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  CPU::FlushICache(pc_, sizeof(Address));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because a cell can never be
    // on an evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}


bool RelocInfo::IsPatchedReturnSequence() {
  // The recognized call sequence is:
  //  movq(kScratchRegister, address); call(kScratchRegister);
  // It only needs to be distinguished from a return sequence
  //  movq(rsp, rbp); pop(rbp); ret(n); int3 *6
  // The 11th byte is int3 (0xCC) in the return sequence and
  // REX.WB (0x48+register bit) for the call sequence.
#ifdef ENABLE_DEBUGGER_SUPPORT
  return pc_[Assembler::kMoveAddressIntoScratchRegisterInstructionLength] !=
         0xCC;
#else
  return false;
#endif
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  return !Assembler::IsNop(pc());
}


Code* RelocInfo::code_age_stub() {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  ASSERT(*pc_ == kCallOpcode);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + 1));
}


void RelocInfo::set_code_age_stub(Code* stub) {
  ASSERT(*pc_ == kCallOpcode);
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(pc_ + 1, stub->instruction_start());
}


Address RelocInfo::call_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(
      pc_ + Assembler::kRealPatchReturnSequenceAddressOffset);
}


void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset) =
      target;
  CPU::FlushICache(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset,
                   sizeof(Address));
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::call_object() {
  return *call_object_address();
}


void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}


Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(
      pc_ + Assembler::kPatchReturnSequenceAddressOffset);
}


void RelocInfo::Visit(ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // TODO(isolates): Get a cached isolate below.
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             Isolate::Current()->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


// -----------------------------------------------------------------------------
// Implementation of Operand

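// A ModRM byte is laid out as mod (2 bits) | reg (3 bits) | rm (3 bits).
// Only the mod and rm fields are filled in here; the reg field is ORed in
// later by the instruction that uses the operand.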
void Operand::set_modrm(int mod, Register rm_reg) {
  ASSERT(is_uint2(mod));
  buf_[0] = mod << 6 | rm_reg.low_bits();
  // Set REX.B to the high bit of rm.code().
  rex_ |= rm_reg.high_bit();
}


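// A SIB byte uses the same layout: scale (2 bits) | index (3 bits) |
// base (3 bits); the high bits of index and base are recorded in rex_ as
// REX.X and REX.B.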
void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
  ASSERT(len_ == 1);
  ASSERT(is_uint2(scale));
  // Use SIB with no index register only for base rsp or r12. Otherwise we
  // would skip the SIB byte entirely.
  ASSERT(!index.is(rsp) || base.is(rsp) || base.is(r12));
  buf_[1] = (scale << 6) | (index.low_bits() << 3) | base.low_bits();
  rex_ |= index.high_bit() << 1 | base.high_bit();
  len_ = 2;
}


void Operand::set_disp8(int disp) {
  ASSERT(is_int8(disp));
  ASSERT(len_ == 1 || len_ == 2);
  int8_t* p = reinterpret_cast<int8_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int8_t);
}


void Operand::set_disp32(int disp) {
  ASSERT(len_ == 1 || len_ == 2);
  int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int32_t);
}


} }  // namespace v8::internal

#endif  // V8_X64_ASSEMBLER_X64_INL_H_