// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been
// modified significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.


#ifndef V8_MIPS_ASSEMBLER_MIPS_INL_H_
#define V8_MIPS_ASSEMBLER_MIPS_INL_H_

#include "mips/assembler-mips.h"

#include "cpu.h"
#include "debug.h"


namespace v8 {
namespace internal {

// -----------------------------------------------------------------------------
// Operand and MemOperand.

Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;
  imm32_ = immediate;
  rmode_ = rmode;
}


Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}


Operand::Operand(Register rm) {
  rm_ = rm;
}


bool Operand::is_reg() const {
  return rm_.is_valid();
}


int Register::NumAllocatableRegisters() {
  return kMaxNumAllocatableRegisters;
}


int DoubleRegister::NumRegisters() {
  return FPURegister::kMaxNumRegisters;
}


int DoubleRegister::NumAllocatableRegisters() {
  return FPURegister::kMaxNumAllocatableRegisters;
}


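// Allocatable double registers are the even-numbered FPU registers (each is
// an even/odd pair of single-precision registers on MIPS32), so the
// allocation index is simply the register code divided by two.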
int FPURegister::ToAllocationIndex(FPURegister reg) {
  ASSERT(reg.code() % 2 == 0);
  ASSERT(reg.code() / 2 < kMaxNumAllocatableRegisters);
  ASSERT(reg.is_valid());
  ASSERT(!reg.is(kDoubleRegZero));
  ASSERT(!reg.is(kLithiumScratchDouble));
  return (reg.code() / 2);
}


// -----------------------------------------------------------------------------
// RelocInfo.

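// Adjust this relocation after the code it belongs to has moved by |delta|
// bytes. A patched jump whose target is no longer in the same 256 MB
// (28-bit) region is rewritten to a jump-through-register sequence, and
// absolute internal references are relocated and flushed from the
// instruction cache.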
void RelocInfo::apply(intptr_t delta) {
  if (IsCodeTarget(rmode_)) {
    uint32_t scope1 = (uint32_t) target_address() & ~kImm28Mask;
    uint32_t scope2 = reinterpret_cast<uint32_t>(pc_) & ~kImm28Mask;

    if (scope1 != scope2) {
      Assembler::JumpLabelToJumpRegister(pc_);
    }
  }
  if (IsInternalReference(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    byte* p = reinterpret_cast<byte*>(pc_);
    int count = Assembler::RelocateInternalReference(p, delta);
    CPU::FlushICache(p, count * sizeof(uint32_t));
  }
}


Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_);
}


Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         rmode_ == EMBEDDED_OBJECT ||
         rmode_ == EXTERNAL_REFERENCE);
  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LUI/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written. In this case the target_address_address function should
  // return the end of the instructions to be patched, allowing the
  // deserializer to deserialize the instructions as raw bytes and put them in
  // place, ready to be patched with the target. After jump optimization,
  // that is the address of the instruction that follows the J/JAL/JR/JALR
  // instruction.
  return reinterpret_cast<Address>(
    pc_ + Assembler::kInstructionsFor32BitConstant * Assembler::kInstrSize);
}


int RelocInfo::target_address_size() {
  return Assembler::kSpecialTargetSize;
}


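// Patch the target address at this relocation and, when patching a code
// target, record the write so that incremental marking sees the new
// code-to-code reference.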
void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, target);
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


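// Given the return address of a call, step back over the call sequence to
// the address at which the call target was patched in.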
Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}


Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_)));
}


Object** RelocInfo::target_object_address() {
  // Provide a "natural pointer" to the embedded object,
  // which can be dereferenced during heap iteration.
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  reconstructed_obj_ptr_ =
      reinterpret_cast<Object*>(Assembler::target_address_at(pc_));
  return &reconstructed_obj_ptr_;
}


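// Patch the embedded object pointer and, for heap objects, record the write
// with the incremental marker.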
void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  ASSERT(!target->IsConsString());
  Assembler::set_target_address_at(pc_, reinterpret_cast<Address>(target));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}


Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == EXTERNAL_REFERENCE);
  reconstructed_adr_ptr_ = Assembler::target_address_at(pc_);
  return &reconstructed_adr_ptr_;
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  ASSERT(IsRuntimeEntry(rmode_));
  return target_address();
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode mode) {
  ASSERT(IsRuntimeEntry(rmode_));
  if (target_address() != target) set_target_address(target, mode);
}


Handle<Cell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode mode) {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because a cell can never be
    // on an evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}


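// The code-age sequence is kNoCodeAgeSequenceLength instructions long; the
// address of the code-age stub is kept in the last word of that sequence
// (see code_age_stub() and set_code_age_stub() below).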
static const int kNoCodeAgeSequenceLength = 7;

Code* RelocInfo::code_age_stub() {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Memory::Address_at(pc_ + Assembler::kInstrSize *
                         (kNoCodeAgeSequenceLength - 1)));
}


void RelocInfo::set_code_age_stub(Code* stub) {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Memory::Address_at(pc_ + Assembler::kInstrSize *
                     (kNoCodeAgeSequenceLength - 1)) =
      stub->instruction_start();
}


Address RelocInfo::call_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  // The pc_ offset of 0 assumes a patched MIPS return sequence per
  // debug-mips.cc BreakLocationIterator::SetDebugBreakAtReturn(), or a
  // debug break slot per BreakLocationIterator::SetDebugBreakAtSlot().
  return Assembler::target_address_at(pc_);
}


void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  // The pc_ offset of 0 assumes a patched MIPS return sequence per
  // debug-mips.cc BreakLocationIterator::SetDebugBreakAtReturn(), or a
  // debug break slot per BreakLocationIterator::SetDebugBreakAtSlot().
  Assembler::set_target_address_at(pc_, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::call_object() {
  return *call_object_address();
}


Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
}


void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}


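// A patched return sequence loads the break target with LUI/ORI and calls
// it with either JAL or JALR, so checking those three instructions is
// sufficient.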
bool RelocInfo::IsPatchedReturnSequence() {
  Instr instr0 = Assembler::instr_at(pc_);
  Instr instr1 = Assembler::instr_at(pc_ + 1 * Assembler::kInstrSize);
  Instr instr2 = Assembler::instr_at(pc_ + 2 * Assembler::kInstrSize);
  bool patched_return = ((instr0 & kOpcodeMask) == LUI &&
                         (instr1 & kOpcodeMask) == ORI &&
                         ((instr2 & kOpcodeMask) == JAL ||
                          ((instr2 & kOpcodeMask) == SPECIAL &&
                           (instr2 & kFunctionFieldMask) == JALR)));
  return patched_return;
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}


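// Dispatch this relocation entry to the matching ObjectVisitor callback
// based on its relocation mode.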
void RelocInfo::Visit(ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // TODO(isolates): Get a cached isolate below.
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence())) &&
             Isolate::Current()->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


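// Static-visitor counterpart of Visit(ObjectVisitor*); dispatches on the
// relocation mode in the same way.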
template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


// -----------------------------------------------------------------------------
// Assembler.


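// Grow the instruction buffer once the remaining space falls to the
// reserved gap (kGap) at the end of the buffer.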
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
}


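// Cheap inline check: run the full trampoline pool check only once the
// emitted offset has reached the next scheduled check point.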
void Assembler::CheckTrampolinePoolQuick() {
  if (pc_offset() >= next_buffer_check_) {
    CheckTrampolinePool();
  }
}


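// Emit a single instruction: grow the buffer if needed (unless growth is
// blocked), write the instruction word, advance the pc, and check whether
// a trampoline pool must be emitted.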
void Assembler::emit(Instr x) {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  CheckTrampolinePoolQuick();
}


} }  // namespace v8::internal

#endif  // V8_MIPS_ASSEMBLER_MIPS_INL_H_