// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been
// modified significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.


#ifndef V8_MIPS_ASSEMBLER_MIPS_INL_H_
#define V8_MIPS_ASSEMBLER_MIPS_INL_H_

#include "src/mips/assembler-mips.h"

#include "src/assembler.h"
#include "src/debug/debug.h"


namespace v8 {
namespace internal {


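// Crankshaft is only supported when the CPU provides an FPU.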
bool CpuFeatures::SupportsCrankshaft() { return IsSupported(FPU); }


// -----------------------------------------------------------------------------
// Operand and MemOperand.

Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;
  imm32_ = immediate;
  rmode_ = rmode;
}


Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}


Operand::Operand(Register rm) {
  rm_ = rm;
}


bool Operand::is_reg() const {
  return rm_.is_valid();
}


// -----------------------------------------------------------------------------
// RelocInfo.

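// Adjust absolute internal references when the code object is moved by
// delta; other relocation modes need no adjustment here.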
void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    byte* p = reinterpret_cast<byte*>(pc_);
    int count = Assembler::RelocateInternalReference(rmode_, p, delta);
    Assembler::FlushICache(isolate_, p, count * sizeof(uint32_t));
  }
}


Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         rmode_ == EMBEDDED_OBJECT ||
         rmode_ == EXTERNAL_REFERENCE);
  // Read the address of the word containing the target_address in the
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LUI/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written. In this case the target_address_address function should
  // return the end of the instructions to be patched, allowing the
  // deserializer to deserialize the instructions as raw bytes and put them in
  // place, ready to be patched with the target. After jump optimization,
  // that is the address of the instruction that follows the J/JAL/JR/JALR
  // instruction.
  return reinterpret_cast<Address>(
    pc_ + Assembler::kInstructionsFor32BitConstant * Assembler::kInstrSize);
}


Address RelocInfo::constant_pool_entry_address() {
  UNREACHABLE();
  return NULL;
}


int RelocInfo::target_address_size() {
  return Assembler::kSpecialTargetSize;
}


void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(isolate_, pc_, host_, target,
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


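// The call target is encoded kCallTargetAddressOffset bytes before the
// return address of the call.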
Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}


void Assembler::set_target_internal_reference_encoded_at(Address pc,
                                                          Address target) {
  // Encoded internal references are a lui/ori load of a 32-bit absolute
  // address.
  Instr instr_lui = Assembler::instr_at(pc + 0 * Assembler::kInstrSize);
  Instr instr_ori = Assembler::instr_at(pc + 1 * Assembler::kInstrSize);
  DCHECK(Assembler::IsLui(instr_lui));
  DCHECK(Assembler::IsOri(instr_ori));
  instr_lui &= ~kImm16Mask;
  instr_ori &= ~kImm16Mask;
  int32_t imm = reinterpret_cast<int32_t>(target);
  DCHECK((imm & 3) == 0);
  Assembler::instr_at_put(pc + 0 * Assembler::kInstrSize,
                          instr_lui | ((imm >> kLuiShift) & kImm16Mask));
  Assembler::instr_at_put(pc + 1 * Assembler::kInstrSize,
                          instr_ori | (imm & kImm16Mask));

  // Currently used only by the deserializer, and all code is flushed after
  // complete deserialization, so there is no need to flush the ICache for
  // each reference.
}


void Assembler::deserialization_set_target_internal_reference_at(
    Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
  if (mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    DCHECK(IsLui(instr_at(pc)));
    set_target_internal_reference_encoded_at(pc, target);
  } else {
    DCHECK(mode == RelocInfo::INTERNAL_REFERENCE);
    Memory::Address_at(pc) = target;
  }
}


Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_, host_)));
}


void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(isolate_, pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}


Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}


    230   if (rmode_ == INTERNAL_REFERENCE) {
    231     return Memory::Address_at(pc_);
    232   } else {
    233     // Encoded internal references are lui/ori load of 32-bit abolute address.
    234     DCHECK(rmode_ == INTERNAL_REFERENCE_ENCODED);
    235     Instr instr_lui = Assembler::instr_at(pc_ + 0 * Assembler::kInstrSize);
    236     Instr instr_ori = Assembler::instr_at(pc_ + 1 * Assembler::kInstrSize);
    237     DCHECK(Assembler::IsLui(instr_lui));
    238     DCHECK(Assembler::IsOri(instr_ori));
    239     int32_t imm = (instr_lui & static_cast<int32_t>(kImm16Mask)) << kLuiShift;
    240     imm |= (instr_ori & static_cast<int32_t>(kImm16Mask));
    241     return reinterpret_cast<Address>(imm);
    242   }
    243 }
    244 
    245 
Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE || rmode_ == INTERNAL_REFERENCE_ENCODED);
  return reinterpret_cast<Address>(pc_);
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(target, write_barrier_mode, icache_flush_mode);
}


Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because a cell can never be
    // on an evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}


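// Length in bytes of the code sequence patched by the code-aging machinery
// (seven instructions on MIPS).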
static const int kNoCodeAgeSequenceLength = 7 * Assembler::kInstrSize;


Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on MIPS.
  return Handle<Object>();
}


Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + Assembler::kInstrSize, host_));
}


void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(isolate_, pc_ + Assembler::kInstrSize, host_,
                                   stub->instruction_start());
}


Address RelocInfo::debug_call_address() {
  // The pc_ offset of 0 assumes a patched debug break slot or return
  // sequence.
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  return Assembler::target_address_at(pc_, host_);
}


void RelocInfo::set_debug_call_address(Address target) {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  // The pc_ offset of 0 assumes a patched debug break slot or return
  // sequence.
  Assembler::set_target_address_at(isolate_, pc_, host_, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory::Address_at(pc_) = NULL;
  } else if (IsInternalReferenceEncoded(rmode_)) {
    Assembler::set_target_internal_reference_encoded_at(pc_, nullptr);
  } else {
    Assembler::set_target_address_at(isolate_, pc_, host_, NULL);
  }
}


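// A patched return sequence loads the target with a lui/ori pair and then
// jumps to it via JAL or JALR.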
bool RelocInfo::IsPatchedReturnSequence() {
  Instr instr0 = Assembler::instr_at(pc_);
  Instr instr1 = Assembler::instr_at(pc_ + 1 * Assembler::kInstrSize);
  Instr instr2 = Assembler::instr_at(pc_ + 2 * Assembler::kInstrSize);
  bool patched_return = ((instr0 & kOpcodeMask) == LUI &&
                         (instr1 & kOpcodeMask) == ORI &&
                         ((instr2 & kOpcodeMask) == JAL ||
                          ((instr2 & kOpcodeMask) == SPECIAL &&
                           (instr2 & kFunctionFieldMask) == JALR)));
  return patched_return;
}


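// A debug break slot has been patched when its first instruction is no
// longer the DEBUG_BREAK_NOP filler.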
bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}


void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


// -----------------------------------------------------------------------------
// Assembler.


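// Grow the assembler buffer when the remaining space falls to the kGap
// safety margin.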
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
}


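// Check whether the trampoline pool needs to be emitted, taking into account
// instructions that are about to be emitted.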
void Assembler::CheckTrampolinePoolQuick(int extra_instructions) {
  if (pc_offset() >= next_buffer_check_ - extra_instructions * kInstrSize) {
    CheckTrampolinePool();
  }
}


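// If the previous instruction was a compact branch, fill its forbidden slot
// with a nop before emitting anything else.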
void Assembler::CheckForEmitInForbiddenSlot() {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  if (IsPrevInstrCompactBranch()) {
    // Emit a nop to fill the forbidden slot so the next emission does not
    // land in it:
    Instr nop = SPECIAL | SLL;
    *reinterpret_cast<Instr*>(pc_) = nop;
    pc_ += kInstrSize;

    ClearCompactBranchState();
  }
}


void Assembler::EmitHelper(Instr x, CompactBranchType is_compact_branch) {
  if (IsPrevInstrCompactBranch()) {
    if (Instruction::IsForbiddenAfterBranchInstr(x)) {
      // Emit a nop to fill the forbidden slot so the CTI does not land in it:
      Instr nop = SPECIAL | SLL;
      *reinterpret_cast<Instr*>(pc_) = nop;
      pc_ += kInstrSize;
    }
    ClearCompactBranchState();
  }
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  if (is_compact_branch == CompactBranchType::COMPACT_BRANCH) {
    EmittedCompactBranchInstruction();
  }
  CheckTrampolinePoolQuick();
}


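// Emit a raw value of type T into the instruction stream; unlike the Instr
// overload above, this path does no compact-branch bookkeeping.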
template <typename T>
void Assembler::EmitHelper(T x) {
  *reinterpret_cast<T*>(pc_) = x;
  pc_ += sizeof(x);
  CheckTrampolinePoolQuick();
}


void Assembler::emit(Instr x, CompactBranchType is_compact_branch) {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  EmitHelper(x, is_compact_branch);
}


}  // namespace internal
}  // namespace v8

#endif  // V8_MIPS_ASSEMBLER_MIPS_INL_H_