// (code-search mirror navigation artifact, commented out: Home | History | Annotate | Download | only in arm)
      1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
      2 // All Rights Reserved.
      3 //
      4 // Redistribution and use in source and binary forms, with or without
      5 // modification, are permitted provided that the following conditions
      6 // are met:
      7 //
      8 // - Redistributions of source code must retain the above copyright notice,
      9 // this list of conditions and the following disclaimer.
     10 //
     11 // - Redistribution in binary form must reproduce the above copyright
     12 // notice, this list of conditions and the following disclaimer in the
     13 // documentation and/or other materials provided with the
     14 // distribution.
     15 //
     16 // - Neither the name of Sun Microsystems or the names of contributors may
     17 // be used to endorse or promote products derived from this software without
     18 // specific prior written permission.
     19 //
     20 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
     21 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
     22 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
     23 // FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
     24 // COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
     25 // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
     26 // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
     27 // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
     28 // HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
     29 // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
     30 // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
     31 // OF THE POSSIBILITY OF SUCH DAMAGE.
     32 
     33 // The original source code covered by the above license above has been modified
     34 // significantly by Google Inc.
     35 // Copyright 2012 the V8 project authors. All rights reserved.
     36 
     37 #ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
     38 #define V8_ARM_ASSEMBLER_ARM_INL_H_
     39 
     40 #include "src/arm/assembler-arm.h"
     41 
     42 #include "src/assembler.h"
     43 #include "src/debug/debug.h"
     44 
     45 
     46 namespace v8 {
     47 namespace internal {
     48 
     49 
     50 bool CpuFeatures::SupportsCrankshaft() { return IsSupported(VFP3); }
     51 
     52 
     53 int DoubleRegister::NumRegisters() {
     54   return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
     55 }
     56 
     57 
     58 void RelocInfo::apply(intptr_t delta) {
     59   if (RelocInfo::IsInternalReference(rmode_)) {
     60     // absolute code pointer inside code object moves with the code object.
     61     int32_t* p = reinterpret_cast<int32_t*>(pc_);
     62     *p += delta;  // relocate entry
     63   }
     64   // We do not use pc relative addressing on ARM, so there is
     65   // nothing else to do.
     66 }
     67 
     68 
     69 Address RelocInfo::target_address() {
     70   DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
     71   return Assembler::target_address_at(pc_, host_);
     72 }
     73 
     74 
     75 Address RelocInfo::target_address_address() {
     76   DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
     77                               || rmode_ == EMBEDDED_OBJECT
     78                               || rmode_ == EXTERNAL_REFERENCE);
     79   if (FLAG_enable_embedded_constant_pool ||
     80       Assembler::IsMovW(Memory::int32_at(pc_))) {
     81     // We return the PC for embedded constant pool since this function is used
     82     // by the serializer and expects the address to reside within the code
     83     // object.
     84     return reinterpret_cast<Address>(pc_);
     85   } else {
     86     DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)));
     87     return constant_pool_entry_address();
     88   }
     89 }
     90 
     91 
     92 Address RelocInfo::constant_pool_entry_address() {
     93   DCHECK(IsInConstantPool());
     94   return Assembler::constant_pool_entry_address(pc_, host_->constant_pool());
     95 }
     96 
     97 
     98 int RelocInfo::target_address_size() {
     99   return kPointerSize;
    100 }
    101 
    102 
    103 void RelocInfo::set_target_address(Address target,
    104                                    WriteBarrierMode write_barrier_mode,
    105                                    ICacheFlushMode icache_flush_mode) {
    106   DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
    107   Assembler::set_target_address_at(isolate_, pc_, host_, target,
    108                                    icache_flush_mode);
    109   if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
    110       host() != NULL && IsCodeTarget(rmode_)) {
    111     Object* target_code = Code::GetCodeFromTargetAddress(target);
    112     host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
    113         host(), this, HeapObject::cast(target_code));
    114   }
    115 }
    116 
    117 
    118 Object* RelocInfo::target_object() {
    119   DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
    120   return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
    121 }
    122 
    123 
    124 Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
    125   DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
    126   return Handle<Object>(reinterpret_cast<Object**>(
    127       Assembler::target_address_at(pc_, host_)));
    128 }
    129 
    130 
void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  // Store the object's address as the load target at pc_.
  Assembler::set_target_address_at(isolate_, pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  // Record the write so incremental marking sees the new reference from the
  // host code object to the heap-allocated target.
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}
    145 
    146 
    147 Address RelocInfo::target_external_reference() {
    148   DCHECK(rmode_ == EXTERNAL_REFERENCE);
    149   return Assembler::target_address_at(pc_, host_);
    150 }
    151 
    152 
    153 Address RelocInfo::target_internal_reference() {
    154   DCHECK(rmode_ == INTERNAL_REFERENCE);
    155   return Memory::Address_at(pc_);
    156 }
    157 
    158 
    159 Address RelocInfo::target_internal_reference_address() {
    160   DCHECK(rmode_ == INTERNAL_REFERENCE);
    161   return reinterpret_cast<Address>(pc_);
    162 }
    163 
    164 
    165 Address RelocInfo::target_runtime_entry(Assembler* origin) {
    166   DCHECK(IsRuntimeEntry(rmode_));
    167   return target_address();
    168 }
    169 
    170 
    171 void RelocInfo::set_target_runtime_entry(Address target,
    172                                          WriteBarrierMode write_barrier_mode,
    173                                          ICacheFlushMode icache_flush_mode) {
    174   DCHECK(IsRuntimeEntry(rmode_));
    175   if (target_address() != target)
    176     set_target_address(target, write_barrier_mode, icache_flush_mode);
    177 }
    178 
    179 
    180 Handle<Cell> RelocInfo::target_cell_handle() {
    181   DCHECK(rmode_ == RelocInfo::CELL);
    182   Address address = Memory::Address_at(pc_);
    183   return Handle<Cell>(reinterpret_cast<Cell**>(address));
    184 }
    185 
    186 
    187 Cell* RelocInfo::target_cell() {
    188   DCHECK(rmode_ == RelocInfo::CELL);
    189   return Cell::FromValueAddress(Memory::Address_at(pc_));
    190 }
    191 
    192 
void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  // Store the address of the cell's value slot at pc_.
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  // Inform incremental marking about the new reference to the cell.
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because cell can never be on
    // evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}
    206 
    207 
// Byte length of the code-age prologue sequence (three ARM instructions).
static const int kNoCodeAgeSequenceLength = 3 * Assembler::kInstrSize;
    209 
    210 
    211 Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
    212   UNREACHABLE();  // This should never be reached on Arm.
    213   return Handle<Object>();
    214 }
    215 
    216 
    217 Code* RelocInfo::code_age_stub() {
    218   DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
    219   return Code::GetCodeFromTargetAddress(
    220       Memory::Address_at(pc_ +
    221                          (kNoCodeAgeSequenceLength - Assembler::kInstrSize)));
    222 }
    223 
    224 
    225 void RelocInfo::set_code_age_stub(Code* stub,
    226                                   ICacheFlushMode icache_flush_mode) {
    227   DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
    228   Memory::Address_at(pc_ +
    229                      (kNoCodeAgeSequenceLength - Assembler::kInstrSize)) =
    230       stub->instruction_start();
    231 }
    232 
    233 
    234 Address RelocInfo::debug_call_address() {
    235   // The 2 instructions offset assumes patched debug break slot or return
    236   // sequence.
    237   DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
    238   return Memory::Address_at(pc_ + Assembler::kPatchDebugBreakSlotAddressOffset);
    239 }
    240 
    241 
    242 void RelocInfo::set_debug_call_address(Address target) {
    243   DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
    244   Memory::Address_at(pc_ + Assembler::kPatchDebugBreakSlotAddressOffset) =
    245       target;
    246   if (host() != NULL) {
    247     Object* target_code = Code::GetCodeFromTargetAddress(target);
    248     host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
    249         host(), this, HeapObject::cast(target_code));
    250   }
    251 }
    252 
    253 
    254 void RelocInfo::WipeOut() {
    255   DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
    256          IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
    257          IsInternalReference(rmode_));
    258   if (IsInternalReference(rmode_)) {
    259     Memory::Address_at(pc_) = NULL;
    260   } else {
    261     Assembler::set_target_address_at(isolate_, pc_, host_, NULL);
    262   }
    263 }
    264 
    265 
    266 bool RelocInfo::IsPatchedReturnSequence() {
    267   Instr current_instr = Assembler::instr_at(pc_);
    268   Instr next_instr = Assembler::instr_at(pc_ + Assembler::kInstrSize);
    269   // A patched return sequence is:
    270   //  ldr ip, [pc, #0]
    271   //  blx ip
    272   return Assembler::IsLdrPcImmediateOffset(current_instr) &&
    273          Assembler::IsBlxReg(next_instr);
    274 }
    275 
    276 
    277 bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
    278   Instr current_instr = Assembler::instr_at(pc_);
    279   return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
    280 }
    281 
    282 
// Dispatch this reloc entry to the matching ObjectVisitor callback.
// The else-if chain order distinguishes exact-mode checks from the
// predicate-based groups (code targets, code-age, debug slots, runtime).
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    // Debug break slots are only visited once they have been patched.
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}
    304 
    305 
// Static-dispatch twin of the ObjectVisitor overload above; same mode
// dispatch order, but callbacks are resolved at compile time via
// StaticVisitor.
template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    // Debug break slots are only visited once they have been patched.
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}
    328 
    329 
    330 Operand::Operand(int32_t immediate, RelocInfo::Mode rmode)  {
    331   rm_ = no_reg;
    332   imm32_ = immediate;
    333   rmode_ = rmode;
    334 }
    335 
    336 
    337 Operand::Operand(const ExternalReference& f)  {
    338   rm_ = no_reg;
    339   imm32_ = reinterpret_cast<int32_t>(f.address());
    340   rmode_ = RelocInfo::EXTERNAL_REFERENCE;
    341 }
    342 
    343 
    344 Operand::Operand(Smi* value) {
    345   rm_ = no_reg;
    346   imm32_ =  reinterpret_cast<intptr_t>(value);
    347   rmode_ = RelocInfo::NONE32;
    348 }
    349 
    350 
    351 Operand::Operand(Register rm) {
    352   rm_ = rm;
    353   rs_ = no_reg;
    354   shift_op_ = LSL;
    355   shift_imm_ = 0;
    356 }
    357 
    358 
    359 bool Operand::is_reg() const {
    360   return rm_.is_valid() &&
    361          rs_.is(no_reg) &&
    362          shift_op_ == LSL &&
    363          shift_imm_ == 0;
    364 }
    365 
    366 
    367 void Assembler::CheckBuffer() {
    368   if (buffer_space() <= kGap) {
    369     GrowBuffer();
    370   }
    371   MaybeCheckConstPool();
    372 }
    373 
    374 
    375 void Assembler::emit(Instr x) {
    376   CheckBuffer();
    377   *reinterpret_cast<Instr*>(pc_) = x;
    378   pc_ += kInstrSize;
    379 }
    380 
    381 
    382 Address Assembler::target_address_from_return_address(Address pc) {
    383   // Returns the address of the call target from the return address that will
    384   // be returned to after a call.
    385   // Call sequence on V7 or later is:
    386   //  movw  ip, #... @ call address low 16
    387   //  movt  ip, #... @ call address high 16
    388   //  blx   ip
    389   //                      @ return address
    390   // For V6 when the constant pool is unavailable, it is:
    391   //  mov  ip, #...     @ call address low 8
    392   //  orr  ip, ip, #... @ call address 2nd 8
    393   //  orr  ip, ip, #... @ call address 3rd 8
    394   //  orr  ip, ip, #... @ call address high 8
    395   //  blx   ip
    396   //                      @ return address
    397   // In cases that need frequent patching, the address is in the
    398   // constant pool.  It could be a small constant pool load:
    399   //  ldr   ip, [pc / pp, #...] @ call address
    400   //  blx   ip
    401   //                      @ return address
    402   // Or an extended constant pool load (ARMv7):
    403   //  movw  ip, #...
    404   //  movt  ip, #...
    405   //  ldr   ip, [pc, ip]  @ call address
    406   //  blx   ip
    407   //                      @ return address
    408   // Or an extended constant pool load (ARMv6):
    409   //  mov  ip, #...
    410   //  orr  ip, ip, #...
    411   //  orr  ip, ip, #...
    412   //  orr  ip, ip, #...
    413   //  ldr   ip, [pc, ip]  @ call address
    414   //  blx   ip
    415   //                      @ return address
    416   Address candidate = pc - 2 * Assembler::kInstrSize;
    417   Instr candidate_instr(Memory::int32_at(candidate));
    418   if (IsLdrPcImmediateOffset(candidate_instr) |
    419       IsLdrPpImmediateOffset(candidate_instr)) {
    420     return candidate;
    421   } else {
    422     if (IsLdrPpRegOffset(candidate_instr)) {
    423       candidate -= Assembler::kInstrSize;
    424     }
    425     if (CpuFeatures::IsSupported(ARMv7)) {
    426       candidate -= 1 * Assembler::kInstrSize;
    427       DCHECK(IsMovW(Memory::int32_at(candidate)) &&
    428              IsMovT(Memory::int32_at(candidate + Assembler::kInstrSize)));
    429     } else {
    430       candidate -= 3 * Assembler::kInstrSize;
    431       DCHECK(
    432           IsMovImmed(Memory::int32_at(candidate)) &&
    433           IsOrrImmed(Memory::int32_at(candidate + Assembler::kInstrSize)) &&
    434           IsOrrImmed(Memory::int32_at(candidate + 2 * Assembler::kInstrSize)) &&
    435           IsOrrImmed(Memory::int32_at(candidate + 3 * Assembler::kInstrSize)));
    436     }
    437     return candidate;
    438   }
    439 }
    440 
    441 
    442 Address Assembler::return_address_from_call_start(Address pc) {
    443   if (IsLdrPcImmediateOffset(Memory::int32_at(pc)) |
    444       IsLdrPpImmediateOffset(Memory::int32_at(pc))) {
    445     // Load from constant pool, small section.
    446     return pc + kInstrSize * 2;
    447   } else {
    448     if (CpuFeatures::IsSupported(ARMv7)) {
    449       DCHECK(IsMovW(Memory::int32_at(pc)));
    450       DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
    451       if (IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize))) {
    452         // Load from constant pool, extended section.
    453         return pc + kInstrSize * 4;
    454       } else {
    455         // A movw / movt load immediate.
    456         return pc + kInstrSize * 3;
    457       }
    458     } else {
    459       DCHECK(IsMovImmed(Memory::int32_at(pc)));
    460       DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)));
    461       DCHECK(IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)));
    462       DCHECK(IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    463       if (IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize))) {
    464         // Load from constant pool, extended section.
    465         return pc + kInstrSize * 6;
    466       } else {
    467         // A mov / orr load immediate.
    468         return pc + kInstrSize * 5;
    469       }
    470     }
    471   }
    472 }
    473 
    474 
    475 void Assembler::deserialization_set_special_target_at(
    476     Isolate* isolate, Address constant_pool_entry, Code* code, Address target) {
    477   if (FLAG_enable_embedded_constant_pool) {
    478     set_target_address_at(isolate, constant_pool_entry, code, target);
    479   } else {
    480     Memory::Address_at(constant_pool_entry) = target;
    481   }
    482 }
    483 
    484 
    485 void Assembler::deserialization_set_target_internal_reference_at(
    486     Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
    487   Memory::Address_at(pc) = target;
    488 }
    489 
    490 
    491 bool Assembler::is_constant_pool_load(Address pc) {
    492   if (CpuFeatures::IsSupported(ARMv7)) {
    493     return !Assembler::IsMovW(Memory::int32_at(pc)) ||
    494            (FLAG_enable_embedded_constant_pool &&
    495             Assembler::IsLdrPpRegOffset(
    496                 Memory::int32_at(pc + 2 * Assembler::kInstrSize)));
    497   } else {
    498     return !Assembler::IsMovImmed(Memory::int32_at(pc)) ||
    499            (FLAG_enable_embedded_constant_pool &&
    500             Assembler::IsLdrPpRegOffset(
    501                 Memory::int32_at(pc + 4 * Assembler::kInstrSize)));
    502   }
    503 }
    504 
    505 
// Computes the address of the constant pool slot referenced by the load
// sequence at pc. With embedded constant pools the offset is decoded from
// the immediate-materializing instructions (or a single ldr); otherwise the
// entry is pc-relative to the ldr itself.
Address Assembler::constant_pool_entry_address(Address pc,
                                               Address constant_pool) {
  if (FLAG_enable_embedded_constant_pool) {
    DCHECK(constant_pool != NULL);
    int cp_offset;
    if (!CpuFeatures::IsSupported(ARMv7) && IsMovImmed(Memory::int32_at(pc))) {
      DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
             IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
             IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)) &&
             IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize)));
      // This is an extended constant pool lookup (ARMv6).
      Instr mov_instr = instr_at(pc);
      Instr orr_instr_1 = instr_at(pc + kInstrSize);
      Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
      Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
      // Reassemble the 32-bit offset from the four 8-bit immediate chunks.
      cp_offset = DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
                  DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3);
    } else if (IsMovW(Memory::int32_at(pc))) {
      DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)) &&
             IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize)));
      // This is an extended constant pool lookup (ARMv7).
      Instruction* movw_instr = Instruction::At(pc);
      Instruction* movt_instr = Instruction::At(pc + kInstrSize);
      // movt carries the high half, movw the low half of the offset.
      cp_offset = (movt_instr->ImmedMovwMovtValue() << 16) |
                  movw_instr->ImmedMovwMovtValue();
    } else {
      // This is a small constant pool lookup.
      DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc)));
      cp_offset = GetLdrRegisterImmediateOffset(Memory::int32_at(pc));
    }
    return constant_pool + cp_offset;
  } else {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc)));
    Instr instr = Memory::int32_at(pc);
    // pc-relative ldr: account for the ARM pipeline delta.
    return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta;
  }
}
    543 
    544 
// Reads the 32-bit target encoded at pc: either loaded from the constant
// pool, or rebuilt from the immediate-load sequence (movw/movt on ARMv7,
// mov/orr chain otherwise).
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Return the value in the constant pool.
    return Memory::Address_at(constant_pool_entry_address(pc, constant_pool));
  } else if (CpuFeatures::IsSupported(ARMv7)) {
    // This is an movw / movt immediate load. Return the immediate.
    DCHECK(IsMovW(Memory::int32_at(pc)) &&
           IsMovT(Memory::int32_at(pc + kInstrSize)));
    Instruction* movw_instr = Instruction::At(pc);
    Instruction* movt_instr = Instruction::At(pc + kInstrSize);
    // movt carries the high half, movw the low half of the target.
    return reinterpret_cast<Address>(
        (movt_instr->ImmedMovwMovtValue() << 16) |
         movw_instr->ImmedMovwMovtValue());
  } else {
    // This is an mov / orr immediate load. Return the immediate.
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    Instr mov_instr = instr_at(pc);
    Instr orr_instr_1 = instr_at(pc + kInstrSize);
    Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
    Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
    // Reassemble the 32-bit target from the four 8-bit immediate chunks.
    Address ret = reinterpret_cast<Address>(
        DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
        DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3));
    return ret;
  }
}
    574 
    575 
// Patches the 32-bit target encoded at pc: either by rewriting the constant
// pool slot (no icache flush needed), or by patching the immediates of the
// movw/movt (ARMv7) or mov/orr (pre-ARMv7) load sequence and flushing the
// instruction cache over the modified instructions.
void Assembler::set_target_address_at(Isolate* isolate, Address pc,
                                      Address constant_pool, Address target,
                                      ICacheFlushMode icache_flush_mode) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Update the entry in the constant pool.
    Memory::Address_at(constant_pool_entry_address(pc, constant_pool)) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code as follows:
    //   Assembler::FlushICache(isolate, pc, sizeof(target));
    // However, on ARM, no instruction is actually patched in the case
    // of embedded constants of the form:
    // ldr   ip, [pp, #...]
    // since the instruction accessing this address in the constant pool remains
    // unchanged.
  } else if (CpuFeatures::IsSupported(ARMv7)) {
    // This is an movw / movt immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovW(Memory::int32_at(pc)));
    DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    // movw gets the low half, movt the high half of the new target.
    instr_ptr[0] = PatchMovwImmediate(instr_ptr[0], immediate & 0xFFFF);
    instr_ptr[1] = PatchMovwImmediate(instr_ptr[1], immediate >> 16);
    DCHECK(IsMovW(Memory::int32_at(pc)));
    DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(isolate, pc, 2 * kInstrSize);
    }
  } else {
    // This is an mov / orr immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    // Each instruction carries one 8-bit chunk of the new target.
    instr_ptr[0] = PatchShiftImm(instr_ptr[0], immediate & kImm8Mask);
    instr_ptr[1] = PatchShiftImm(instr_ptr[1], immediate & (kImm8Mask << 8));
    instr_ptr[2] = PatchShiftImm(instr_ptr[2], immediate & (kImm8Mask << 16));
    instr_ptr[3] = PatchShiftImm(instr_ptr[3], immediate & (kImm8Mask << 24));
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(isolate, pc, 4 * kInstrSize);
    }
  }
}
    626 
    627 
    628 }  // namespace internal
    629 }  // namespace v8
    630 
    631 #endif  // V8_ARM_ASSEMBLER_ARM_INL_H_
    632