/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <iostream>
#include <type_traits>

#include "jni_macro_assembler_arm_vixl.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "thread.h"

using namespace vixl::aarch32;  // NOLINT(build/namespaces)
namespace vixl32 = vixl::aarch32;

using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;

namespace art {
namespace arm {

#ifdef ___
#error "ARM Assembler macro already defined."
#else
#define ___   asm_.GetVIXLAssembler()->
#endif
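// `___` forwards to the VIXL macro assembler: for example, `___ Mov(r0, r1)`
// expands to `asm_.GetVIXLAssembler()->Mov(r0, r1)`.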

void ArmVIXLJNIMacroAssembler::FinalizeCode() {
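  // Emit the out-of-line exception slow paths queued by ExceptionPoll() before
  // letting VIXL finalize the code buffer.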
  for (const std::unique_ptr<
      ArmVIXLJNIMacroAssembler::ArmException>& exception : exception_blocks_) {
    EmitExceptionPoll(exception.get());
  }
  asm_.FinalizeCode();
}

static dwarf::Reg DWARFReg(vixl32::Register reg) {
  return dwarf::Reg::ArmCore(static_cast<int>(reg.GetCode()));
}

static dwarf::Reg DWARFReg(vixl32::SRegister reg) {
  return dwarf::Reg::ArmFp(static_cast<int>(reg.GetCode()));
}

static constexpr size_t kFramePointerSize = static_cast<size_t>(kArmPointerSize);

void ArmVIXLJNIMacroAssembler::BuildFrame(size_t frame_size,
                                          ManagedRegister method_reg,
                                          ArrayRef<const ManagedRegister> callee_save_regs,
                                          const ManagedRegisterEntrySpills& entry_spills) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  CHECK(r0.Is(method_reg.AsArm().AsVIXLRegister()));

  // Push callee saves and link register.
  RegList core_spill_mask = 1 << LR;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }
  ___ Push(RegisterList(core_spill_mask));
  cfi().AdjustCFAOffset(POPCOUNT(core_spill_mask) * kFramePointerSize);
  cfi().RelOffsetForMany(DWARFReg(r0), 0, core_spill_mask, kFramePointerSize);
  if (fp_spill_mask != 0) {
    uint32_t first = CTZ(fp_spill_mask);

    // Check that list is contiguous.
    DCHECK_EQ(fp_spill_mask >> CTZ(fp_spill_mask), ~0u >> (32 - POPCOUNT(fp_spill_mask)));
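    // For example, fp_spill_mask == 0b11110000 passes:
    // 0b11110000 >> 4 == 0b1111 == ~0u >> (32 - 4).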

    ___ Vpush(SRegisterList(vixl32::SRegister(first), POPCOUNT(fp_spill_mask)));
    cfi().AdjustCFAOffset(POPCOUNT(fp_spill_mask) * kFramePointerSize);
    cfi().RelOffsetForMany(DWARFReg(s0), 0, fp_spill_mask, kFramePointerSize);
  }

  // Increase frame to required size.
  int pushed_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  // Must at least have space for Method*.
  CHECK_GT(frame_size, pushed_values * kFramePointerSize);
  IncreaseFrameSize(frame_size - pushed_values * kFramePointerSize);  // Handles CFI as well.

  // Write out Method*.
  asm_.StoreToOffset(kStoreWord, r0, sp, 0);

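  // Illustrative layout at this point (offsets relative to the new sp):
  //   sp + frame_size + 4, ... : entry spills, written next
  //   sp + frame_size - 4      : saved LR (highest slot of the Push above)
  //   ...                      : remaining core and FP callee saves
  //   sp + 0                   : ArtMethod* (from r0)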
  // Write out entry spills.
  int32_t offset = frame_size + kFramePointerSize;
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    ArmManagedRegister reg = entry_spills.at(i).AsArm();
    if (reg.IsNoRegister()) {
      // Only increment stack offset.
      ManagedRegisterSpill spill = entry_spills.at(i);
      offset += spill.getSize();
    } else if (reg.IsCoreRegister()) {
      asm_.StoreToOffset(kStoreWord, reg.AsVIXLRegister(), sp, offset);
      offset += 4;
    } else if (reg.IsSRegister()) {
      asm_.StoreSToOffset(reg.AsVIXLSRegister(), sp, offset);
      offset += 4;
    } else if (reg.IsDRegister()) {
      asm_.StoreDToOffset(reg.AsVIXLDRegister(), sp, offset);
      offset += 8;
    }
  }
}

void ArmVIXLJNIMacroAssembler::RemoveFrame(size_t frame_size,
                                           ArrayRef<const ManagedRegister> callee_save_regs) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  cfi().RememberState();

  // Compute callee saves to pop and LR.
  RegList core_spill_mask = 1 << LR;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }

  // Decrease frame to start of callee saves.
  int pop_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  CHECK_GT(frame_size, pop_values * kFramePointerSize);
  DecreaseFrameSize(frame_size - (pop_values * kFramePointerSize));  // Handles CFI as well.

  // Pop FP callee saves.
  if (fp_spill_mask != 0) {
    uint32_t first = CTZ(fp_spill_mask);
    // Check that list is contiguous.
    DCHECK_EQ(fp_spill_mask >> CTZ(fp_spill_mask), ~0u >> (32 - POPCOUNT(fp_spill_mask)));

    ___ Vpop(SRegisterList(vixl32::SRegister(first), POPCOUNT(fp_spill_mask)));
    cfi().AdjustCFAOffset(-kFramePointerSize * POPCOUNT(fp_spill_mask));
    cfi().RestoreMany(DWARFReg(s0), fp_spill_mask);
  }

  // Pop core callee saves and LR.
  ___ Pop(RegisterList(core_spill_mask));

  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Refresh Mark Register.
    // TODO: Refresh MR only if suspend is taken.
    ___ Ldr(mr, MemOperand(tr, Thread::IsGcMarkingOffset<kArmPointerSize>().Int32Value()));
  }

  // Return to LR.
  ___ Bx(vixl32::lr);

  // The CFI should be restored for any code that follows the exit block.
  cfi().RestoreState();
  cfi().DefCFAOffset(frame_size);
}


void ArmVIXLJNIMacroAssembler::IncreaseFrameSize(size_t adjust) {
  asm_.AddConstant(sp, -adjust);
  cfi().AdjustCFAOffset(adjust);
}
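// For example, IncreaseFrameSize(16) emits `sub sp, sp, #16` (or an equivalent
// sequence when the immediate does not encode directly) and records the
// matching CFA adjustment for the unwinder.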

void ArmVIXLJNIMacroAssembler::DecreaseFrameSize(size_t adjust) {
  asm_.AddConstant(sp, adjust);
  cfi().AdjustCFAOffset(-adjust);
}

void ArmVIXLJNIMacroAssembler::Store(FrameOffset dest, ManagedRegister m_src, size_t size) {
  ArmManagedRegister src = m_src.AsArm();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCoreRegister()) {
    CHECK_EQ(4u, size);
    UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
    temps.Exclude(src.AsVIXLRegister());
    asm_.StoreToOffset(kStoreWord, src.AsVIXLRegister(), sp, dest.Int32Value());
  } else if (src.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    asm_.StoreToOffset(kStoreWord, src.AsVIXLRegisterPairLow(),  sp, dest.Int32Value());
    asm_.StoreToOffset(kStoreWord, src.AsVIXLRegisterPairHigh(), sp, dest.Int32Value() + 4);
  } else if (src.IsSRegister()) {
    CHECK_EQ(4u, size);
    asm_.StoreSToOffset(src.AsVIXLSRegister(), sp, dest.Int32Value());
  } else {
    CHECK_EQ(8u, size);
    CHECK(src.IsDRegister()) << src;
    asm_.StoreDToOffset(src.AsVIXLDRegister(), sp, dest.Int32Value());
  }
}

void ArmVIXLJNIMacroAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  ArmManagedRegister src = msrc.AsArm();
  CHECK(src.IsCoreRegister()) << src;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(src.AsVIXLRegister());
  asm_.StoreToOffset(kStoreWord, src.AsVIXLRegister(), sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  ArmManagedRegister src = msrc.AsArm();
  CHECK(src.IsCoreRegister()) << src;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(src.AsVIXLRegister());
  asm_.StoreToOffset(kStoreWord, src.AsVIXLRegister(), sp, dest.Int32Value());
}

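// Store a value that spans a register and a stack slot: the low word comes
// from `msrc`, the high word is copied from the frame slot `in_off` through
// the scratch register.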
void ArmVIXLJNIMacroAssembler::StoreSpanning(FrameOffset dest,
                                             ManagedRegister msrc,
                                             FrameOffset in_off,
                                             ManagedRegister mscratch) {
  ArmManagedRegister src = msrc.AsArm();
  ArmManagedRegister scratch = mscratch.AsArm();
  asm_.StoreToOffset(kStoreWord, src.AsVIXLRegister(), sp, dest.Int32Value());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), sp, in_off.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), sp, dest.Int32Value() + 4);
}

void ArmVIXLJNIMacroAssembler::CopyRef(FrameOffset dest,
                                       FrameOffset src,
                                       ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), sp, src.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::LoadRef(ManagedRegister dest,
                                       ManagedRegister base,
                                       MemberOffset offs,
                                       bool unpoison_reference) {
  ArmManagedRegister dst = dest.AsArm();
  CHECK(dst.IsCoreRegister() && base.AsArm().IsCoreRegister()) << dst;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(dst.AsVIXLRegister(), base.AsArm().AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord,
                      dst.AsVIXLRegister(),
                      base.AsArm().AsVIXLRegister(),
                      offs.Int32Value());

  if (unpoison_reference) {
    asm_.MaybeUnpoisonHeapReference(dst.AsVIXLRegister());
  }
}

void ArmVIXLJNIMacroAssembler::LoadRef(ManagedRegister dest ATTRIBUTE_UNUSED,
                                       FrameOffset src ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::LoadRawPtr(ManagedRegister dest ATTRIBUTE_UNUSED,
                                          ManagedRegister base ATTRIBUTE_UNUSED,
                                          Offset offs ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::StoreImmediateToFrame(FrameOffset dest,
                                                     uint32_t imm,
                                                     ManagedRegister scratch) {
  ArmManagedRegister mscratch = scratch.AsArm();
  CHECK(mscratch.IsCoreRegister()) << mscratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(mscratch.AsVIXLRegister());
  asm_.LoadImmediate(mscratch.AsVIXLRegister(), imm);
  asm_.StoreToOffset(kStoreWord, mscratch.AsVIXLRegister(), sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::Load(ManagedRegister m_dst, FrameOffset src, size_t size) {
  return Load(m_dst.AsArm(), sp, src.Int32Value(), size);
}

void ArmVIXLJNIMacroAssembler::LoadFromThread(ManagedRegister m_dst,
                                              ThreadOffset32 src,
                                              size_t size) {
  return Load(m_dst.AsArm(), tr, src.Int32Value(), size);
}

void ArmVIXLJNIMacroAssembler::LoadRawPtrFromThread(ManagedRegister m_dst, ThreadOffset32 offs) {
  ArmManagedRegister dst = m_dst.AsArm();
  CHECK(dst.IsCoreRegister()) << dst;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(dst.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord, dst.AsVIXLRegister(), tr, offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::CopyRawPtrFromThread(FrameOffset fr_offs,
                                                    ThreadOffset32 thr_offs,
                                                    ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), tr, thr_offs.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), sp, fr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::CopyRawPtrToThread(ThreadOffset32 thr_offs ATTRIBUTE_UNUSED,
                                                  FrameOffset fr_offs ATTRIBUTE_UNUSED,
                                                  ManagedRegister mscratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::StoreStackOffsetToThread(ThreadOffset32 thr_offs,
                                                        FrameOffset fr_offs,
                                                        ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.AddConstant(scratch.AsVIXLRegister(), sp, fr_offs.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), tr, thr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreStackPointerToThread(ThreadOffset32 thr_offs) {
  asm_.StoreToOffset(kStoreWord, sp, tr, thr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::SignExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
                                          size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "no sign extension necessary for arm";
}

void ArmVIXLJNIMacroAssembler::ZeroExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
                                          size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "no zero extension necessary for arm";
}

void ArmVIXLJNIMacroAssembler::Move(ManagedRegister m_dst,
                                    ManagedRegister m_src,
                                    size_t size ATTRIBUTE_UNUSED) {
  ArmManagedRegister dst = m_dst.AsArm();
  ArmManagedRegister src = m_src.AsArm();
  if (!dst.Equals(src)) {
    if (dst.IsCoreRegister()) {
      CHECK(src.IsCoreRegister()) << src;
      UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
      temps.Exclude(dst.AsVIXLRegister());
      ___ Mov(dst.AsVIXLRegister(), src.AsVIXLRegister());
    } else if (dst.IsDRegister()) {
      if (src.IsDRegister()) {
        ___ Vmov(F64, dst.AsVIXLDRegister(), src.AsVIXLDRegister());
      } else {
        // VMOV Dn, Rlo, Rhi (Dn = {Rlo, Rhi})
        CHECK(src.IsRegisterPair()) << src;
        ___ Vmov(dst.AsVIXLDRegister(), src.AsVIXLRegisterPairLow(), src.AsVIXLRegisterPairHigh());
      }
    } else if (dst.IsSRegister()) {
      if (src.IsSRegister()) {
        ___ Vmov(F32, dst.AsVIXLSRegister(), src.AsVIXLSRegister());
      } else {
        // VMOV Sn, Rn  (Sn = Rn)
        CHECK(src.IsCoreRegister()) << src;
        ___ Vmov(dst.AsVIXLSRegister(), src.AsVIXLRegister());
      }
    } else {
      CHECK(dst.IsRegisterPair()) << dst;
      CHECK(src.IsRegisterPair()) << src;
      // Ensure that the first move doesn't clobber the input of the second.
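      // For example, with dst = (r1, r2) and src = (r0, r1), moving the low
      // words first would overwrite r1 before it is read as src's high word.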
      if (src.AsRegisterPairHigh() != dst.AsRegisterPairLow()) {
        ___ Mov(dst.AsVIXLRegisterPairLow(),  src.AsVIXLRegisterPairLow());
        ___ Mov(dst.AsVIXLRegisterPairHigh(), src.AsVIXLRegisterPairHigh());
      } else {
        ___ Mov(dst.AsVIXLRegisterPairHigh(), src.AsVIXLRegisterPairHigh());
        ___ Mov(dst.AsVIXLRegisterPairLow(),  src.AsVIXLRegisterPairLow());
      }
    }
  }
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dest,
                                    FrameOffset src,
                                    ManagedRegister scratch,
                                    size_t size) {
  ArmManagedRegister temp = scratch.AsArm();
  CHECK(temp.IsCoreRegister()) << temp;
  CHECK(size == 4 || size == 8) << size;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(temp.AsVIXLRegister());
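  // Copy word by word through the single scratch register; an 8-byte copy is
  // two independent 32-bit load/store pairs.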
  if (size == 4) {
    asm_.LoadFromOffset(kLoadWord, temp.AsVIXLRegister(), sp, src.Int32Value());
    asm_.StoreToOffset(kStoreWord, temp.AsVIXLRegister(), sp, dest.Int32Value());
  } else if (size == 8) {
    asm_.LoadFromOffset(kLoadWord, temp.AsVIXLRegister(), sp, src.Int32Value());
    asm_.StoreToOffset(kStoreWord, temp.AsVIXLRegister(), sp, dest.Int32Value());
    asm_.LoadFromOffset(kLoadWord, temp.AsVIXLRegister(), sp, src.Int32Value() + 4);
    asm_.StoreToOffset(kStoreWord, temp.AsVIXLRegister(), sp, dest.Int32Value() + 4);
  }
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dest ATTRIBUTE_UNUSED,
                                    ManagedRegister src_base ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(ManagedRegister dest_base ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    FrameOffset src ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dst ATTRIBUTE_UNUSED,
                                    FrameOffset src_base ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(ManagedRegister dest ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister src ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dst ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    FrameOffset src ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister scratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
                                                      FrameOffset handle_scope_offset,
                                                      ManagedRegister min_reg,
                                                      bool null_allowed) {
  ArmManagedRegister out_reg = mout_reg.AsArm();
  ArmManagedRegister in_reg = min_reg.AsArm();
  CHECK(in_reg.IsNoRegister() || in_reg.IsCoreRegister()) << in_reg;
  CHECK(out_reg.IsCoreRegister()) << out_reg;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(out_reg.AsVIXLRegister());
  if (null_allowed) {
    // Null values get a handle scope entry value of 0.  Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. out_reg = (handle == 0) ? 0 : (SP+handle_offset)
    if (in_reg.IsNoRegister()) {
      asm_.LoadFromOffset(kLoadWord,
                          out_reg.AsVIXLRegister(),
                          sp,
                          handle_scope_offset.Int32Value());
      in_reg = out_reg;
    }

    temps.Exclude(in_reg.AsVIXLRegister());
    ___ Cmp(in_reg.AsVIXLRegister(), 0);

    if (asm_.ShifterOperandCanHold(ADD, handle_scope_offset.Int32Value(), kCcDontCare)) {
      if (!out_reg.Equals(in_reg)) {
        ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                                 3 * vixl32::kMaxInstructionSizeInBytes,
                                 CodeBufferCheckScope::kMaximumSize);
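        // ITE eq (mask 0xc): the `mov` below executes when eq holds, the
        // `add` emitted by AddConstantInIt when ne holds.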
        ___ it(eq, 0xc);
        ___ mov(eq, out_reg.AsVIXLRegister(), 0);
        asm_.AddConstantInIt(out_reg.AsVIXLRegister(), sp, handle_scope_offset.Int32Value(), ne);
      } else {
        ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                                 2 * vixl32::kMaxInstructionSizeInBytes,
                                 CodeBufferCheckScope::kMaximumSize);
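        // IT ne (mask 0x8): only the following `add` is predicated.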
        ___ it(ne, 0x8);
        asm_.AddConstantInIt(out_reg.AsVIXLRegister(), sp, handle_scope_offset.Int32Value(), ne);
      }
    } else {
      // TODO: Implement this (old arm assembler would have crashed here).
      UNIMPLEMENTED(FATAL);
    }
  } else {
    asm_.AddConstant(out_reg.AsVIXLRegister(), sp, handle_scope_offset.Int32Value());
  }
}

void ArmVIXLJNIMacroAssembler::CreateHandleScopeEntry(FrameOffset out_off,
                                                      FrameOffset handle_scope_offset,
                                                      ManagedRegister mscratch,
                                                      bool null_allowed) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  if (null_allowed) {
    asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), sp, handle_scope_offset.Int32Value());
    // Null values get a handle scope entry value of 0.  Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. scratch = (scratch == 0) ? 0 : (SP+handle_scope_offset)
    ___ Cmp(scratch.AsVIXLRegister(), 0);

    if (asm_.ShifterOperandCanHold(ADD, handle_scope_offset.Int32Value(), kCcDontCare)) {
      ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                               2 * vixl32::kMaxInstructionSizeInBytes,
                               CodeBufferCheckScope::kMaximumSize);
      ___ it(ne, 0x8);
      asm_.AddConstantInIt(scratch.AsVIXLRegister(), sp, handle_scope_offset.Int32Value(), ne);
    } else {
      // TODO: Implement this (old arm assembler would have crashed here).
      UNIMPLEMENTED(FATAL);
    }
  } else {
    asm_.AddConstant(scratch.AsVIXLRegister(), sp, handle_scope_offset.Int32Value());
  }
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), sp, out_off.Int32Value());
}

void ArmVIXLJNIMacroAssembler::LoadReferenceFromHandleScope(
    ManagedRegister mout_reg ATTRIBUTE_UNUSED,
    ManagedRegister min_reg ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::VerifyObject(ManagedRegister src ATTRIBUTE_UNUSED,
                                            bool could_be_null ATTRIBUTE_UNUSED) {
  // TODO: not validating references.
}

void ArmVIXLJNIMacroAssembler::VerifyObject(FrameOffset src ATTRIBUTE_UNUSED,
                                            bool could_be_null ATTRIBUTE_UNUSED) {
  // TODO: not validating references.
}

void ArmVIXLJNIMacroAssembler::Call(ManagedRegister mbase,
                                    Offset offset,
                                    ManagedRegister mscratch) {
  ArmManagedRegister base = mbase.AsArm();
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(base.IsCoreRegister()) << base;
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord,
                      scratch.AsVIXLRegister(),
                      base.AsVIXLRegister(),
                      offset.Int32Value());
  ___ Blx(scratch.AsVIXLRegister());
  // TODO: place reference map on call.
}

void ArmVIXLJNIMacroAssembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  // Call *(*(SP + base) + offset)
  asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), sp, base.Int32Value());
  asm_.LoadFromOffset(kLoadWord,
                      scratch.AsVIXLRegister(),
                      scratch.AsVIXLRegister(),
                      offset.Int32Value());
  ___ Blx(scratch.AsVIXLRegister());
  // TODO: place reference map on call.
}

void ArmVIXLJNIMacroAssembler::CallFromThread(ThreadOffset32 offset ATTRIBUTE_UNUSED,
                                              ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::GetCurrentThread(ManagedRegister mtr) {
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(mtr.AsArm().AsVIXLRegister());
  ___ Mov(mtr.AsArm().AsVIXLRegister(), tr);
}

void ArmVIXLJNIMacroAssembler::GetCurrentThread(FrameOffset dest_offset,
                                                ManagedRegister scratch ATTRIBUTE_UNUSED) {
  asm_.StoreToOffset(kStoreWord, tr, sp, dest_offset.Int32Value());
}

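// Poll for a pending exception: load the Thread's exception field and, if it
// is non-null, branch to an out-of-line slow path emitted later by
// FinalizeCode().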
void ArmVIXLJNIMacroAssembler::ExceptionPoll(ManagedRegister m_scratch, size_t stack_adjust) {
  CHECK_ALIGNED(stack_adjust, kStackAlignment);
  ArmManagedRegister scratch = m_scratch.AsArm();
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  exception_blocks_.emplace_back(
      new ArmVIXLJNIMacroAssembler::ArmException(scratch, stack_adjust));
  asm_.LoadFromOffset(kLoadWord,
                      scratch.AsVIXLRegister(),
                      tr,
                      Thread::ExceptionOffset<kArmPointerSize>().Int32Value());

  ___ Cmp(scratch.AsVIXLRegister(), 0);
  {
    ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                             vixl32::kMaxInstructionSizeInBytes,
                             CodeBufferCheckScope::kMaximumSize);
    vixl32::Label* label = exception_blocks_.back()->Entry();
    ___ b(ne, Narrow, label);
    ___ AddBranchLabel(label);
  }
  // TODO: think about using CBNZ here.
}

std::unique_ptr<JNIMacroLabel> ArmVIXLJNIMacroAssembler::CreateLabel() {
  return std::unique_ptr<JNIMacroLabel>(new ArmVIXLJNIMacroLabel());
}

void ArmVIXLJNIMacroAssembler::Jump(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  ___ B(ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
}

void ArmVIXLJNIMacroAssembler::Jump(JNIMacroLabel* label,
                                    JNIMacroUnaryCondition condition,
                                    ManagedRegister test) {
  CHECK(label != nullptr);

  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(test.AsArm().AsVIXLRegister());
  switch (condition) {
    case JNIMacroUnaryCondition::kZero:
      ___ CompareAndBranchIfZero(test.AsArm().AsVIXLRegister(),
                                 ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
      break;
    case JNIMacroUnaryCondition::kNotZero:
      ___ CompareAndBranchIfNonZero(test.AsArm().AsVIXLRegister(),
                                    ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
      break;
    default:
      LOG(FATAL) << "Not implemented unary condition: " << static_cast<int>(condition);
      UNREACHABLE();
  }
}

void ArmVIXLJNIMacroAssembler::Bind(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  ___ Bind(ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
}

void ArmVIXLJNIMacroAssembler::EmitExceptionPoll(
    ArmVIXLJNIMacroAssembler::ArmException* exception) {
  ___ Bind(exception->Entry());
  if (exception->stack_adjust_ != 0) {  // Fix up the frame.
    DecreaseFrameSize(exception->stack_adjust_);
  }

  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(exception->scratch_.AsVIXLRegister());
  // Pass exception object as argument.
  // Don't care about preserving r0 as this won't return.
  ___ Mov(r0, exception->scratch_.AsVIXLRegister());
  temps.Include(exception->scratch_.AsVIXLRegister());
  // TODO: check that exception->scratch_ is dead by this point.
  vixl32::Register temp = temps.Acquire();
  ___ Ldr(temp,
          MemOperand(tr,
              QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, pDeliverException).Int32Value()));
  ___ Blx(temp);
}

void ArmVIXLJNIMacroAssembler::MemoryBarrier(ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

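// Common load helper used by Load() and LoadFromThread(): `size` is the width
// in bytes expected for `dest` (1 or 4 for a core register, 8 for a register
// pair; S and D registers load 4 and 8 bytes respectively).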
void ArmVIXLJNIMacroAssembler::Load(ArmManagedRegister dest,
                                    vixl32::Register base,
                                    int32_t offset,
                                    size_t size) {
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size) << dest;
  } else if (dest.IsCoreRegister()) {
    CHECK(!dest.AsVIXLRegister().Is(sp)) << dest;

    UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
    temps.Exclude(dest.AsVIXLRegister());

    if (size == 1u) {
      ___ Ldrb(dest.AsVIXLRegister(), MemOperand(base, offset));
    } else {
      CHECK_EQ(4u, size) << dest;
      ___ Ldr(dest.AsVIXLRegister(), MemOperand(base, offset));
    }
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size) << dest;
    ___ Ldr(dest.AsVIXLRegisterPairLow(),  MemOperand(base, offset));
    ___ Ldr(dest.AsVIXLRegisterPairHigh(), MemOperand(base, offset + 4));
  } else if (dest.IsSRegister()) {
    ___ Vldr(dest.AsVIXLSRegister(), MemOperand(base, offset));
  } else {
    CHECK(dest.IsDRegister()) << dest;
    ___ Vldr(dest.AsVIXLDRegister(), MemOperand(base, offset));
  }
}

}  // namespace arm
}  // namespace art