/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_
#define ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_

#include <stdint.h>
#include <memory>
#include <vector>

#include "base/logging.h"
#include "constants_arm64.h"
#include "utils/arm64/managed_register_arm64.h"
#include "utils/assembler.h"
#include "offsets.h"
#include "utils.h"
#include "a64/macro-assembler-a64.h"
#include "a64/disasm-a64.h"

namespace art {
namespace arm64 {

#define MEM_OP(x...)      vixl::MemOperand(x)
#define COND_OP(x)        static_cast<vixl::Condition>(x)
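// For example, MEM_OP(reg_x(SP), offset) expands to vixl::MemOperand(reg_x(SP), offset),
// and COND_OP(EQ) casts the art::arm64 condition code below to the equivalent
// vixl::Condition.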

enum Condition {
  kNoCondition = -1,
  EQ = 0,
  NE = 1,
  HS = 2,
  LO = 3,
  MI = 4,
  PL = 5,
  VS = 6,
  VC = 7,
  HI = 8,
  LS = 9,
  GE = 10,
  LT = 11,
  GT = 12,
  LE = 13,
  AL = 14,    // Always.
  NV = 15,    // Behaves as always/al.
  kMaxCondition = 16,
};
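// These values follow the standard A64 condition-code encoding (eq = 0 through
// nv = 15), which is why COND_OP can cast them directly to vixl::Condition,
// e.g. COND_OP(EQ) == vixl::eq.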

enum LoadOperandType {
  kLoadSignedByte,
  kLoadUnsignedByte,
  kLoadSignedHalfword,
  kLoadUnsignedHalfword,
  kLoadWord,
  kLoadCoreWord,
  kLoadSWord,
  kLoadDWord
};
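// Each LoadOperandType selects the width and signedness of a load, e.g.
// kLoadSignedByte an ldrsb, kLoadUnsignedHalfword an ldrh, kLoadWord an ldr
// to a W register, kLoadCoreWord an ldr to an X register, and kLoadSWord and
// kLoadDWord loads to S and D floating-point registers.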

enum StoreOperandType {
  kStoreByte,
  kStoreHalfword,
  kStoreWord,
  kStoreCoreWord,
  kStoreSWord,
  kStoreDWord
};
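// Likewise, StoreOperandType selects between strb, strh and str on a
// W, X, S or D register.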

class Arm64Exception;

class Arm64Assembler FINAL : public Assembler {
 public:
  Arm64Assembler() : vixl_buf_(new byte[kBufferSizeArm64]),
                     vixl_masm_(new vixl::MacroAssembler(vixl_buf_, kBufferSizeArm64)) {}

  virtual ~Arm64Assembler() {
    delete vixl_masm_;
    delete[] vixl_buf_;
  }

  // Emit slow paths queued during assembly.
  void EmitSlowPaths();

  // Size of generated code.
  size_t CodeSize() const;

  // Copy instructions out of assembly buffer into the given region of memory.
  void FinalizeInstructions(const MemoryRegion& region);

  // Emit code that will create an activation on the stack.
  void BuildFrame(size_t frame_size, ManagedRegister method_reg,
                  const std::vector<ManagedRegister>& callee_save_regs,
                  const ManagedRegisterEntrySpills& entry_spills) OVERRIDE;

  // Emit code that will remove an activation from the stack.
  void RemoveFrame(size_t frame_size, const std::vector<ManagedRegister>& callee_save_regs)
      OVERRIDE;

  void IncreaseFrameSize(size_t adjust) OVERRIDE;
  void DecreaseFrameSize(size_t adjust) OVERRIDE;

  // Store routines.
  void Store(FrameOffset offs, ManagedRegister src, size_t size) OVERRIDE;
  void StoreRef(FrameOffset dest, ManagedRegister src) OVERRIDE;
  void StoreRawPtr(FrameOffset dest, ManagedRegister src) OVERRIDE;
  void StoreImmediateToFrame(FrameOffset dest, uint32_t imm, ManagedRegister scratch) OVERRIDE;
  void StoreImmediateToThread64(ThreadOffset<8> dest, uint32_t imm, ManagedRegister scratch)
      OVERRIDE;
  void StoreStackOffsetToThread64(ThreadOffset<8> thr_offs, FrameOffset fr_offs,
                                  ManagedRegister scratch) OVERRIDE;
  void StoreStackPointerToThread64(ThreadOffset<8> thr_offs) OVERRIDE;
  void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off,
                     ManagedRegister scratch) OVERRIDE;

  // Load routines.
  void Load(ManagedRegister dest, FrameOffset src, size_t size) OVERRIDE;
  void LoadFromThread64(ManagedRegister dest, ThreadOffset<8> src, size_t size) OVERRIDE;
  void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
  void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs) OVERRIDE;
  void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) OVERRIDE;
  void LoadRawPtrFromThread64(ManagedRegister dest, ThreadOffset<8> offs) OVERRIDE;

  // Copying routines.
  void Move(ManagedRegister dest, ManagedRegister src, size_t size) OVERRIDE;
  void CopyRawPtrFromThread64(FrameOffset fr_offs, ThreadOffset<8> thr_offs,
                              ManagedRegister scratch) OVERRIDE;
  void CopyRawPtrToThread64(ThreadOffset<8> thr_offs, FrameOffset fr_offs, ManagedRegister scratch)
      OVERRIDE;
  void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister scratch) OVERRIDE;
  void Copy(FrameOffset dest, FrameOffset src, ManagedRegister scratch, size_t size) OVERRIDE;
  void Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset, ManagedRegister scratch,
            size_t size) OVERRIDE;
  void Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src, ManagedRegister scratch,
            size_t size) OVERRIDE;
  void Copy(FrameOffset dest, FrameOffset src_base, Offset src_offset, ManagedRegister scratch,
            size_t size) OVERRIDE;
  void Copy(ManagedRegister dest, Offset dest_offset, ManagedRegister src, Offset src_offset,
            ManagedRegister scratch, size_t size) OVERRIDE;
  void Copy(FrameOffset dest, Offset dest_offset, FrameOffset src, Offset src_offset,
            ManagedRegister scratch, size_t size) OVERRIDE;
  void MemoryBarrier(ManagedRegister scratch) OVERRIDE;

  // Sign extension.
  void SignExtend(ManagedRegister mreg, size_t size) OVERRIDE;

  // Zero extension.
  void ZeroExtend(ManagedRegister mreg, size_t size) OVERRIDE;

  // Exploit fast access in managed code to Thread::Current().
  void GetCurrentThread(ManagedRegister tr) OVERRIDE;
  void GetCurrentThread(FrameOffset dest_offset, ManagedRegister scratch) OVERRIDE;
    164   // Set up out_reg to hold a Object** into the handle scope, or to be NULL if the
    165   // value is null and null_allowed. in_reg holds a possibly stale reference
    166   // that can be used to avoid loading the handle scope entry to see if the value is
    167   // NULL.
    168   void CreateHandleScopeEntry(ManagedRegister out_reg, FrameOffset handlescope_offset,
    169                        ManagedRegister in_reg, bool null_allowed) OVERRIDE;
    170 
    171   // Set up out_off to hold a Object** into the handle scope, or to be NULL if the
    172   // value is null and null_allowed.
    173   void CreateHandleScopeEntry(FrameOffset out_off, FrameOffset handlescope_offset,
    174                        ManagedRegister scratch, bool null_allowed) OVERRIDE;
    175 
    176   // src holds a handle scope entry (Object**) load this into dst.
    177   void LoadReferenceFromHandleScope(ManagedRegister dst, ManagedRegister src) OVERRIDE;
    178 
    179   // Heap::VerifyObject on src. In some cases (such as a reference to this) we
    180   // know that src may not be null.
    181   void VerifyObject(ManagedRegister src, bool could_be_null) OVERRIDE;
    182   void VerifyObject(FrameOffset src, bool could_be_null) OVERRIDE;
    183 
    184   // Call to address held at [base+offset].
    185   void Call(ManagedRegister base, Offset offset, ManagedRegister scratch) OVERRIDE;
    186   void Call(FrameOffset base, Offset offset, ManagedRegister scratch) OVERRIDE;
    187   void CallFromThread64(ThreadOffset<8> offset, ManagedRegister scratch) OVERRIDE;
    188 
    189   // Jump to address (not setting link register)
    190   void JumpTo(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch);
    191 
    192   // Generate code to check if Thread::Current()->exception_ is non-null
    193   // and branch to a ExceptionSlowPath if it is.
    194   void ExceptionPoll(ManagedRegister scratch, size_t stack_adjust) OVERRIDE;
    195 

 private:
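  // Returns the 64-bit (X) vixl register for a core register code, mapping SP
  // and XZR to the dedicated vixl::sp and vixl::xzr registers.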
  static vixl::Register reg_x(int code) {
    CHECK(code < kNumberOfCoreRegisters) << code;
    if (code == SP) {
      return vixl::sp;
    } else if (code == XZR) {
      return vixl::xzr;
    }
    return vixl::Register::XRegFromCode(code);
  }

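  // Returns the 32-bit (W) view of a core register.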
  static vixl::Register reg_w(int code) {
    return vixl::Register::WRegFromCode(code);
  }

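  // Returns the 64-bit (D) view of a floating-point register.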
  static vixl::FPRegister reg_d(int code) {
    return vixl::FPRegister::DRegFromCode(code);
  }

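  // Returns the 32-bit (S) view of a floating-point register.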
  static vixl::FPRegister reg_s(int code) {
    return vixl::FPRegister::SRegFromCode(code);
  }

  // Emits the exception block.
  void EmitExceptionPoll(Arm64Exception* exception);

  void StoreWToOffset(StoreOperandType type, WRegister source,
                      Register base, int32_t offset);
  void StoreToOffset(Register source, Register base, int32_t offset);
  void StoreSToOffset(SRegister source, Register base, int32_t offset);
  void StoreDToOffset(DRegister source, Register base, int32_t offset);

  void LoadImmediate(Register dest, int32_t value, Condition cond = AL);
  void Load(Arm64ManagedRegister dst, Register src, int32_t src_offset, size_t size);
  void LoadWFromOffset(LoadOperandType type, WRegister dest,
                       Register base, int32_t offset);
  void LoadFromOffset(Register dest, Register base, int32_t offset);
  void LoadSFromOffset(SRegister dest, Register base, int32_t offset);
  void LoadDFromOffset(DRegister dest, Register base, int32_t offset);
  void AddConstant(Register rd, int32_t value, Condition cond = AL);
  void AddConstant(Register rd, Register rn, int32_t value, Condition cond = AL);

  // Vixl buffer, owned by this assembler and released in the destructor.
  byte* vixl_buf_;

  // Vixl assembler, emitting into vixl_buf_.
  vixl::MacroAssembler* vixl_masm_;

  // List of exception blocks to generate at the end of the code cache.
  std::vector<Arm64Exception*> exception_blocks_;

  // Used for testing.
  friend class Arm64ManagedRegister_VixlRegisters_Test;
};
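// Out-of-line slow path that delivers a pending exception; instances are
// queued in exception_blocks_ by ExceptionPoll() and emitted at the end of
// the generated code.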
class Arm64Exception {
 private:
  Arm64Exception(Arm64ManagedRegister scratch, size_t stack_adjust)
      : scratch_(scratch), stack_adjust_(stack_adjust) {
  }

  vixl::Label* Entry() { return &exception_entry_; }

  // Register used for passing Thread::Current()->exception_.
  const Arm64ManagedRegister scratch_;

  // Stack adjustment for ExceptionPoll.
  const size_t stack_adjust_;

  vixl::Label exception_entry_;

  friend class Arm64Assembler;
  DISALLOW_COPY_AND_ASSIGN(Arm64Exception);
};

}  // namespace arm64
}  // namespace art

#endif  // ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_