// Original listing header: Home | History | Annotate | Download | only in arm64
      1 /*
      2  * Copyright (C) 2014 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #ifndef ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_
     18 #define ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_
     19 
     20 #include <stdint.h>
     21 #include <memory>
     22 #include <vector>
     23 
     24 #include "base/arena_containers.h"
     25 #include "base/logging.h"
     26 #include "utils/arm64/managed_register_arm64.h"
     27 #include "utils/assembler.h"
     28 #include "offsets.h"
     29 
     30 // TODO(VIXL): Make VIXL compile with -Wshadow.
     31 #pragma GCC diagnostic push
     32 #pragma GCC diagnostic ignored "-Wshadow"
     33 #include "aarch64/disasm-aarch64.h"
     34 #include "aarch64/macro-assembler-aarch64.h"
     35 #pragma GCC diagnostic pop
     36 
     37 namespace art {
     38 namespace arm64 {
     39 
// Shorthand for building a VIXL aarch64 MemOperand (base register plus
// offset/index addressing) without spelling out the full namespace at
// every use site.
#define MEM_OP(...)      vixl::aarch64::MemOperand(__VA_ARGS__)
     41 
// Width/signedness variants for memory loads, used to select the proper
// load encoding for core and FP/SIMD registers.
enum LoadOperandType {
  kLoadSignedByte,        // 8-bit load, sign-extended.
  kLoadUnsignedByte,      // 8-bit load, zero-extended.
  kLoadSignedHalfword,    // 16-bit load, sign-extended.
  kLoadUnsignedHalfword,  // 16-bit load, zero-extended.
  kLoadWord,              // 32-bit core-register load.
  kLoadCoreWord,          // 64-bit core-register load (presumably X-reg; confirm at use sites).
  kLoadSWord,             // 32-bit FP/SIMD (S-register) load.
  kLoadDWord              // 64-bit FP/SIMD (D-register) load.
};
     52 
// Width variants for memory stores, mirroring LoadOperandType (stores need
// no signed/unsigned distinction since no extension is performed).
enum StoreOperandType {
  kStoreByte,      // 8-bit store.
  kStoreHalfword,  // 16-bit store.
  kStoreWord,      // 32-bit core-register store.
  kStoreCoreWord,  // 64-bit core-register store (presumably X-reg; confirm at use sites).
  kStoreSWord,     // 32-bit FP/SIMD (S-register) store.
  kStoreDWord      // 64-bit FP/SIMD (D-register) store.
};
     61 
     62 class Arm64Assembler FINAL : public Assembler {
     63  public:
     64   explicit Arm64Assembler(ArenaAllocator* arena) : Assembler(arena) {}
     65 
     66   virtual ~Arm64Assembler() {}
     67 
     68   vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return &vixl_masm_; }
     69 
     70   // Finalize the code.
     71   void FinalizeCode() OVERRIDE;
     72 
     73   // Size of generated code.
     74   size_t CodeSize() const OVERRIDE;
     75   const uint8_t* CodeBufferBaseAddress() const OVERRIDE;
     76 
     77   // Copy instructions out of assembly buffer into the given region of memory.
     78   void FinalizeInstructions(const MemoryRegion& region);
     79 
     80   void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs);
     81 
     82   void SpillRegisters(vixl::aarch64::CPURegList registers, int offset);
     83   void UnspillRegisters(vixl::aarch64::CPURegList registers, int offset);
     84 
     85   // Jump to address (not setting link register)
     86   void JumpTo(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch);
     87 
     88   //
     89   // Heap poisoning.
     90   //
     91 
     92   // Poison a heap reference contained in `reg`.
     93   void PoisonHeapReference(vixl::aarch64::Register reg);
     94   // Unpoison a heap reference contained in `reg`.
     95   void UnpoisonHeapReference(vixl::aarch64::Register reg);
     96   // Poison a heap reference contained in `reg` if heap poisoning is enabled.
     97   void MaybePoisonHeapReference(vixl::aarch64::Register reg);
     98   // Unpoison a heap reference contained in `reg` if heap poisoning is enabled.
     99   void MaybeUnpoisonHeapReference(vixl::aarch64::Register reg);
    100 
    101   void Bind(Label* label ATTRIBUTE_UNUSED) OVERRIDE {
    102     UNIMPLEMENTED(FATAL) << "Do not use Bind for ARM64";
    103   }
    104   void Jump(Label* label ATTRIBUTE_UNUSED) OVERRIDE {
    105     UNIMPLEMENTED(FATAL) << "Do not use Jump for ARM64";
    106   }
    107 
    108   static vixl::aarch64::Register reg_x(int code) {
    109     CHECK(code < kNumberOfXRegisters) << code;
    110     if (code == SP) {
    111       return vixl::aarch64::sp;
    112     } else if (code == XZR) {
    113       return vixl::aarch64::xzr;
    114     }
    115     return vixl::aarch64::Register::GetXRegFromCode(code);
    116   }
    117 
    118   static vixl::aarch64::Register reg_w(int code) {
    119     CHECK(code < kNumberOfWRegisters) << code;
    120     if (code == WSP) {
    121       return vixl::aarch64::wsp;
    122     } else if (code == WZR) {
    123       return vixl::aarch64::wzr;
    124     }
    125     return vixl::aarch64::Register::GetWRegFromCode(code);
    126   }
    127 
    128   static vixl::aarch64::FPRegister reg_d(int code) {
    129     return vixl::aarch64::FPRegister::GetDRegFromCode(code);
    130   }
    131 
    132   static vixl::aarch64::FPRegister reg_s(int code) {
    133     return vixl::aarch64::FPRegister::GetSRegFromCode(code);
    134   }
    135 
    136  private:
    137   // VIXL assembler.
    138   vixl::aarch64::MacroAssembler vixl_masm_;
    139 
    140   // Used for testing.
    141   friend class Arm64ManagedRegister_VixlRegisters_Test;
    142 };
    143 
    144 }  // namespace arm64
    145 }  // namespace art
    146 
    147 #endif  // ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_
    148