/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "base/logging.h"
#include "calling_convention_arm64.h"
#include "handle_scope-inl.h"
#include "utils/arm64/managed_register_arm64.h"

namespace art {
namespace arm64 {

static const XRegister kXArgumentRegisters[] = {
  X0, X1, X2, X3, X4, X5, X6, X7
};

static const WRegister kWArgumentRegisters[] = {
  W0, W1, W2, W3, W4, W5, W6, W7
};

static const DRegister kDArgumentRegisters[] = {
  D0, D1, D2, D3, D4, D5, D6, D7
};

static const SRegister kSArgumentRegisters[] = {
  S0, S1, S2, S3, S4, S5, S6, S7
};

static const DRegister kDCalleeSaveRegisters[] = {
  D8, D9, D10, D11, D12, D13, D14, D15
};

// Calling convention
ManagedRegister Arm64ManagedRuntimeCallingConvention::InterproceduralScratchRegister() {
  return Arm64ManagedRegister::FromXRegister(X20);  // Saved on entry, restored on exit.
}

ManagedRegister Arm64JniCallingConvention::InterproceduralScratchRegister() {
  return Arm64ManagedRegister::FromXRegister(X20);  // Saved on entry, restored on exit.
}

static ManagedRegister ReturnRegisterForShorty(const char* shorty) {
  if (shorty[0] == 'F') {
    return Arm64ManagedRegister::FromSRegister(S0);
  } else if (shorty[0] == 'D') {
    return Arm64ManagedRegister::FromDRegister(D0);
  } else if (shorty[0] == 'J') {
    return Arm64ManagedRegister::FromXRegister(X0);
  } else if (shorty[0] == 'V') {
    return Arm64ManagedRegister::NoRegister();
  } else {
    return Arm64ManagedRegister::FromWRegister(W0);
  }
}
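
// For illustration: the shorty's first character encodes the return type, so a
// method with shorty "D..." returns in D0, "F..." in S0, "J..." in X0, "V..."
// in no register, and any other (32-bit or narrower) type in W0.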

ManagedRegister Arm64ManagedRuntimeCallingConvention::ReturnRegister() {
  return ReturnRegisterForShorty(GetShorty());
}

ManagedRegister Arm64JniCallingConvention::ReturnRegister() {
  return ReturnRegisterForShorty(GetShorty());
}

ManagedRegister Arm64JniCallingConvention::IntReturnRegister() {
  return Arm64ManagedRegister::FromWRegister(W0);
}

// Managed runtime calling convention

ManagedRegister Arm64ManagedRuntimeCallingConvention::MethodRegister() {
  return Arm64ManagedRegister::FromXRegister(X0);
}

bool Arm64ManagedRuntimeCallingConvention::IsCurrentParamInRegister() {
  return false;  // Everything is moved to the stack on entry.
}

bool Arm64ManagedRuntimeCallingConvention::IsCurrentParamOnStack() {
  return true;
}

ManagedRegister Arm64ManagedRuntimeCallingConvention::CurrentParamRegister() {
  LOG(FATAL) << "Should not reach here";
  return ManagedRegister::NoRegister();
}

FrameOffset Arm64ManagedRuntimeCallingConvention::CurrentParamStackOffset() {
  CHECK(IsCurrentParamOnStack());
  FrameOffset result =
      FrameOffset(displacement_.Int32Value() +  // displacement
                  kFramePointerSize +  // Method ref
                  (itr_slots_ * sizeof(uint32_t)));  // offset into in args
  return result;
}
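
// Worked example (a sketch, assuming kFramePointerSize == 8 and
// displacement_ == 0): the third 32-bit in-argument slot (itr_slots_ == 2)
// lands at 0 + 8 + 2 * 4 == 16 bytes, just past the ArtMethod* reference at
// the bottom of the frame.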

const ManagedRegisterEntrySpills& Arm64ManagedRuntimeCallingConvention::EntrySpills() {
  // We spill the argument registers on ARM64 to free them up for scratch use;
  // we then assume all arguments are on the stack.
  if ((entry_spills_.size() == 0) && (NumArgs() > 0)) {
    int gp_reg_index = 1;   // We start from X1/W1; X0 holds the ArtMethod*.
    int fp_reg_index = 0;   // D0/S0.

    // We need to choose the correct register size (D/S or X/W) since the
    // managed stack uses 32-bit stack slots.
    ResetIterator(FrameOffset(0));
    while (HasNext()) {
      if (IsCurrentParamAFloatOrDouble()) {  // FP regs.
        if (fp_reg_index < 8) {
          if (!IsCurrentParamADouble()) {
            entry_spills_.push_back(Arm64ManagedRegister::FromSRegister(kSArgumentRegisters[fp_reg_index]));
          } else {
            entry_spills_.push_back(Arm64ManagedRegister::FromDRegister(kDArgumentRegisters[fp_reg_index]));
          }
          fp_reg_index++;
        } else {  // Just increase the stack offset.
          if (!IsCurrentParamADouble()) {
            entry_spills_.push_back(ManagedRegister::NoRegister(), 4);
          } else {
            entry_spills_.push_back(ManagedRegister::NoRegister(), 8);
          }
        }
      } else {  // GP regs.
        if (gp_reg_index < 8) {
          if (IsCurrentParamALong() && !IsCurrentParamAReference()) {
            entry_spills_.push_back(Arm64ManagedRegister::FromXRegister(kXArgumentRegisters[gp_reg_index]));
          } else {
            entry_spills_.push_back(Arm64ManagedRegister::FromWRegister(kWArgumentRegisters[gp_reg_index]));
          }
          gp_reg_index++;
        } else {  // Just increase the stack offset.
          if (IsCurrentParamALong() && !IsCurrentParamAReference()) {
            entry_spills_.push_back(ManagedRegister::NoRegister(), 8);
          } else {
            entry_spills_.push_back(ManagedRegister::NoRegister(), 4);
          }
        }
      }
      Next();
    }
  }
  return entry_spills_;
}
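
// Illustrative trace (a sketch): for a static method with shorty "VIJD" (void
// return; int, long, double arguments), the loop above records W1 for the int,
// X2 for the long, and D0 for the double, since X0 is reserved for the
// ArtMethod* and FP arguments draw from their own register pool.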

// JNI calling convention
Arm64JniCallingConvention::Arm64JniCallingConvention(bool is_static, bool is_synchronized,
                                                     const char* shorty)
    : JniCallingConvention(is_static, is_synchronized, shorty, kFramePointerSize) {
  uint32_t core_spill_mask = CoreSpillMask();
  DCHECK_EQ(XZR, kNumberOfXRegisters - 1);  // Exclude XZR from the loop (avoid 1 << 32).
  for (int x_reg = 0; x_reg < kNumberOfXRegisters - 1; ++x_reg) {
    if (((1 << x_reg) & core_spill_mask) != 0) {
      callee_save_regs_.push_back(
          Arm64ManagedRegister::FromXRegister(static_cast<XRegister>(x_reg)));
    }
  }

  uint32_t fp_spill_mask = FpSpillMask();
  for (int d_reg = 0; d_reg < kNumberOfDRegisters; ++d_reg) {
    if (((1 << d_reg) & fp_spill_mask) != 0) {
      callee_save_regs_.push_back(
          Arm64ManagedRegister::FromDRegister(static_cast<DRegister>(d_reg)));
    }
  }
}

uint32_t Arm64JniCallingConvention::CoreSpillMask() const {
  // Compute the spill mask to agree with the callee saves initialized in the
  // constructor. Note: the native JNI function may call VM runtime functions
  // that suspend or trigger GC, and the JNI method frame then becomes the top
  // quick frame. So, to satisfy the GC, we need to save LR and the callee-save
  // registers, similar to a CalleeSaveMethod(RefOnly) frame.
  // The JNI function is the native function that the Java code wants to call;
  // the JNI method is the method compiled by the JNI compiler.
  // Call chain: managed code (Java) --> JNI method --> JNI function.
  // The thread register (X19) is saved on the stack.
  return 1 << X19 | 1 << X20 | 1 << X21 | 1 << X22 | 1 << X23 | 1 << X24 |
         1 << X25 | 1 << X26 | 1 << X27 | 1 << X28 | 1 << X29 | 1 << LR;
}
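
// For illustration (assuming the enum values X19 == 19 through X29 == 29 and
// LR == X30): the expression above evaluates to 0x7ff80000, i.e. bits 19
// through 30 set.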

uint32_t Arm64JniCallingConvention::FpSpillMask() const {
  // Consider the chain java_method_1 --> JNI method --> JNI function -->
  // java_method_2: we may break in java_method_2 and still need the values of
  // the DEX registers in java_method_1, so all managed-code callee saves must
  // be preserved.
  uint32_t result = 0;
  for (size_t i = 0; i < arraysize(kDCalleeSaveRegisters); ++i) {
    result |= (1 << kDCalleeSaveRegisters[i]);
  }
  return result;
}
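
// For illustration (assuming the enum values D8 == 8 through D15 == 15): the
// loop above yields 0xff00, i.e. bits 8 through 15 set, one per callee-saved
// FP register.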

ManagedRegister Arm64JniCallingConvention::ReturnScratchRegister() const {
  return ManagedRegister::NoRegister();
}

size_t Arm64JniCallingConvention::FrameSize() {
  // Method*, callee save area size, local reference segment state.
  size_t frame_data_size = kFramePointerSize +
      CalleeSaveRegisters().size() * kFramePointerSize + sizeof(uint32_t);
  // References plus 2 words for the HandleScope header.
  size_t handle_scope_size = HandleScope::SizeOf(kFramePointerSize, ReferenceCount());
  // Plus the return value spill area size.
  return RoundUp(frame_data_size + handle_scope_size + SizeOfReturnValue(), kStackAlignment);
}
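
// Worked example (a sketch, assuming kFramePointerSize == 8, kStackAlignment
// == 16, and the 12 core + 8 FP callee saves set up in the constructor):
// frame_data_size == 8 + 20 * 8 + 4 == 172 bytes; the handle scope and the
// return-value spill area are then added and the sum is rounded up to a
// 16-byte boundary.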

size_t Arm64JniCallingConvention::OutArgSize() {
  return RoundUp(NumberOfOutgoingStackArgs() * kFramePointerSize, kStackAlignment);
}

bool Arm64JniCallingConvention::IsCurrentParamInRegister() {
  if (IsCurrentParamAFloatOrDouble()) {
    return (itr_float_and_doubles_ < 8);
  } else {
    return ((itr_args_ - itr_float_and_doubles_) < 8);
  }
}

bool Arm64JniCallingConvention::IsCurrentParamOnStack() {
  return !IsCurrentParamInRegister();
}

ManagedRegister Arm64JniCallingConvention::CurrentParamRegister() {
  CHECK(IsCurrentParamInRegister());
  if (IsCurrentParamAFloatOrDouble()) {
    CHECK_LT(itr_float_and_doubles_, 8u);
    if (IsCurrentParamADouble()) {
      return Arm64ManagedRegister::FromDRegister(kDArgumentRegisters[itr_float_and_doubles_]);
    } else {
      return Arm64ManagedRegister::FromSRegister(kSArgumentRegisters[itr_float_and_doubles_]);
    }
  } else {
    int gp_reg = itr_args_ - itr_float_and_doubles_;
    CHECK_LT(static_cast<unsigned int>(gp_reg), 8u);
    if (IsCurrentParamALong() || IsCurrentParamAReference() || IsCurrentParamJniEnv()) {
      return Arm64ManagedRegister::FromXRegister(kXArgumentRegisters[gp_reg]);
    } else {
      return Arm64ManagedRegister::FromWRegister(kWArgumentRegisters[gp_reg]);
    }
  }
}
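
// For illustration: FP arguments consume D0-D7/S0-S7 in order while the others
// consume the GP pool, so the third non-FP argument (gp_reg == 2) is returned
// as X2 if it is a long, a reference, or the JNIEnv*, and as W2 otherwise.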

FrameOffset Arm64JniCallingConvention::CurrentParamStackOffset() {
  CHECK(IsCurrentParamOnStack());
  size_t args_on_stack = itr_args_
                  - std::min(8u, itr_float_and_doubles_)
                  - std::min(8u, (itr_args_ - itr_float_and_doubles_));
  size_t offset = displacement_.Int32Value() - OutArgSize() + (args_on_stack * kFramePointerSize);
  CHECK_LT(offset, OutArgSize());
  return FrameOffset(offset);
}
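
// Worked example (a sketch, assuming kFramePointerSize == 8): with ten
// integral arguments and no FP arguments, the ninth argument (itr_args_ == 8)
// gives args_on_stack == 8 - 0 - 8 == 0, so it is the first outgoing stack
// argument, placed at displacement_ - OutArgSize().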

size_t Arm64JniCallingConvention::NumberOfOutgoingStackArgs() {
  // All arguments, including the extra JNI arguments.
  size_t all_args = NumArgs() + NumberOfExtraArgumentsForJni();

  size_t all_stack_args = all_args -
            std::min(8u, static_cast<unsigned int>(NumFloatOrDoubleArgs())) -
            std::min(8u, static_cast<unsigned int>(all_args - NumFloatOrDoubleArgs()));

  return all_stack_args;
}
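
// Illustrative arithmetic (a sketch): with all_args == 12, three of them FP,
// the register pools absorb std::min(8u, 3u) == 3 FP and std::min(8u, 9u) == 8
// GP arguments, leaving 12 - 3 - 8 == 1 outgoing stack argument.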

}  // namespace arm64
}  // namespace art