/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <vector>

#include "base/logging.h"
#include "base/macros.h"
#include "calling_convention.h"
#include "class_linker.h"
#include "compiled_method.h"
#include "dex_file-inl.h"
#include "driver/compiler_driver.h"
#include "disassembler.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "jni_internal.h"
#include "utils/assembler.h"
#include "utils/managed_register.h"
#include "utils/arm/managed_register_arm.h"
#include "utils/mips/managed_register_mips.h"
#include "utils/x86/managed_register_x86.h"
#include "thread.h"
#include "UniquePtr.h"

#define __ jni_asm->

namespace art {

static void CopyParameter(Assembler* jni_asm,
                          ManagedRuntimeCallingConvention* mr_conv,
                          JniCallingConvention* jni_conv,
                          size_t frame_size, size_t out_arg_size);
static void SetNativeParameter(Assembler* jni_asm,
                               JniCallingConvention* jni_conv,
                               ManagedRegister in_reg);

// Generate the JNI bridge for the given method, general contract:
// - Arguments are in the managed runtime format, either on stack or in
//   registers, a reference to the method object is supplied as part of this
//   convention.
//
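// In outline, the generated stub (see the numbered steps below) performs:
//   - frame construction with callee saves and a SIRT for reference arguments,
//   - a call to JniMethodStart (or its synchronized variant) to leave the
//     Runnable state and obtain the saved local reference cookie,
//   - shuffling of arguments into the native calling convention and the call
//     to the native method itself,
//   - a call to JniMethodEnd (reference/synchronized variants as needed)
//     passing back the saved cookie, and
//   - return-value handling, exception polling and frame removal.
//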
CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver& compiler,
                                            uint32_t access_flags, uint32_t method_idx,
                                            const DexFile& dex_file) {
  const bool is_native = (access_flags & kAccNative) != 0;
  CHECK(is_native);
  const bool is_static = (access_flags & kAccStatic) != 0;
  const bool is_synchronized = (access_flags & kAccSynchronized) != 0;
  const char* shorty = dex_file.GetMethodShorty(dex_file.GetMethodId(method_idx));
  InstructionSet instruction_set = compiler.GetInstructionSet();
  if (instruction_set == kThumb2) {
    instruction_set = kArm;
  }
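  // Note: Thumb2 shares the ARM calling conventions, so the ARM variants are
  // used for the calling conventions and assembler created below.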
  // Calling conventions used to iterate over the parameters of the method.
  UniquePtr<JniCallingConvention> main_jni_conv(
      JniCallingConvention::Create(is_static, is_synchronized, shorty, instruction_set));
  bool reference_return = main_jni_conv->IsReturnAReference();

  UniquePtr<ManagedRuntimeCallingConvention> mr_conv(
      ManagedRuntimeCallingConvention::Create(is_static, is_synchronized, shorty, instruction_set));

  // Calling conventions to call into JNI method "end" possibly passing a returned reference, the
  //     method and the current thread.
  size_t jni_end_arg_count = 0;
  if (reference_return) { jni_end_arg_count++; }
  if (is_synchronized) { jni_end_arg_count++; }
  const char* jni_end_shorty = jni_end_arg_count == 0 ? "I"
                                                      : (jni_end_arg_count == 1 ? "II" : "III");
  UniquePtr<JniCallingConvention> end_jni_conv(
      JniCallingConvention::Create(is_static, is_synchronized, jni_end_shorty, instruction_set));

  // Assembler that holds generated instructions.
  UniquePtr<Assembler> jni_asm(Assembler::Create(instruction_set));
  bool should_disassemble = false;

  // Offsets into data structures.
  // TODO: if cross compiling these offsets are for the host not the target.
  const Offset functions(OFFSETOF_MEMBER(JNIEnvExt, functions));
  const Offset monitor_enter(OFFSETOF_MEMBER(JNINativeInterface, MonitorEnter));
  const Offset monitor_exit(OFFSETOF_MEMBER(JNINativeInterface, MonitorExit));

  // 1. Build the frame saving all callee saves.
  const size_t frame_size(main_jni_conv->FrameSize());
  const std::vector<ManagedRegister>& callee_save_regs = main_jni_conv->CalleeSaveRegisters();
  __ BuildFrame(frame_size, mr_conv->MethodRegister(), callee_save_regs, mr_conv->EntrySpills());

  // 2. Set up the StackIndirectReferenceTable.
  mr_conv->ResetIterator(FrameOffset(frame_size));
  main_jni_conv->ResetIterator(FrameOffset(0));
  __ StoreImmediateToFrame(main_jni_conv->SirtNumRefsOffset(),
                           main_jni_conv->ReferenceCount(),
                           mr_conv->InterproceduralScratchRegister());
  __ CopyRawPtrFromThread(main_jni_conv->SirtLinkOffset(),
                          Thread::TopSirtOffset(),
                          mr_conv->InterproceduralScratchRegister());
  __ StoreStackOffsetToThread(Thread::TopSirtOffset(),
                              main_jni_conv->SirtOffset(),
                              mr_conv->InterproceduralScratchRegister());
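  // The SIRT now records how many references it holds, links back to the
  // previous top-of-SIRT, and has been published as the thread's new top SIRT.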

  // 3. Place incoming reference arguments into SIRT.
  main_jni_conv->Next();  // Skip JNIEnv*.
  // 3.5. Create Class argument for static methods out of passed method.
  if (is_static) {
    FrameOffset sirt_offset = main_jni_conv->CurrentParamSirtEntryOffset();
    // Check that the SIRT offset is within the frame.
    CHECK_LT(sirt_offset.Uint32Value(), frame_size);
    __ LoadRef(main_jni_conv->InterproceduralScratchRegister(),
               mr_conv->MethodRegister(), mirror::ArtMethod::DeclaringClassOffset());
    __ VerifyObject(main_jni_conv->InterproceduralScratchRegister(), false);
    __ StoreRef(sirt_offset, main_jni_conv->InterproceduralScratchRegister());
    main_jni_conv->Next();  // In SIRT so move to next argument.
  }
  while (mr_conv->HasNext()) {
    CHECK(main_jni_conv->HasNext());
    bool ref_param = main_jni_conv->IsCurrentParamAReference();
    CHECK(!ref_param || mr_conv->IsCurrentParamAReference());
    // References need placing in the SIRT; it is the SIRT entry that is later passed to native code.
    if (ref_param) {
      // Compute the SIRT entry; note null is placed in the SIRT but its boxed value
      // must be NULL.
      FrameOffset sirt_offset = main_jni_conv->CurrentParamSirtEntryOffset();
      // Check that the SIRT offset is within the frame and doesn't run into the saved segment state.
      CHECK_LT(sirt_offset.Uint32Value(), frame_size);
      CHECK_NE(sirt_offset.Uint32Value(),
               main_jni_conv->SavedLocalReferenceCookieOffset().Uint32Value());
      bool input_in_reg = mr_conv->IsCurrentParamInRegister();
      bool input_on_stack = mr_conv->IsCurrentParamOnStack();
      CHECK(input_in_reg || input_on_stack);

      if (input_in_reg) {
        ManagedRegister in_reg = mr_conv->CurrentParamRegister();
        __ VerifyObject(in_reg, mr_conv->IsCurrentArgPossiblyNull());
        __ StoreRef(sirt_offset, in_reg);
      } else if (input_on_stack) {
        FrameOffset in_off = mr_conv->CurrentParamStackOffset();
        __ VerifyObject(in_off, mr_conv->IsCurrentArgPossiblyNull());
        __ CopyRef(sirt_offset, in_off,
                   mr_conv->InterproceduralScratchRegister());
      }
    }
    mr_conv->Next();
    main_jni_conv->Next();
  }
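  // Every reference argument now has a SIRT entry; non-reference arguments
  // stay wherever the managed calling convention delivered them until the
  // shuffle in step 7.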

  // 4. Write out the end of the quick frames.
  __ StoreStackPointerToThread(Thread::TopOfManagedStackOffset());
  __ StoreImmediateToThread(Thread::TopOfManagedStackPcOffset(), 0,
                            mr_conv->InterproceduralScratchRegister());
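  // The current stack pointer is now recorded as the top of the managed stack
  // (with a cleared PC), marking the boundary of the managed frame while the
  // thread executes native code.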

  // 5. Move frame down to allow space for outgoing args.
  const size_t main_out_arg_size = main_jni_conv->OutArgSize();
  const size_t end_out_arg_size = end_jni_conv->OutArgSize();
  const size_t max_out_arg_size = std::max(main_out_arg_size, end_out_arg_size);
  __ IncreaseFrameSize(max_out_arg_size);

  // 6. Call into the appropriate JniMethodStart passing Thread* so that the transition out of
  //    Runnable can occur. The result is the saved JNI local state that is restored by the exit
  //    call. We abuse the JNI calling convention here, which is guaranteed to support passing
  //    two pointer arguments.
  ThreadOffset jni_start = is_synchronized ? QUICK_ENTRYPOINT_OFFSET(pJniMethodStartSynchronized)
                                           : QUICK_ENTRYPOINT_OFFSET(pJniMethodStart);
  main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
  FrameOffset locked_object_sirt_offset(0);
  if (is_synchronized) {
    // Pass object for locking.
    main_jni_conv->Next();  // Skip JNIEnv.
    locked_object_sirt_offset = main_jni_conv->CurrentParamSirtEntryOffset();
    main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
    if (main_jni_conv->IsCurrentParamOnStack()) {
      FrameOffset out_off = main_jni_conv->CurrentParamStackOffset();
      __ CreateSirtEntry(out_off, locked_object_sirt_offset,
                         mr_conv->InterproceduralScratchRegister(),
                         false);
    } else {
      ManagedRegister out_reg = main_jni_conv->CurrentParamRegister();
      __ CreateSirtEntry(out_reg, locked_object_sirt_offset,
                         ManagedRegister::NoRegister(), false);
    }
    main_jni_conv->Next();
  }
  if (main_jni_conv->IsCurrentParamInRegister()) {
    __ GetCurrentThread(main_jni_conv->CurrentParamRegister());
    __ Call(main_jni_conv->CurrentParamRegister(), Offset(jni_start),
            main_jni_conv->InterproceduralScratchRegister());
  } else {
    __ GetCurrentThread(main_jni_conv->CurrentParamStackOffset(),
                        main_jni_conv->InterproceduralScratchRegister());
    __ Call(ThreadOffset(jni_start), main_jni_conv->InterproceduralScratchRegister());
  }
  if (is_synchronized) {  // Check for exceptions from monitor enter.
    __ ExceptionPoll(main_jni_conv->InterproceduralScratchRegister(), main_out_arg_size);
  }
  FrameOffset saved_cookie_offset = main_jni_conv->SavedLocalReferenceCookieOffset();
  __ Store(saved_cookie_offset, main_jni_conv->IntReturnRegister(), 4);
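  // JniMethodStart returns the saved local reference cookie in the integer
  // return register; it is spilled to the frame here so it can be handed back
  // to the JniMethodEnd call in step 12.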

  // 7. Iterate over arguments placing values from the managed calling convention into
  //    the convention required for a native call (shuffling). For references,
  //    place an index/pointer to the reference after checking whether it is
  //    NULL (which must be encoded as NULL).
  //    Note: we do this prior to materializing the JNIEnv* and static's jclass to
  //    give as many free registers for the shuffle as possible.
  mr_conv->ResetIterator(FrameOffset(frame_size + main_out_arg_size));
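  // Count the managed arguments so that the copy loop below can walk them in
  // reverse.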
  uint32_t args_count = 0;
  while (mr_conv->HasNext()) {
    args_count++;
    mr_conv->Next();
  }

  // Do a backward pass over arguments, so that the generated code will be "mov
  // R2, R3; mov R1, R2" instead of "mov R1, R2; mov R2, R3."
  // TODO: A reverse iterator to improve readability.
  for (uint32_t i = 0; i < args_count; ++i) {
    mr_conv->ResetIterator(FrameOffset(frame_size + main_out_arg_size));
    main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
    main_jni_conv->Next();  // Skip JNIEnv*.
    if (is_static) {
      main_jni_conv->Next();  // Skip Class for now.
    }
    // Skip to the argument we're interested in.
    for (uint32_t j = 0; j < args_count - i - 1; ++j) {
      mr_conv->Next();
      main_jni_conv->Next();
    }
    CopyParameter(jni_asm.get(), mr_conv.get(), main_jni_conv.get(), frame_size, main_out_arg_size);
  }
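  // For static methods the jclass argument is the declaring class stored in
  // the SIRT at step 3.5; pass the address of that SIRT entry (it is known to
  // be non-null, hence null_allowed is false).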
  if (is_static) {
    // Create argument for Class.
    mr_conv->ResetIterator(FrameOffset(frame_size + main_out_arg_size));
    main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
    main_jni_conv->Next();  // Skip JNIEnv*.
    FrameOffset sirt_offset = main_jni_conv->CurrentParamSirtEntryOffset();
    if (main_jni_conv->IsCurrentParamOnStack()) {
      FrameOffset out_off = main_jni_conv->CurrentParamStackOffset();
      __ CreateSirtEntry(out_off, sirt_offset,
                         mr_conv->InterproceduralScratchRegister(),
                         false);
    } else {
      ManagedRegister out_reg = main_jni_conv->CurrentParamRegister();
      __ CreateSirtEntry(out_reg, sirt_offset,
                         ManagedRegister::NoRegister(), false);
    }
  }

  // 8. Create 1st argument, the JNI environment ptr.
  main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
  // Register that will hold local indirect reference table.
  if (main_jni_conv->IsCurrentParamInRegister()) {
    ManagedRegister jni_env = main_jni_conv->CurrentParamRegister();
    DCHECK(!jni_env.Equals(main_jni_conv->InterproceduralScratchRegister()));
    __ LoadRawPtrFromThread(jni_env, Thread::JniEnvOffset());
  } else {
    FrameOffset jni_env = main_jni_conv->CurrentParamStackOffset();
    __ CopyRawPtrFromThread(jni_env, Thread::JniEnvOffset(),
                            main_jni_conv->InterproceduralScratchRegister());
  }

  // 9. Plant call to native code associated with method.
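  //    The native entry point is loaded from the ArtMethod's native method
  //    field and invoked with the JNI arguments marshalled above.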
  __ Call(main_jni_conv->MethodStackOffset(), mirror::ArtMethod::NativeMethodOffset(),
          mr_conv->InterproceduralScratchRegister());

  // 10. Fix differences in result widths.
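  //     The native ABI does not guarantee that sub-word return values arrive
  //     sign- or zero-extended in the return register, so widen them here to
  //     the representation managed code expects.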
  if (instruction_set == kX86) {
    if (main_jni_conv->GetReturnType() == Primitive::kPrimByte ||
        main_jni_conv->GetReturnType() == Primitive::kPrimShort) {
      __ SignExtend(main_jni_conv->ReturnRegister(),
                    Primitive::ComponentSize(main_jni_conv->GetReturnType()));
    } else if (main_jni_conv->GetReturnType() == Primitive::kPrimBoolean ||
               main_jni_conv->GetReturnType() == Primitive::kPrimChar) {
      __ ZeroExtend(main_jni_conv->ReturnRegister(),
                    Primitive::ComponentSize(main_jni_conv->GetReturnType()));
    }
  }

  // 11. Save return value
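  //     The return register may be clobbered by the JniMethodEnd call in step 12,
  //     so non-reference results are spilled here and reloaded in step 13;
  //     reference results are instead passed to JniMethodEndWithReference.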
  FrameOffset return_save_location = main_jni_conv->ReturnValueSaveLocation();
  if (main_jni_conv->SizeOfReturnValue() != 0 && !reference_return) {
    if (instruction_set == kMips && main_jni_conv->GetReturnType() == Primitive::kPrimDouble &&
        return_save_location.Uint32Value() % 8 != 0) {
      // Ensure doubles are 8-byte aligned for MIPS.
      return_save_location = FrameOffset(return_save_location.Uint32Value() + kPointerSize);
    }
    CHECK_LT(return_save_location.Uint32Value(), frame_size + main_out_arg_size);
    __ Store(return_save_location, main_jni_conv->ReturnRegister(), main_jni_conv->SizeOfReturnValue());
  }

  // 12. Call into JNI method end possibly passing a returned reference, the method and the current
  //     thread.
  end_jni_conv->ResetIterator(FrameOffset(end_out_arg_size));
  ThreadOffset jni_end(-1);
  if (reference_return) {
    // Pass result.
    jni_end = is_synchronized ? QUICK_ENTRYPOINT_OFFSET(pJniMethodEndWithReferenceSynchronized)
                              : QUICK_ENTRYPOINT_OFFSET(pJniMethodEndWithReference);
    SetNativeParameter(jni_asm.get(), end_jni_conv.get(), end_jni_conv->ReturnRegister());
    end_jni_conv->Next();
  } else {
    jni_end = is_synchronized ? QUICK_ENTRYPOINT_OFFSET(pJniMethodEndSynchronized)
                              : QUICK_ENTRYPOINT_OFFSET(pJniMethodEnd);
  }
  // Pass saved local reference state.
  if (end_jni_conv->IsCurrentParamOnStack()) {
    FrameOffset out_off = end_jni_conv->CurrentParamStackOffset();
    __ Copy(out_off, saved_cookie_offset, end_jni_conv->InterproceduralScratchRegister(), 4);
  } else {
    ManagedRegister out_reg = end_jni_conv->CurrentParamRegister();
    __ Load(out_reg, saved_cookie_offset, 4);
  }
  end_jni_conv->Next();
  if (is_synchronized) {
    // Pass object for unlocking.
    if (end_jni_conv->IsCurrentParamOnStack()) {
      FrameOffset out_off = end_jni_conv->CurrentParamStackOffset();
      __ CreateSirtEntry(out_off, locked_object_sirt_offset,
                         end_jni_conv->InterproceduralScratchRegister(),
                         false);
    } else {
      ManagedRegister out_reg = end_jni_conv->CurrentParamRegister();
      __ CreateSirtEntry(out_reg, locked_object_sirt_offset,
                         ManagedRegister::NoRegister(), false);
    }
    end_jni_conv->Next();
  }
  if (end_jni_conv->IsCurrentParamInRegister()) {
    __ GetCurrentThread(end_jni_conv->CurrentParamRegister());
    __ Call(end_jni_conv->CurrentParamRegister(), Offset(jni_end),
            end_jni_conv->InterproceduralScratchRegister());
  } else {
    __ GetCurrentThread(end_jni_conv->CurrentParamStackOffset(),
                        end_jni_conv->InterproceduralScratchRegister());
    __ Call(ThreadOffset(jni_end), end_jni_conv->InterproceduralScratchRegister());
  }

  // 13. Reload return value
  if (main_jni_conv->SizeOfReturnValue() != 0 && !reference_return) {
    __ Load(mr_conv->ReturnRegister(), return_save_location, mr_conv->SizeOfReturnValue());
  }

  // 14. Move frame up now that we're done with the out arg space.
  __ DecreaseFrameSize(max_out_arg_size);

  // 15. Process pending exceptions from JNI call or monitor exit.
  __ ExceptionPoll(main_jni_conv->InterproceduralScratchRegister(), 0);

  // 16. Remove activation - no need to restore callee save registers because we didn't clobber
  //     them.
  __ RemoveFrame(frame_size, std::vector<ManagedRegister>());

  // 17. Finalize code generation.
  __ EmitSlowPaths();
  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  if (should_disassemble) {
    UniquePtr<Disassembler> disassembler(Disassembler::Create(instruction_set));
    disassembler->Dump(LOG(INFO), &managed_code[0], &managed_code[managed_code.size()]);
  }
  return new CompiledMethod(compiler,
                            instruction_set,
                            managed_code,
                            frame_size,
                            main_jni_conv->CoreSpillMask(),
                            main_jni_conv->FpSpillMask());
}

// Copy a single parameter from the managed to the JNI calling convention.
static void CopyParameter(Assembler* jni_asm,
                          ManagedRuntimeCallingConvention* mr_conv,
                          JniCallingConvention* jni_conv,
                          size_t frame_size, size_t out_arg_size) {
  bool input_in_reg = mr_conv->IsCurrentParamInRegister();
  bool output_in_reg = jni_conv->IsCurrentParamInRegister();
  FrameOffset sirt_offset(0);
  bool null_allowed = false;
  bool ref_param = jni_conv->IsCurrentParamAReference();
  CHECK(!ref_param || mr_conv->IsCurrentParamAReference());
  // The input may be in a register, on the stack, or both - but never neither.
  CHECK(input_in_reg || mr_conv->IsCurrentParamOnStack());
  if (output_in_reg) {  // Output shouldn't straddle registers and stack.
    CHECK(!jni_conv->IsCurrentParamOnStack());
  } else {
    CHECK(jni_conv->IsCurrentParamOnStack());
  }
  // References need placing in the SIRT, and the SIRT entry address is what gets passed.
  if (ref_param) {
    null_allowed = mr_conv->IsCurrentArgPossiblyNull();
    // Compute the SIRT offset. Note null is placed in the SIRT but the jobject
    // passed to the native code must be null (not a pointer into the SIRT
    // as with regular references).
    sirt_offset = jni_conv->CurrentParamSirtEntryOffset();
    // Check that the SIRT offset is within the frame.
    CHECK_LT(sirt_offset.Uint32Value(), (frame_size + out_arg_size));
  }
  if (input_in_reg && output_in_reg) {
    ManagedRegister in_reg = mr_conv->CurrentParamRegister();
    ManagedRegister out_reg = jni_conv->CurrentParamRegister();
    if (ref_param) {
      __ CreateSirtEntry(out_reg, sirt_offset, in_reg, null_allowed);
    } else {
      if (!mr_conv->IsCurrentParamOnStack()) {
        // Regular non-straddling move.
        __ Move(out_reg, in_reg, mr_conv->CurrentParamSize());
      } else {
        UNIMPLEMENTED(FATAL);  // We currently don't expect to see this case.
      }
    }
  } else if (!input_in_reg && !output_in_reg) {
    FrameOffset out_off = jni_conv->CurrentParamStackOffset();
    if (ref_param) {
      __ CreateSirtEntry(out_off, sirt_offset, mr_conv->InterproceduralScratchRegister(),
                         null_allowed);
    } else {
      FrameOffset in_off = mr_conv->CurrentParamStackOffset();
      size_t param_size = mr_conv->CurrentParamSize();
      CHECK_EQ(param_size, jni_conv->CurrentParamSize());
      __ Copy(out_off, in_off, mr_conv->InterproceduralScratchRegister(), param_size);
    }
  } else if (!input_in_reg && output_in_reg) {
    FrameOffset in_off = mr_conv->CurrentParamStackOffset();
    ManagedRegister out_reg = jni_conv->CurrentParamRegister();
    // Check that incoming stack arguments are above the current stack frame.
    CHECK_GT(in_off.Uint32Value(), frame_size);
    if (ref_param) {
      __ CreateSirtEntry(out_reg, sirt_offset, ManagedRegister::NoRegister(), null_allowed);
    } else {
      size_t param_size = mr_conv->CurrentParamSize();
      CHECK_EQ(param_size, jni_conv->CurrentParamSize());
      __ Load(out_reg, in_off, param_size);
    }
  } else {
    CHECK(input_in_reg && !output_in_reg);
    ManagedRegister in_reg = mr_conv->CurrentParamRegister();
    FrameOffset out_off = jni_conv->CurrentParamStackOffset();
    // Check that the outgoing argument is within the frame.
    CHECK_LT(out_off.Uint32Value(), frame_size);
    if (ref_param) {
      // TODO: recycle value in in_reg rather than reload from SIRT.
      __ CreateSirtEntry(out_off, sirt_offset, mr_conv->InterproceduralScratchRegister(),
                         null_allowed);
    } else {
      size_t param_size = mr_conv->CurrentParamSize();
      CHECK_EQ(param_size, jni_conv->CurrentParamSize());
      if (!mr_conv->IsCurrentParamOnStack()) {
        // Regular non-straddling store.
        __ Store(out_off, in_reg, param_size);
      } else {
        // Store where the input straddles registers and the stack.
        CHECK_EQ(param_size, 8u);
        FrameOffset in_off = mr_conv->CurrentParamStackOffset();
        __ StoreSpanning(out_off, in_reg, in_off, mr_conv->InterproceduralScratchRegister());
      }
    }
  }
}

static void SetNativeParameter(Assembler* jni_asm,
                               JniCallingConvention* jni_conv,
                               ManagedRegister in_reg) {
  if (jni_conv->IsCurrentParamOnStack()) {
    FrameOffset dest = jni_conv->CurrentParamStackOffset();
    __ StoreRawPtr(dest, in_reg);
  } else {
    if (!jni_conv->CurrentParamRegister().Equals(in_reg)) {
      __ Move(jni_conv->CurrentParamRegister(), in_reg, jni_conv->CurrentParamSize());
    }
  }
}

}  // namespace art

extern "C" art::CompiledMethod* ArtQuickJniCompileMethod(art::CompilerDriver& compiler,
                                                         uint32_t access_flags, uint32_t method_idx,
                                                         const art::DexFile& dex_file) {
  return ArtJniCompileMethodInternal(compiler, access_flags, method_idx, dex_file);
}
    491