/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "art_method.h"

#include "arch/context.h"
#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/stringpiece.h"
#include "dex_file-inl.h"
#include "dex_instruction.h"
#include "entrypoints/entrypoint_utils.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc/accounting/card_table-inl.h"
#include "interpreter/interpreter.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jni_internal.h"
#include "mapping_table.h"
#include "mirror/abstract_method.h"
#include "mirror/class-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"
#include "mirror/string.h"
#include "scoped_thread_state_change.h"
#include "well_known_classes.h"

namespace art {

extern "C" void art_quick_invoke_stub(ArtMethod*, uint32_t*, uint32_t, Thread*, JValue*,
                                      const char*);
#if defined(__LP64__) || defined(__arm__) || defined(__i386__)
extern "C" void art_quick_invoke_static_stub(ArtMethod*, uint32_t*, uint32_t, Thread*, JValue*,
                                             const char*);
#endif

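// Decodes a java.lang.reflect.AbstractMethod (Method or Constructor) received through JNI and
// returns the ArtMethod it wraps.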
ArtMethod* ArtMethod::FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
                                          jobject jlr_method) {
  auto* abstract_method = soa.Decode<mirror::AbstractMethod*>(jlr_method);
  DCHECK(abstract_method != nullptr);
  return abstract_method->GetArtMethod();
}

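// Resolves this method's name to a managed java.lang.String via the declaring class's dex cache.
// Not valid for proxy methods.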
mirror::String* ArtMethod::GetNameAsString(Thread* self) {
  CHECK(!IsProxyMethod());
  StackHandleScope<1> hs(self);
  Handle<mirror::DexCache> dex_cache(hs.NewHandle(GetDexCache()));
  auto* dex_file = dex_cache->GetDexFile();
  uint32_t dex_method_idx = GetDexMethodIndex();
  const DexFile::MethodId& method_id = dex_file->GetMethodId(dex_method_idx);
  return Runtime::Current()->GetClassLinker()->ResolveString(*dex_file, method_id.name_idx_,
                                                             dex_cache);
}

InvokeType ArtMethod::GetInvokeType() {
  // TODO: kSuper?
  if (GetDeclaringClass()->IsInterface()) {
    return kInterface;
  } else if (IsStatic()) {
    return kStatic;
  } else if (IsDirect()) {
    return kDirect;
  } else {
    return kVirtual;
  }
}

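// Counts the virtual registers needed for the arguments described by the shorty, skipping the
// return type in shorty[0]. Longs ('J') and doubles ('D') take two registers each.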
size_t ArtMethod::NumArgRegisters(const StringPiece& shorty) {
  CHECK_LE(1U, shorty.length());
  uint32_t num_registers = 0;
  for (size_t i = 1; i < shorty.length(); ++i) {
    char ch = shorty[i];
    if (ch == 'D' || ch == 'J') {
      num_registers += 2;
    } else {
      num_registers += 1;
    }
  }
  return num_registers;
}

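// Returns true if the two methods have the same name and signature. When both methods share a
// dex cache this reduces to comparing name and proto indices; otherwise the name strings and
// signatures are compared across dex files.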
static bool HasSameNameAndSignature(ArtMethod* method1, ArtMethod* method2)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension ants(Thread::Current(), "HasSameNameAndSignature");
  const DexFile* dex_file = method1->GetDexFile();
  const DexFile::MethodId& mid = dex_file->GetMethodId(method1->GetDexMethodIndex());
  if (method1->GetDexCache() == method2->GetDexCache()) {
    const DexFile::MethodId& mid2 = dex_file->GetMethodId(method2->GetDexMethodIndex());
    return mid.name_idx_ == mid2.name_idx_ && mid.proto_idx_ == mid2.proto_idx_;
  }
  const DexFile* dex_file2 = method2->GetDexFile();
  const DexFile::MethodId& mid2 = dex_file2->GetMethodId(method2->GetDexMethodIndex());
  if (!DexFileStringEquals(dex_file, mid.name_idx_, dex_file2, mid2.name_idx_)) {
    return false;  // Name mismatch.
  }
  return dex_file->GetMethodSignature(mid) == dex_file2->GetMethodSignature(mid2);
}

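// Returns the method that this method overrides (from the superclass vtable) or implements (from
// an implemented interface), or null if there is no such method.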
ArtMethod* ArtMethod::FindOverriddenMethod(size_t pointer_size) {
  if (IsStatic()) {
    return nullptr;
  }
  mirror::Class* declaring_class = GetDeclaringClass();
  mirror::Class* super_class = declaring_class->GetSuperClass();
  uint16_t method_index = GetMethodIndex();
  ArtMethod* result = nullptr;
  // Did this method override a superclass method? If so, load the result from the superclass's
  // vtable.
  if (super_class->HasVTable() && method_index < super_class->GetVTableLength()) {
    result = super_class->GetVTableEntry(method_index, pointer_size);
  } else {
    // The method didn't override a superclass method, so search the interfaces.
    if (IsProxyMethod()) {
      result = GetDexCacheResolvedMethods()->GetElementPtrSize<ArtMethod*>(
          GetDexMethodIndex(), pointer_size);
      CHECK_EQ(result,
               Runtime::Current()->GetClassLinker()->FindMethodForProxy(GetDeclaringClass(), this));
    } else {
      mirror::IfTable* iftable = GetDeclaringClass()->GetIfTable();
      for (size_t i = 0; i < iftable->Count() && result == nullptr; i++) {
        mirror::Class* interface = iftable->GetInterface(i);
        for (size_t j = 0; j < interface->NumVirtualMethods(); ++j) {
          ArtMethod* interface_method = interface->GetVirtualMethod(j, pointer_size);
          if (HasSameNameAndSignature(
              this, interface_method->GetInterfaceMethodIfProxy(sizeof(void*)))) {
            result = interface_method;
            break;
          }
        }
      }
    }
  }
  DCHECK(result == nullptr || HasSameNameAndSignature(
      GetInterfaceMethodIfProxy(sizeof(void*)), result->GetInterfaceMethodIfProxy(sizeof(void*))));
  return result;
}

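// Translates this method into the corresponding method index in other_dexfile, given the index of
// a MethodId in that file (name_and_signature_idx) with the same name and signature. Returns
// DexFile::kDexNoIndex if the declaring class or the method cannot be found there.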
uint32_t ArtMethod::FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
                                                     uint32_t name_and_signature_idx) {
  const DexFile* dexfile = GetDexFile();
  const uint32_t dex_method_idx = GetDexMethodIndex();
  const DexFile::MethodId& mid = dexfile->GetMethodId(dex_method_idx);
  const DexFile::MethodId& name_and_sig_mid = other_dexfile.GetMethodId(name_and_signature_idx);
  DCHECK_STREQ(dexfile->GetMethodName(mid), other_dexfile.GetMethodName(name_and_sig_mid));
  DCHECK_EQ(dexfile->GetMethodSignature(mid), other_dexfile.GetMethodSignature(name_and_sig_mid));
  if (dexfile == &other_dexfile) {
    return dex_method_idx;
  }
  const char* mid_declaring_class_descriptor = dexfile->StringByTypeIdx(mid.class_idx_);
  const DexFile::StringId* other_descriptor =
      other_dexfile.FindStringId(mid_declaring_class_descriptor);
  if (other_descriptor != nullptr) {
    const DexFile::TypeId* other_type_id =
        other_dexfile.FindTypeId(other_dexfile.GetIndexForStringId(*other_descriptor));
    if (other_type_id != nullptr) {
      const DexFile::MethodId* other_mid = other_dexfile.FindMethodId(
          *other_type_id, other_dexfile.GetStringId(name_and_sig_mid.name_idx_),
          other_dexfile.GetProtoId(name_and_sig_mid.proto_idx_));
      if (other_mid != nullptr) {
        return other_dexfile.GetIndexForMethodId(*other_mid);
      }
    }
  }
  return DexFile::kDexNoIndex;
}

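// Maps a native (quick) PC inside this method's compiled code back to a dex pc, using either the
// optimizing compiler's stack maps or the quick mapping table. Returns DexFile::kDexNoIndex when
// no mapping exists, or aborts if abort_on_failure is set.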
uint32_t ArtMethod::ToDexPc(const uintptr_t pc, bool abort_on_failure) {
  const void* entry_point = GetQuickOatEntryPoint(sizeof(void*));
  uint32_t sought_offset = pc - reinterpret_cast<uintptr_t>(entry_point);
  if (IsOptimized(sizeof(void*))) {
    CodeInfo code_info = GetOptimizedCodeInfo();
    StackMap stack_map = code_info.GetStackMapForNativePcOffset(sought_offset);
    if (stack_map.IsValid()) {
      return stack_map.GetDexPc(code_info);
    }
  } else {
    MappingTable table(entry_point != nullptr ?
        GetMappingTable(EntryPointToCodePointer(entry_point), sizeof(void*)) : nullptr);
    if (table.TotalSize() == 0) {
      // NOTE: Special methods (see Mir2Lir::GenSpecialCase()) have an empty mapping
      // but they have no suspend checks and, consequently, we never call ToDexPc() for them.
      DCHECK(IsNative() || IsCalleeSaveMethod() || IsProxyMethod()) << PrettyMethod(this);
      return DexFile::kDexNoIndex;   // Special no mapping case
    }
    // Assume the caller wants a pc-to-dex mapping so check here first.
    typedef MappingTable::PcToDexIterator It;
    for (It cur = table.PcToDexBegin(), end = table.PcToDexEnd(); cur != end; ++cur) {
      if (cur.NativePcOffset() == sought_offset) {
        return cur.DexPc();
      }
    }
    // Now check dex-to-pc mappings.
    typedef MappingTable::DexToPcIterator It2;
    for (It2 cur = table.DexToPcBegin(), end = table.DexToPcEnd(); cur != end; ++cur) {
      if (cur.NativePcOffset() == sought_offset) {
        return cur.DexPc();
      }
    }
  }
  if (abort_on_failure) {
    LOG(FATAL) << "Failed to find Dex offset for PC offset "
               << reinterpret_cast<void*>(sought_offset)
               << " (PC " << reinterpret_cast<void*>(pc) << ", entry_point=" << entry_point
               << " current entry_point=" << GetQuickOatEntryPoint(sizeof(void*))
               << ") in " << PrettyMethod(this);
  }
  return DexFile::kDexNoIndex;
}

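// Maps a dex pc to the corresponding native (quick) PC in this method's compiled code via the
// mapping table. Returns UINTPTR_MAX when no mapping is found, or aborts if abort_on_failure is
// set.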
uintptr_t ArtMethod::ToNativeQuickPc(const uint32_t dex_pc, bool abort_on_failure) {
  const void* entry_point = GetQuickOatEntryPoint(sizeof(void*));
  MappingTable table(entry_point != nullptr ?
      GetMappingTable(EntryPointToCodePointer(entry_point), sizeof(void*)) : nullptr);
  if (table.TotalSize() == 0) {
    DCHECK_EQ(dex_pc, 0U);
    return 0;   // Special no mapping/pc == 0 case
  }
  // Assume the caller wants a dex-to-pc mapping so check here first.
  typedef MappingTable::DexToPcIterator It;
  for (It cur = table.DexToPcBegin(), end = table.DexToPcEnd(); cur != end; ++cur) {
    if (cur.DexPc() == dex_pc) {
      return reinterpret_cast<uintptr_t>(entry_point) + cur.NativePcOffset();
    }
  }
  // Now check pc-to-dex mappings.
  typedef MappingTable::PcToDexIterator It2;
  for (It2 cur = table.PcToDexBegin(), end = table.PcToDexEnd(); cur != end; ++cur) {
    if (cur.DexPc() == dex_pc) {
      return reinterpret_cast<uintptr_t>(entry_point) + cur.NativePcOffset();
    }
  }
  if (abort_on_failure) {
    LOG(FATAL) << "Failed to find native offset for dex pc 0x" << std::hex << dex_pc
               << " in " << PrettyMethod(this);
  }
  return UINTPTR_MAX;
}

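// Finds the catch handler covering dex_pc that matches exception_type and returns the handler's
// dex pc, or DexFile::kDexNoIndex when no handler applies. When a handler is found,
// *has_no_move_exception records whether the handler does not begin with MOVE_EXCEPTION.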
uint32_t ArtMethod::FindCatchBlock(Handle<mirror::Class> exception_type,
                                   uint32_t dex_pc, bool* has_no_move_exception) {
  const DexFile::CodeItem* code_item = GetCodeItem();
  // Set aside the exception while we resolve its type.
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> exception(hs.NewHandle(self->GetException()));
  self->ClearException();
  // Default to handler not found.
  uint32_t found_dex_pc = DexFile::kDexNoIndex;
  // Iterate over the catch handlers associated with dex_pc.
  for (CatchHandlerIterator it(*code_item, dex_pc); it.HasNext(); it.Next()) {
    uint16_t iter_type_idx = it.GetHandlerTypeIndex();
    // Catch-all case.
    if (iter_type_idx == DexFile::kDexNoIndex16) {
      found_dex_pc = it.GetHandlerAddress();
      break;
    }
    // Does this catch exception type apply?
    mirror::Class* iter_exception_type = GetClassFromTypeIndex(iter_type_idx, true);
    if (UNLIKELY(iter_exception_type == nullptr)) {
      // We now have a NoClassDefFoundError as the exception. Ignore it in case the exception
      // class was removed by a ProGuard-like tool.
      // Note: this is not RI behavior. The RI would have failed when loading the class.
      self->ClearException();
      // Delete any long jump context as this routine is called during a stack walk which will
      // release its in-use context at the end.
      delete self->GetLongJumpContext();
      LOG(WARNING) << "Unresolved exception class when finding catch block: "
        << DescriptorToDot(GetTypeDescriptorFromTypeIdx(iter_type_idx));
    } else if (iter_exception_type->IsAssignableFrom(exception_type.Get())) {
      found_dex_pc = it.GetHandlerAddress();
      break;
    }
  }
  if (found_dex_pc != DexFile::kDexNoIndex) {
    const Instruction* first_catch_instr =
        Instruction::At(&code_item->insns_[found_dex_pc]);
    *has_no_move_exception = (first_catch_instr->Opcode() != Instruction::MOVE_EXCEPTION);
  }
  // Put the exception back.
  if (exception.Get() != nullptr) {
    self->SetException(exception.Get());
  }
  return found_dex_pc;
}

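// Debug check that pc lies within this method's quick compiled code. Native, runtime, and proxy
// methods are skipped, as are the instrumentation, resolution, and interpreter-bridge entry
// points and code living in the JIT code cache.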
void ArtMethod::AssertPcIsWithinQuickCode(uintptr_t pc) {
  if (IsNative() || IsRuntimeMethod() || IsProxyMethod()) {
    return;
  }
  if (pc == reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc())) {
    return;
  }
  const void* code = GetEntryPointFromQuickCompiledCode();
  if (code == GetQuickInstrumentationEntryPoint()) {
    return;
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  if (class_linker->IsQuickToInterpreterBridge(code) ||
      class_linker->IsQuickResolutionStub(code)) {
    return;
  }
  // If we are the JIT then we may have just compiled the method after the
  // IsQuickToInterpreterBridge check.
  jit::Jit* const jit = Runtime::Current()->GetJit();
  if (jit != nullptr &&
      jit->GetCodeCache()->ContainsCodePtr(reinterpret_cast<const void*>(code))) {
    return;
  }
  /*
   * During a stack walk, a return PC may point past-the-end of the code
   * in the case that the last instruction is a call that isn't expected to
   * return.  Thus, we check <= code + GetCodeSize().
   *
   * NOTE: For Thumb both pc and code are offset by 1 indicating the Thumb state.
   */
  CHECK(PcIsWithinQuickCode(reinterpret_cast<uintptr_t>(code), pc))
      << PrettyMethod(this)
      << " pc=" << std::hex << pc
      << " code=" << code
      << " size=" << GetCodeSize(
          EntryPointToCodePointer(reinterpret_cast<const void*>(code)));
}

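// Returns true if the current quick entry point does not point at this method's oat code (for
// example, it is the quick-to-interpreter bridge), or if no oat code exists at all.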
bool ArtMethod::IsEntrypointInterpreter() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  const void* oat_quick_code = class_linker->GetOatMethodQuickCodeFor(this);
  return oat_quick_code == nullptr || oat_quick_code != GetEntryPointFromQuickCompiledCode();
}

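// Returns the quick compiled code entry point for this method, or null for abstract, runtime,
// and proxy methods, or when only the quick-to-interpreter bridge or generic JNI stub is
// installed.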
const void* ArtMethod::GetQuickOatEntryPoint(size_t pointer_size) {
  if (IsAbstract() || IsRuntimeMethod() || IsProxyMethod()) {
    return nullptr;
  }
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  const void* code = runtime->GetInstrumentation()->GetQuickCodeFor(this, pointer_size);
  // On failure, instead of null we get the quick-generic-jni-trampoline for native methods
  // (indicating generic JNI), or the quick-to-interpreter-bridge (but not the trampoline) for
  // non-native methods.
  if (class_linker->IsQuickToInterpreterBridge(code) ||
      class_linker->IsQuickGenericJniStub(code)) {
    return nullptr;
  }
  return code;
}

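// Debug-only helper: returns the offset of pc from quick_entry_point, asserting that the entry
// point matches what the instrumentation currently reports for this method.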
#ifndef NDEBUG
uintptr_t ArtMethod::NativeQuickPcOffset(const uintptr_t pc, const void* quick_entry_point) {
  CHECK_NE(quick_entry_point, GetQuickToInterpreterBridge());
  CHECK_EQ(quick_entry_point,
           Runtime::Current()->GetInstrumentation()->GetQuickCodeFor(this, sizeof(void*)));
  return pc - reinterpret_cast<uintptr_t>(quick_entry_point);
}
#endif

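// Invokes this method with the packed argument array args (which includes the receiver for
// non-static methods), storing the return value in result. Falls back to the interpreter when the
// runtime has not started or the debugger forces interpretation; otherwise it calls through the
// quick invoke stubs and handles deoptimization on the way back.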
void ArtMethod::Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result,
                       const char* shorty) {
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEnd())) {
    ThrowStackOverflowError(self);
    return;
  }

  if (kIsDebugBuild) {
    self->AssertThreadSuspensionIsAllowable();
    CHECK_EQ(kRunnable, self->GetState());
    CHECK_STREQ(GetInterfaceMethodIfProxy(sizeof(void*))->GetShorty(), shorty);
  }

  // Push a transition back into managed code onto the linked list in thread.
  ManagedStack fragment;
  self->PushManagedStackFragment(&fragment);

  Runtime* runtime = Runtime::Current();
  // Call the invoke stub, passing everything as arguments.
  // If the runtime is not yet started, or if the debugger requires it, perform the invocation
  // using the interpreter.
  if (UNLIKELY(!runtime->IsStarted() || Dbg::IsForcedInterpreterNeededForCalling(self, this))) {
    if (IsStatic()) {
      art::interpreter::EnterInterpreterFromInvoke(self, this, nullptr, args, result);
    } else {
      mirror::Object* receiver =
          reinterpret_cast<StackReference<mirror::Object>*>(&args[0])->AsMirrorPtr();
      art::interpreter::EnterInterpreterFromInvoke(self, this, receiver, args + 1, result);
    }
  } else {
    DCHECK_EQ(runtime->GetClassLinker()->GetImagePointerSize(), sizeof(void*));

    constexpr bool kLogInvocationStartAndReturn = false;
    bool have_quick_code = GetEntryPointFromQuickCompiledCode() != nullptr;
    if (LIKELY(have_quick_code)) {
      if (kLogInvocationStartAndReturn) {
        LOG(INFO) << StringPrintf(
            "Invoking '%s' quick code=%p static=%d", PrettyMethod(this).c_str(),
            GetEntryPointFromQuickCompiledCode(), static_cast<int>(IsStatic() ? 1 : 0));
      }

      // Ensure that we won't be accidentally calling quick compiled code when -Xint.
      if (kIsDebugBuild && runtime->GetInstrumentation()->IsForcedInterpretOnly()) {
        DCHECK(!runtime->UseJit());
        CHECK(IsEntrypointInterpreter())
            << "Don't call compiled code when -Xint " << PrettyMethod(this);
      }

#if defined(__LP64__) || defined(__arm__) || defined(__i386__)
      if (!IsStatic()) {
        (*art_quick_invoke_stub)(this, args, args_size, self, result, shorty);
      } else {
        (*art_quick_invoke_static_stub)(this, args, args_size, self, result, shorty);
      }
#else
      (*art_quick_invoke_stub)(this, args, args_size, self, result, shorty);
#endif
      if (UNLIKELY(self->GetException() == Thread::GetDeoptimizationException())) {
        // Unusual case where we were running generated code and an
        // exception was thrown to force the activations to be removed from the
        // stack. Continue execution in the interpreter.
        self->ClearException();
        ShadowFrame* shadow_frame =
            self->PopStackedShadowFrame(StackedShadowFrameType::kDeoptimizationShadowFrame);
        result->SetJ(self->PopDeoptimizationReturnValue().GetJ());
        self->SetTopOfStack(nullptr);
        self->SetTopOfShadowStack(shadow_frame);
        interpreter::EnterInterpreterFromDeoptimize(self, shadow_frame, result);
      }
      if (kLogInvocationStartAndReturn) {
        LOG(INFO) << StringPrintf("Returned '%s' quick code=%p", PrettyMethod(this).c_str(),
                                  GetEntryPointFromQuickCompiledCode());
      }
    } else {
      LOG(INFO) << "Not invoking '" << PrettyMethod(this) << "' code=null";
      if (result != nullptr) {
        result->SetJ(0);
      }
    }
  }

  // Pop transition.
  self->PopManagedStackFragment(fragment);
}

// Counts the number of references in the parameter list of the corresponding method.
// Note: this does _not_ include the "this" argument of non-static methods.
static uint32_t GetNumberOfReferenceArgsWithoutReceiver(ArtMethod* method)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  uint32_t shorty_len;
  const char* shorty = method->GetShorty(&shorty_len);
  uint32_t refs = 0;
  for (uint32_t i = 1; i < shorty_len; ++i) {
    if (shorty[i] == 'L') {
      refs++;
    }
  }
  return refs;
}

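// Computes the quick frame layout (frame size and spill masks) for this method, with dedicated
// handling for abstract, runtime, proxy, and generic-JNI methods.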
QuickMethodFrameInfo ArtMethod::GetQuickFrameInfo() {
  Runtime* runtime = Runtime::Current();

  if (UNLIKELY(IsAbstract())) {
    return runtime->GetCalleeSaveMethodFrameInfo(Runtime::kRefsAndArgs);
  }

  // This goes before IsProxyMethod since runtime methods have a null declaring class.
  if (UNLIKELY(IsRuntimeMethod())) {
    return runtime->GetRuntimeMethodFrameInfo(this);
  }

  // For proxy methods we need special handling for the direct method case (there is only one
  // direct method, the constructor). The direct method is cloned from the original
  // java.lang.reflect.Proxy class together with its code, and as a result it is executed as a
  // usual quick compiled method without any stubs, so its frame info should be returned as for a
  // quick method rather than a stub. However, if instrumentation stubs are installed,
  // instrumentation->GetQuickCodeFor() returns artQuickProxyInvokeHandler instead of an oat code
  // pointer, so we have to add a special case here.
  if (UNLIKELY(IsProxyMethod())) {
    if (IsDirect()) {
      CHECK(IsConstructor());
      return GetQuickFrameInfo(EntryPointToCodePointer(GetEntryPointFromQuickCompiledCode()));
    } else {
      return runtime->GetCalleeSaveMethodFrameInfo(Runtime::kRefsAndArgs);
    }
  }

  const void* entry_point = runtime->GetInstrumentation()->GetQuickCodeFor(this, sizeof(void*));
  ClassLinker* class_linker = runtime->GetClassLinker();
  // On failure, instead of null we get the quick-generic-jni-trampoline for native methods
  // (indicating generic JNI), or the quick-to-interpreter-bridge (but not the trampoline) for
  // non-native methods. We really shouldn't see a failure for non-native methods here.
  DCHECK(!class_linker->IsQuickToInterpreterBridge(entry_point));

  if (class_linker->IsQuickGenericJniStub(entry_point)) {
    // Generic JNI frame.
    DCHECK(IsNative());
    uint32_t handle_refs = GetNumberOfReferenceArgsWithoutReceiver(this) + 1;
    size_t scope_size = HandleScope::SizeOf(handle_refs);
    QuickMethodFrameInfo callee_info = runtime->GetCalleeSaveMethodFrameInfo(Runtime::kRefsAndArgs);

    // Callee saves + handle scope + method ref + alignment
    // Note: -sizeof(void*) since callee-save frame stores a whole method pointer.
    size_t frame_size = RoundUp(callee_info.FrameSizeInBytes() - sizeof(void*) +
                                sizeof(ArtMethod*) + scope_size, kStackAlignment);
    return QuickMethodFrameInfo(frame_size, callee_info.CoreSpillMask(), callee_info.FpSpillMask());
  }

  const void* code_pointer = EntryPointToCodePointer(entry_point);
  return GetQuickFrameInfo(code_pointer);
}

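// Binds this native method to native_method, marking it kAccFastNative when is_fast is set.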
void ArtMethod::RegisterNative(const void* native_method, bool is_fast) {
  CHECK(IsNative()) << PrettyMethod(this);
  CHECK(!IsFastNative()) << PrettyMethod(this);
  CHECK(native_method != nullptr) << PrettyMethod(this);
  if (is_fast) {
    SetAccessFlags(GetAccessFlags() | kAccFastNative);
  }
  SetEntryPointFromJni(native_method);
}

void ArtMethod::UnregisterNative() {
  CHECK(IsNative() && !IsFastNative()) << PrettyMethod(this);
  // Restore the stub that looks up the native method pointer via dlsym.
  RegisterNative(GetJniDlsymLookupStub(), false);
}

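// Returns true if this method's declared parameter types exactly match params (same count and the
// same classes, in order). A failed type resolution leaves the pending exception set and returns
// false.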
bool ArtMethod::EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params) {
  auto* dex_cache = GetDexCache();
  auto* dex_file = dex_cache->GetDexFile();
  const auto& method_id = dex_file->GetMethodId(GetDexMethodIndex());
  const auto& proto_id = dex_file->GetMethodPrototype(method_id);
  const DexFile::TypeList* proto_params = dex_file->GetProtoParameters(proto_id);
  auto count = proto_params != nullptr ? proto_params->Size() : 0u;
  auto param_len = params.Get() != nullptr ? params->GetLength() : 0u;
  if (param_len != count) {
    return false;
  }
  auto* cl = Runtime::Current()->GetClassLinker();
  for (size_t i = 0; i < count; ++i) {
    auto type_idx = proto_params->GetTypeItem(i).type_idx_;
    auto* type = cl->ResolveType(type_idx, this);
    if (type == nullptr) {
      Thread::Current()->AssertPendingException();
      return false;
    }
    if (type != params->GetWithoutChecks(i)) {
      return false;
    }
  }
  return true;
}

}  // namespace art