/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_
#define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_

#include "entrypoint_utils.h"

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/enums.h"
#include "class_linker-inl.h"
#include "common_throws.h"
#include "dex/dex_file.h"
#include "dex/invoke_type.h"
#include "entrypoints/quick/callee_save_frame.h"
#include "handle_scope-inl.h"
#include "imt_conflict_table.h"
#include "imtable-inl.h"
#include "indirect_reference_table.h"
#include "jni_internal.h"
#include "mirror/array.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/throwable.h"
#include "nth_caller_visitor.h"
#include "runtime.h"
#include "stack_map.h"
#include "thread.h"
#include "well_known_classes.h"

namespace art {

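// Walk the inlining hierarchy recorded in the stack map to find the ArtMethod
// that is executing at |inlining_depth| within |outer_method|. Must run
// without thread suspension (see the assertion below).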
inline ArtMethod* GetResolvedMethod(ArtMethod* outer_method,
                                    const MethodInfo& method_info,
                                    const InlineInfo& inline_info,
                                    const InlineInfoEncoding& encoding,
                                    uint8_t inlining_depth)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(!outer_method->IsObsolete());

  // This method is used by artQuickResolutionTrampoline before it sets up
  // the passed parameters in a GC-friendly way, so we must never be
  // suspended while executing it.
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);

  if (inline_info.EncodesArtMethodAtDepth(encoding, inlining_depth)) {
    return inline_info.GetArtMethodAtDepth(encoding, inlining_depth);
  }

  uint32_t method_index = inline_info.GetMethodIndexAtDepth(encoding, method_info, inlining_depth);
  if (inline_info.GetDexPcAtDepth(encoding, inlining_depth) == static_cast<uint32_t>(-1)) {
    // "charAt" special case. It is the only non-leaf method we inline across dex files.
    ArtMethod* inlined_method = jni::DecodeArtMethod(WellKnownClasses::java_lang_String_charAt);
    DCHECK_EQ(inlined_method->GetDexMethodIndex(), method_index);
    return inlined_method;
  }

  // Find the method that made the call at each level of the inlining hierarchy.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ArtMethod* method = outer_method;
  for (uint32_t depth = 0, end = inlining_depth + 1u; depth != end; ++depth) {
    DCHECK(!inline_info.EncodesArtMethodAtDepth(encoding, depth));
    DCHECK_NE(inline_info.GetDexPcAtDepth(encoding, depth), static_cast<uint32_t>(-1));
    method_index = inline_info.GetMethodIndexAtDepth(encoding, method_info, depth);
    ArtMethod* inlined_method = class_linker->LookupResolvedMethod(method_index,
                                                                   method->GetDexCache(),
                                                                   method->GetClassLoader());
    if (UNLIKELY(inlined_method == nullptr)) {
      LOG(FATAL) << "Could not find an inlined method from an .oat file: "
                 << method->GetDexFile()->PrettyMethod(method_index) << " . "
                 << "This is most likely caused by duplicate classes or incorrect use of "
                 << "class loaders.";
      UNREACHABLE();
    }
    DCHECK(!inlined_method->IsRuntimeMethod());
    if (UNLIKELY(inlined_method->GetDexFile() != method->GetDexFile())) {
      // TODO: We could permit inlining within a multi-dex oat file and the boot image,
      // even going back from boot image methods to the same oat file. However, this is
      // not currently implemented in the compiler. Therefore crossing a dex file boundary
      // indicates that the inlined definition is not the same as the one used at runtime.
      LOG(FATAL) << "Inlined method resolution crossed dex file boundary: from "
                 << method->PrettyMethod()
                 << " in " << method->GetDexFile()->GetLocation() << "/"
                 << static_cast<const void*>(method->GetDexFile())
                 << " to " << inlined_method->PrettyMethod()
                 << " in " << inlined_method->GetDexFile()->GetLocation() << "/"
                 << static_cast<const void*>(inlined_method->GetDexFile()) << ". "
                 << "This is most likely caused by duplicate classes or incorrect use of "
                 << "class loaders.";
      UNREACHABLE();
    }
    method = inlined_method;
  }

  return method;
}

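// Check that an instance of |klass| may be allocated: the class must be
// instantiable, must not be java.lang.Class, and must be initialized
// (initializing it here if necessary). Sets |*slow_path| whenever the caller
// needs to re-read the current allocator and null-check the result.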
ALWAYS_INLINE inline mirror::Class* CheckObjectAlloc(mirror::Class* klass,
                                                     Thread* self,
                                                     bool* slow_path)
    REQUIRES_SHARED(Locks::mutator_lock_)
    REQUIRES(!Roles::uninterruptible_) {
  if (UNLIKELY(!klass->IsInstantiable())) {
    self->ThrowNewException("Ljava/lang/InstantiationError;", klass->PrettyDescriptor().c_str());
    *slow_path = true;
    return nullptr;  // Failure
  }
  if (UNLIKELY(klass->IsClassClass())) {
    ThrowIllegalAccessError(nullptr, "Class %s is inaccessible",
                            klass->PrettyDescriptor().c_str());
    *slow_path = true;
    return nullptr;  // Failure
  }
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_klass(hs.NewHandle(klass));
    // EnsureInitialized (the class initializer) might cause a GC and may
    // suspend us, meaning that another thread may try to change the allocator
    // while we are stuck in the entrypoints of an old allocator. Also, the
    // class initialization may fail. To handle these cases we mark the slow
    // path boolean as true so that the caller knows to check the allocator
    // type to see if it has changed and to null-check the return value in
    // case the initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_klass, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    } else {
      DCHECK(!self->IsExceptionPending());
    }
    return h_klass.Get();
  }
  return klass;
}

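// Like CheckObjectAlloc, but skips the instantiability and java.lang.Class
// checks; only the initialization check (with its slow-path handling) remains.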
ALWAYS_INLINE
inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
                                                          Thread* self,
                                                          bool* slow_path)
    REQUIRES_SHARED(Locks::mutator_lock_)
    REQUIRES(!Roles::uninterruptible_) {
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(klass));
    // EnsureInitialized (the class initializer) might cause a GC and may
    // suspend us, meaning that another thread may try to change the allocator
    // while we are stuck in the entrypoints of an old allocator. Also, the
    // class initialization may fail. To handle these cases we mark the slow
    // path boolean as true so that the caller knows to check the allocator
    // type to see if it has changed and to null-check the return value in
    // case the initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
    return h_class.Get();
  }
  return klass;
}

// Allocate an instance of klass. Throws InstantiationError if klass is not instantiable,
// or IllegalAccessError if klass is j.l.Class. Performs a clinit check too.
template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCode(mirror::Class* klass,
                                           Thread* self,
                                           gc::AllocatorType allocator_type) {
  bool slow_path = false;
  klass = CheckObjectAlloc(klass, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    // CheckObjectAlloc can cause thread suspension, which means we may now be instrumented.
    return klass->Alloc</*kInstrumented*/true>(
        self,
        Runtime::Current()->GetHeap()->GetCurrentAllocator()).Ptr();
  }
  DCHECK(klass != nullptr);
  return klass->Alloc<kInstrumented>(self, allocator_type).Ptr();
}

// Given the context of a calling Method and a resolved class, create an instance.
template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
                                                   Thread* self,
                                                   gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  bool slow_path = false;
  klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    // Pass in false since the object cannot be finalizable.
    // CheckClassInitializedForObjectAlloc can cause thread suspension, which means we may now
    // be instrumented.
    return klass->Alloc</*kInstrumented*/true, false>(self, heap->GetCurrentAllocator()).Ptr();
  }
  // Pass in false since the object cannot be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type).Ptr();
}

// Given the context of a calling Method and an initialized class, create an instance.
template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
                                                      Thread* self,
                                                      gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  // Pass in false since the object cannot be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type).Ptr();
}

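// Validate an array allocation request: a negative component count throws
// NegativeArraySizeException, an unresolved type is resolved here (forcing
// the slow path), and the referrer's access is checked when requested.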
template <bool kAccessCheck>
ALWAYS_INLINE
inline mirror::Class* CheckArrayAlloc(dex::TypeIndex type_idx,
                                      int32_t component_count,
                                      ArtMethod* method,
                                      bool* slow_path) {
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    *slow_path = true;
    return nullptr;  // Failure
  }
  ObjPtr<mirror::Class> klass = method->GetDexCache()->GetResolvedType(type_idx);
  if (UNLIKELY(klass == nullptr)) {  // Not in dex cache so try to resolve.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    klass = class_linker->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {  // Error
      DCHECK(Thread::Current()->IsExceptionPending());
      return nullptr;  // Failure
    }
    CHECK(klass->IsArrayClass()) << klass->PrettyClass();
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  return klass.Ptr();
}

// Given the context of a calling Method, use its DexCache to resolve a type to an array Class.
// If it cannot be resolved, throw an error. If it can, use it to create an array.
// When the verifier or compiler has not been able to verify access, optionally perform an
// access check.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline mirror::Array* AllocArrayFromCode(dex::TypeIndex type_idx,
                                         int32_t component_count,
                                         ArtMethod* method,
                                         Thread* self,
                                         gc::AllocatorType allocator_type) {
  bool slow_path = false;
  mirror::Class* klass = CheckArrayAlloc<kAccessCheck>(type_idx, component_count, method,
                                                       &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    // CheckArrayAlloc can cause thread suspension, which means we may now be instrumented.
    return mirror::Array::Alloc</*kInstrumented*/true>(self,
                                                       klass,
                                                       component_count,
                                                       klass->GetComponentSizeShift(),
                                                       heap->GetCurrentAllocator());
  }
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSizeShift(), allocator_type);
}

template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
                                                 int32_t component_count,
                                                 Thread* self,
                                                 gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    return nullptr;  // Failure
  }
  // No need to retry a slow-path allocation as the above code won't cause a GC or thread
  // suspension.
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSizeShift(), allocator_type);
}

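// Slow-path field resolution. Resolves |field_idx| relative to |referrer|,
// optionally performing the JLS static-ness and access checks, and ensures
// that a static field's declaring class is initialized before returning.
// Returns null with an exception pending on failure.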
template<FindFieldType type, bool access_check>
inline ArtField* FindFieldFromCode(uint32_t field_idx,
                                   ArtMethod* referrer,
                                   Thread* self,
                                   size_t expected_size) {
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break;
    case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break;
    case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break;
    case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break;
    case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break;
    case StaticPrimitiveWrite:   // Keep GCC happy by having a default handler, fall-through.
    default:                     is_primitive = true;  is_set = true;  is_static = true;  break;
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();

  ArtField* resolved_field;
  if (access_check) {
    // Slow path: According to JLS 13.4.8, a linkage error may occur if the compile-time
    // qualifying type of a field and the resolved run-time qualifying type of the field
    // differ in their static-ness.
    //
    // In particular, don't assume the dex instruction already correctly knows if the
    // real field is static or not. The resolution must not be aware of this.
    ArtMethod* method = referrer->GetInterfaceMethodIfProxy(kRuntimePointerSize);

    StackHandleScope<2> hs(self);
    Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(method->GetDexCache()));
    Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(method->GetClassLoader()));

    resolved_field = class_linker->ResolveFieldJLS(field_idx,
                                                   h_dex_cache,
                                                   h_class_loader);
  } else {
    // Fast path: The verifier would already have called ResolveFieldJLS and we wouldn't
    // be executing here if there was a static/non-static mismatch.
    resolved_field = class_linker->ResolveField(field_idx, referrer, is_static);
  }

  if (UNLIKELY(resolved_field == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  }
  ObjPtr<mirror::Class> fields_class = resolved_field->GetDeclaringClass();
  if (access_check) {
    if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
      ThrowIncompatibleClassChangeErrorField(resolved_field, is_static, referrer);
      return nullptr;
    }
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CheckResolvedFieldAccess(fields_class,
                                                            resolved_field,
                                                            referrer->GetDexCache(),
                                                            field_idx))) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
    if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class))) {
      ThrowIllegalAccessErrorFinalField(referrer, resolved_field);
      return nullptr;  // Failure.
    } else {
      if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
                   resolved_field->FieldSize() != expected_size)) {
        self->ThrowNewExceptionF("Ljava/lang/NoSuchFieldError;",
                                 "Attempted read of %zd-bit %s on field '%s'",
                                 expected_size * (32 / sizeof(int32_t)),  // Bytes to bits.
                                 is_primitive ? "primitive" : "non-primitive",
                                 resolved_field->PrettyField(true).c_str());
        return nullptr;  // Failure.
      }
    }
  }
  if (!is_static) {
    // Instance fields are accessed through an existing instance, so the declaring class
    // must already be initialized.
    return resolved_field;
  } else {
    // If the class is initialized we're done.
    if (LIKELY(fields_class->IsInitialized())) {
      return resolved_field;
    } else {
      // Otherwise, ensure the class is initialized before returning the field.
      StackHandleScope<1> hs(self);
      if (LIKELY(class_linker->EnsureInitialized(self, hs.NewHandle(fields_class), true, true))) {
        return resolved_field;
      }
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
  }
}

// Explicit template instantiations of FindFieldFromCode for all field access types.
#define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
template REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE \
ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \
                                                  ArtMethod* referrer, \
                                                  Thread* self, size_t expected_size)

#define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite);

#undef EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL

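// Slow-path method resolution and dispatch for an invoke of the given type.
// Resolves |method_idx|, performs the null check and (optionally) the access
// checks the verifier could not prove, then selects the actual target: the
// resolved method for static/direct calls, a vtable entry for virtual calls,
// the superclass or interface method for super calls, and the IMT entry or
// interface table lookup for interface calls.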
template<InvokeType type, bool access_check>
inline ArtMethod* FindMethodFromCode(uint32_t method_idx,
                                     ObjPtr<mirror::Object>* this_object,
                                     ArtMethod* referrer,
                                     Thread* self) {
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  constexpr ClassLinker::ResolveMode resolve_mode =
      access_check ? ClassLinker::ResolveMode::kCheckICCEAndIAE
                   : ClassLinker::ResolveMode::kNoChecks;
  ArtMethod* resolved_method;
  if (type == kStatic) {
    resolved_method = class_linker->ResolveMethod<resolve_mode>(self, method_idx, referrer, type);
  } else {
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Object> h_this(hs.NewHandleWrapper(this_object));
    resolved_method = class_linker->ResolveMethod<resolve_mode>(self, method_idx, referrer, type);
  }
  if (UNLIKELY(resolved_method == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  }
  // Next, null pointer check.
  if (UNLIKELY(*this_object == nullptr && type != kStatic)) {
    if (UNLIKELY(resolved_method->GetDeclaringClass()->IsStringClass() &&
                 resolved_method->IsConstructor())) {
      // Hack for String init:
      //
      // We assume that the input of String.<init> in verified code is always
      // an uninitialized reference. If it is a null constant, it must have been
      // optimized out by the compiler. Do not throw NullPointerException.
    } else {
      // Maintain interpreter-like semantics where NullPointerException is thrown
      // after a potential NoSuchMethodError from the class linker.
      ThrowNullPointerExceptionForMethodAccess(method_idx, type);
      return nullptr;  // Failure.
    }
  }
  switch (type) {
    case kStatic:
    case kDirect:
      return resolved_method;
    case kVirtual: {
      ObjPtr<mirror::Class> klass = (*this_object)->GetClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check &&
          (!klass->HasVTable() ||
           vtable_index >= static_cast<uint32_t>(klass->GetVTableLength()))) {
        // This behavior agrees with that of the verifier.
        ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                               resolved_method->GetName(), resolved_method->GetSignature());
        return nullptr;  // Failure.
      }
      DCHECK(klass->HasVTable()) << klass->PrettyClass();
      return klass->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize());
    }
    case kSuper: {
      // TODO This lookup is quite slow.
      // NB This is actually quite tricky to do any other way. We cannot use GetDeclaringClass
      //    since that will actually not be what we want in some cases where there are miranda
      //    methods or defaults. What we actually need is a GetContainingClass that says which
      //    class's virtuals this method comes from.
      StackHandleScope<2> hs2(self);
      HandleWrapperObjPtr<mirror::Object> h_this(hs2.NewHandleWrapper(this_object));
      Handle<mirror::Class> h_referring_class(hs2.NewHandle(referrer->GetDeclaringClass()));
      const dex::TypeIndex method_type_idx =
          referrer->GetDexFile()->GetMethodId(method_idx).class_idx_;
      ObjPtr<mirror::Class> method_reference_class =
          class_linker->ResolveType(method_type_idx, referrer);
      if (UNLIKELY(method_reference_class == nullptr)) {
        // Bad type idx.
        CHECK(self->IsExceptionPending());
        return nullptr;
      } else if (!method_reference_class->IsInterface()) {
        // It is not an interface. If the referring class is in the class hierarchy of the
        // referenced class in the bytecode, we use its super class. Otherwise, we throw
        // a NoSuchMethodError.
        ObjPtr<mirror::Class> super_class = nullptr;
        if (method_reference_class->IsAssignableFrom(h_referring_class.Get())) {
          super_class = h_referring_class->GetSuperClass();
        }
        uint16_t vtable_index = resolved_method->GetMethodIndex();
        if (access_check) {
          // Check existence of the super class.
          if (super_class == nullptr ||
              !super_class->HasVTable() ||
              vtable_index >= static_cast<uint32_t>(super_class->GetVTableLength())) {
            // This behavior agrees with that of the verifier.
            ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                   resolved_method->GetName(), resolved_method->GetSignature());
            return nullptr;  // Failure.
          }
        }
        DCHECK(super_class != nullptr);
        DCHECK(super_class->HasVTable());
        return super_class->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize());
      } else {
        // It is an interface.
        if (access_check) {
          if (!method_reference_class->IsAssignableFrom(h_this->GetClass())) {
            ThrowIncompatibleClassChangeErrorClassForInterfaceSuper(resolved_method,
                                                                    method_reference_class,
                                                                    h_this.Get(),
                                                                    referrer);
            return nullptr;  // Failure.
          }
        }
        // TODO We can do better than this for a (compiled) fastpath.
        ArtMethod* result = method_reference_class->FindVirtualMethodForInterfaceSuper(
            resolved_method, class_linker->GetImagePointerSize());
        // Throw a NoSuchMethodError if the lookup returned null.
        if (result == nullptr) {
          ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                 resolved_method->GetName(), resolved_method->GetSignature());
        }
        return result;
      }
      UNREACHABLE();
    }
    case kInterface: {
      uint32_t imt_index = ImTable::GetImtIndex(resolved_method);
      PointerSize pointer_size = class_linker->GetImagePointerSize();
      ObjPtr<mirror::Class> klass = (*this_object)->GetClass();
      ArtMethod* imt_method = klass->GetImt(pointer_size)->Get(imt_index, pointer_size);
      if (!imt_method->IsRuntimeMethod()) {
        if (kIsDebugBuild) {
          ArtMethod* method = klass->FindVirtualMethodForInterface(
              resolved_method, class_linker->GetImagePointerSize());
          CHECK_EQ(imt_method, method) << ArtMethod::PrettyMethod(resolved_method) << " / "
                                       << imt_method->PrettyMethod() << " / "
                                       << ArtMethod::PrettyMethod(method) << " / "
                                       << klass->PrettyClass();
        }
        return imt_method;
      } else {
        ArtMethod* interface_method = klass->FindVirtualMethodForInterface(
            resolved_method, class_linker->GetImagePointerSize());
        if (UNLIKELY(interface_method == nullptr)) {
          ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method,
                                                                     *this_object, referrer);
          return nullptr;  // Failure.
        }
        return interface_method;
      }
    }
    default:
      LOG(FATAL) << "Unknown invoke type " << type;
      return nullptr;  // Failure.
  }
}

// Explicit template instantiations of FindMethodFromCode for all invoke types.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check)                  \
  template REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE                              \
  ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx,                  \
                                                      ObjPtr<mirror::Object>* this_object,  \
                                                      ArtMethod* referrer,                  \
                                                      Thread* self)
#define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false);   \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kStatic);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kDirect);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kVirtual);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kSuper);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface);

#undef EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL

// Fast path field resolution that can't initialize classes or throw exceptions.
inline ArtField* FindFieldFast(uint32_t field_idx, ArtMethod* referrer, FindFieldType type,
                               size_t expected_size) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  ArtField* resolved_field =
      referrer->GetDexCache()->GetResolvedField(field_idx, kRuntimePointerSize);
  if (UNLIKELY(resolved_field == nullptr)) {
    return nullptr;
  }
  // Check for incompatible class change.
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break;
    case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break;
    case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break;
    case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break;
    case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break;
    case StaticPrimitiveWrite:   is_primitive = true;  is_set = true;  is_static = true;  break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
    // Incompatible class change.
    return nullptr;
  }
  ObjPtr<mirror::Class> fields_class = resolved_field->GetDeclaringClass();
  if (is_static) {
    // Check that the class is initialized and fail otherwise, so that the slow path can
    // contend with other threads racing to initialize the class.
    if (UNLIKELY(!fields_class->IsInitialized())) {
      return nullptr;
    }
  }
  ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
  if (UNLIKELY(!referring_class->CanAccess(fields_class) ||
               !referring_class->CanAccessMember(fields_class, resolved_field->GetAccessFlags()) ||
               (is_set && resolved_field->IsFinal() && (fields_class != referring_class)))) {
    // Illegal access.
    return nullptr;
  }
  if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
               resolved_field->FieldSize() != expected_size)) {
    return nullptr;
  }
  return resolved_field;
}

// Fast path method resolution that can't throw exceptions.
template <InvokeType type, bool access_check>
inline ArtMethod* FindMethodFast(uint32_t method_idx,
                                 ObjPtr<mirror::Object> this_object,
                                 ArtMethod* referrer) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  if (UNLIKELY(this_object == nullptr && type != kStatic)) {
    return nullptr;
  }
  ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
  ObjPtr<mirror::DexCache> dex_cache = referrer->GetDexCache();
  constexpr ClassLinker::ResolveMode resolve_mode = access_check
      ? ClassLinker::ResolveMode::kCheckICCEAndIAE
      : ClassLinker::ResolveMode::kNoChecks;
  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  ArtMethod* resolved_method = linker->GetResolvedMethod<type, resolve_mode>(method_idx, referrer);
  if (UNLIKELY(resolved_method == nullptr)) {
    return nullptr;
  }
  if (type == kInterface) {  // Most common form of slow path dispatch.
    return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method,
                                                                  kRuntimePointerSize);
  } else if (type == kStatic || type == kDirect) {
    return resolved_method;
  } else if (type == kSuper) {
    // TODO This lookup is rather slow.
    dex::TypeIndex method_type_idx = dex_cache->GetDexFile()->GetMethodId(method_idx).class_idx_;
    ObjPtr<mirror::Class> method_reference_class = linker->LookupResolvedType(
        method_type_idx, dex_cache, referrer->GetClassLoader());
    if (method_reference_class == nullptr) {
      // Need to do full type resolution...
      return nullptr;
    } else if (!method_reference_class->IsInterface()) {
      // It is not an interface. If the referring class is in the class hierarchy of the
      // referenced class in the bytecode, we use its super class. Otherwise, we cannot
      // resolve the method.
      if (!method_reference_class->IsAssignableFrom(referring_class)) {
        return nullptr;
      }
      ObjPtr<mirror::Class> super_class = referring_class->GetSuperClass();
      if (resolved_method->GetMethodIndex() >= super_class->GetVTableLength()) {
        // The super class does not have the method.
        return nullptr;
      }
      return super_class->GetVTableEntry(resolved_method->GetMethodIndex(), kRuntimePointerSize);
    } else {
      return method_reference_class->FindVirtualMethodForInterfaceSuper(
          resolved_method, kRuntimePointerSize);
    }
  } else {
    DCHECK(type == kVirtual);
    return this_object->GetClass()->GetVTableEntry(
        resolved_method->GetMethodIndex(), kRuntimePointerSize);
  }
}

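// Resolve the type at |type_idx|, optionally verifying that the referrer's
// class can access it and optionally running the class initializer. Returns
// null with an exception pending on failure.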
inline ObjPtr<mirror::Class> ResolveVerifyAndClinit(dex::TypeIndex type_idx,
                                                    ArtMethod* referrer,
                                                    Thread* self,
                                                    bool can_run_clinit,
                                                    bool verify_access) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ObjPtr<mirror::Class> klass = class_linker->ResolveType(type_idx, referrer);
  if (UNLIKELY(klass == nullptr)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // Perform access check if necessary.
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (verify_access && UNLIKELY(!referring_class->CanAccess(klass))) {
    ThrowIllegalAccessErrorClass(referring_class, klass);
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // If we're just implementing const-class, we shouldn't call <clinit>.
  if (!can_run_clinit) {
    return klass;
  }
  // If we are the <clinit> of this class, just return our storage.
  //
  // Do not set the DexCache InitializedStaticStorage, since that implies <clinit> has finished
  // running.
  if (klass == referring_class && referrer->IsConstructor() && referrer->IsStatic()) {
    return klass;
  }
  StackHandleScope<1> hs(self);
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  if (!class_linker->EnsureInitialized(self, h_class, true, true)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  return h_class.Get();
}

static inline ObjPtr<mirror::String> ResolveString(ClassLinker* class_linker,
                                                   dex::StringIndex string_idx,
                                                   ArtMethod* referrer)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Thread::PoisonObjectPointersIfDebug();
  ObjPtr<mirror::String> string = referrer->GetDexCache()->GetResolvedString(string_idx);
  if (UNLIKELY(string == nullptr)) {
    StackHandleScope<1> hs(Thread::Current());
    Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
    string = class_linker->ResolveString(string_idx, dex_cache);
  }
  return string;
}

inline ObjPtr<mirror::String> ResolveStringFromCode(ArtMethod* referrer,
                                                    dex::StringIndex string_idx) {
  Thread::PoisonObjectPointersIfDebug();
  ObjPtr<mirror::String> string = referrer->GetDexCache()->GetResolvedString(string_idx);
  if (UNLIKELY(string == nullptr)) {
    StackHandleScope<1> hs(Thread::Current());
    Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    string = class_linker->ResolveString(string_idx, dex_cache);
  }
  return string;
}

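// Exit the monitor that was implicitly entered for a synchronized JNI method,
// preserving any already-pending exception across the MonitorExit call. A
// second exception thrown by the unlock itself is fatal.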
inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self) {
  // Save any pending exception over the monitor exit call.
  mirror::Throwable* saved_exception = nullptr;
  if (UNLIKELY(self->IsExceptionPending())) {
    saved_exception = self->GetException();
    self->ClearException();
  }
  // Decode the locked object and unlock, before popping local references.
  self->DecodeJObject(locked)->MonitorExit(self);
  if (UNLIKELY(self->IsExceptionPending())) {
    LOG(FATAL) << "Synchronized JNI code returning with an exception:\n"
        << saved_exception->Dump()
        << "\nEncountered second exception during implicit MonitorExit:\n"
        << self->GetException()->Dump();
  }
  // Restore the pending exception.
  if (saved_exception != nullptr) {
    self->SetException(saved_exception);
  }
}

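// Convert a floating-point value to an integral type, following Java's rules
// for narrowing conversions: NaN converts to 0 and out-of-range values
// saturate to the target type's min/max. For example,
// art_float_to_integral<int32_t, float>(1e10f) yields INT32_MAX and
// art_float_to_integral<int32_t, float>(NAN) yields 0.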
template <typename INT_TYPE, typename FLOAT_TYPE>
inline INT_TYPE art_float_to_integral(FLOAT_TYPE f) {
  const INT_TYPE kMaxInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::max());
  const INT_TYPE kMinInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::min());
  const FLOAT_TYPE kMaxIntAsFloat = static_cast<FLOAT_TYPE>(kMaxInt);
  const FLOAT_TYPE kMinIntAsFloat = static_cast<FLOAT_TYPE>(kMinInt);
  if (LIKELY(f > kMinIntAsFloat)) {
    if (LIKELY(f < kMaxIntAsFloat)) {
      return static_cast<INT_TYPE>(f);
    } else {
      return kMaxInt;
    }
  } else {
    return (f != f) ? 0 : kMinInt;  // f != f implies NaN.
  }
}

}  // namespace art

#endif  // ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_