/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_ART_METHOD_H_
#define ART_RUNTIME_MIRROR_ART_METHOD_H_

#include "dex_file.h"
#include "gc_root.h"
#include "invoke_type.h"
#include "modifiers.h"
#include "object.h"
#include "object_callbacks.h"
#include "quick/quick_method_frame_info.h"
#include "read_barrier_option.h"
#include "stack.h"

namespace art {

struct ArtMethodOffsets;
struct ConstructorMethodOffsets;
union JValue;
class MethodHelper;
class ScopedObjectAccessAlreadyRunnable;
class StringPiece;
class ShadowFrame;

namespace mirror {

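// Signature of the trampoline the interpreter uses to dispatch a method invocation; calling
// through this pointer may bridge into compiled code, and any return value is written to
// |result| (see entry_point_from_interpreter_ below).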
typedef void (EntryPointFromInterpreter)(Thread* self, MethodHelper& mh,
    const DexFile::CodeItem* code_item, ShadowFrame* shadow_frame, JValue* result);

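// When defined, the 32-bit fields before ptr_sized_fields_ occupy a multiple of 4 but not of
// 8 bytes, so on 64-bit targets 4 bytes of padding precede ptr_sized_fields_ to keep it
// pointer-aligned; SizeWithoutPointerFields and PtrSizedFieldsOffset account for this.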
#define ART_METHOD_HAS_PADDING_FIELD_ON_64_BIT

// C++ mirror of java.lang.reflect.ArtMethod.
class MANAGED ArtMethod FINAL : public Object {
 public:
  // Size of java.lang.reflect.ArtMethod.class.
  static uint32_t ClassSize();

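  // Returns the ArtMethod backing a java.lang.reflect.Method or Constructor reference,
  // e.g. (illustrative) a jobject obtained from Class.getDeclaredMethod() via JNI.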
  static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
                                        jobject jlr_method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  Class* GetDeclaringClass() ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void SetDeclaringClass(Class *new_declaring_class) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
  }

  ALWAYS_INLINE uint32_t GetAccessFlags() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void SetAccessFlags(uint32_t new_access_flags) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // Not called within a transaction.
    SetField32<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, access_flags_), new_access_flags);
  }

  // Approximate what kind of method call would be used for this method.
  InvokeType GetInvokeType() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Returns true if the method is declared public.
  bool IsPublic() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccStatic) != 0;
  }

  // Returns true if the method is a constructor.
  bool IsConstructor() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccConstructor) != 0;
  }

  // Returns true if the method is a class initializer.
  bool IsClassInitializer() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return IsConstructor() && IsStatic();
  }

  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    return (access_flags & (kAccStatic | kAccPrivate | kAccConstructor)) != 0;
  }

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uint32_t mask = kAccSynchronized | kAccDeclaredSynchronized;
    return (GetAccessFlags() & mask) != 0;
  }

  bool IsFinal() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccFinal) != 0;
  }

  bool IsMiranda() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccMiranda) != 0;
  }

  bool IsNative() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccNative) != 0;
  }

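  // "Fast native" requires both kAccNative and kAccFastNative to be set, hence the mask
  // equality check below rather than a simple bit test.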
  bool IsFastNative() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uint32_t mask = kAccFastNative | kAccNative;
    return (GetAccessFlags() & mask) == mask;
  }

  bool IsAbstract() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccAbstract) != 0;
  }

  bool IsSynthetic() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccSynthetic) != 0;
  }

  bool IsProxyMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsPreverified() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccPreverified) != 0;
  }

  void SetPreverified() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(!IsPreverified());
    SetAccessFlags(GetAccessFlags() | kAccPreverified);
  }

  bool IsPortableCompiled() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return kUsePortableCompiler && ((GetAccessFlags() & kAccPortableCompiled) != 0);
  }

  void SetIsPortableCompiled() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(!IsPortableCompiled());
    SetAccessFlags(GetAccessFlags() | kAccPortableCompiled);
  }

  void ClearIsPortableCompiled() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(IsPortableCompiled());
    SetAccessFlags(GetAccessFlags() & ~kAccPortableCompiled);
  }

  bool CheckIncompatibleClassChange(InvokeType type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  uint16_t GetMethodIndex() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Doesn't do erroneous / unresolved class checks.
  uint16_t GetMethodIndexDuringLinking() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  size_t GetVtableIndex() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // Not called within a transaction.
    SetField32<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_), new_method_index);
  }

  static MemberOffset MethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_);
  }

  uint32_t GetCodeItemOffset() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_code_item_offset_));
  }

  void SetCodeItemOffset(uint32_t new_code_off) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // Not called within a transaction.
    SetField32<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_code_item_offset_), new_code_off);
  }

  // Number of 32-bit registers that would be required to hold all the arguments.
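  // For example (illustrative), a method with shorty "VJI" (long and int arguments) needs
  // three registers: the wide types 'J' and 'D' take two registers each, every other type
  // takes one, and the leading return-type character is not counted.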
  static size_t NumArgRegisters(const StringPiece& shorty);

  ALWAYS_INLINE uint32_t GetDexMethodIndex() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void SetDexMethodIndex(uint32_t new_idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // Not called within a transaction.
    SetField32<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_method_index_), new_idx);
  }

  static MemberOffset DexCacheResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_methods_);
  }

  static MemberOffset DexCacheResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_types_);
  }

  ALWAYS_INLINE ArtMethod* GetDexCacheResolvedMethod(uint16_t method_idx)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  ALWAYS_INLINE void SetDexCacheResolvedMethod(uint16_t method_idx, ArtMethod* new_method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  ALWAYS_INLINE void SetDexCacheResolvedMethods(ObjectArray<ArtMethod>* new_dex_cache_methods)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  bool HasDexCacheResolvedMethods() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedMethods(ArtMethod* other) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedMethods(ObjectArray<ArtMethod>* other_cache)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template <bool kWithCheck = true>
  Class* GetDexCacheResolvedType(uint32_t type_idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void SetDexCacheResolvedTypes(ObjectArray<Class>* new_dex_cache_types)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  bool HasDexCacheResolvedTypes() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedTypes(ArtMethod* other) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedTypes(ObjectArray<Class>* other_cache)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Find the method that this method overrides.
  ArtMethod* FindOverriddenMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, const char* shorty)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  EntryPointFromInterpreter* GetEntryPointFromInterpreter()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    CheckObjectSizeEqualsMirrorSize();
    return GetEntryPointFromInterpreterPtrSize(sizeof(void*));
  }
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  EntryPointFromInterpreter* GetEntryPointFromInterpreterPtrSize(size_t pointer_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return GetFieldPtrWithSize<EntryPointFromInterpreter*, kVerifyFlags>(
        EntryPointFromInterpreterOffset(pointer_size), pointer_size);
  }

  template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetEntryPointFromInterpreter(EntryPointFromInterpreter* entry_point_from_interpreter)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    CheckObjectSizeEqualsMirrorSize();
    SetEntryPointFromInterpreterPtrSize(entry_point_from_interpreter, sizeof(void*));
  }
  template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetEntryPointFromInterpreterPtrSize(EntryPointFromInterpreter* entry_point_from_interpreter,
                                           size_t pointer_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    SetFieldPtrWithSize<false, true, kVerifyFlags>(
        EntryPointFromInterpreterOffset(pointer_size), entry_point_from_interpreter, pointer_size);
  }

#if defined(ART_USE_PORTABLE_COMPILER)
  ALWAYS_INLINE static MemberOffset EntryPointFromPortableCompiledCodeOffset(size_t pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_portable_compiled_code_) / sizeof(void*) * pointer_size);
  }

  template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  const void* GetEntryPointFromPortableCompiledCode()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    CheckObjectSizeEqualsMirrorSize();
    return GetEntryPointFromPortableCompiledCodePtrSize(sizeof(void*));
  }

  template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE const void* GetEntryPointFromPortableCompiledCodePtrSize(size_t pointer_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return GetFieldPtrWithSize<const void*, kVerifyFlags>(
        EntryPointFromPortableCompiledCodeOffset(pointer_size), pointer_size);
  }

  template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetEntryPointFromPortableCompiledCode(const void* entry_point_from_portable_compiled_code)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    CheckObjectSizeEqualsMirrorSize();
    return SetEntryPointFromPortableCompiledCodePtrSize(entry_point_from_portable_compiled_code,
                                                        sizeof(void*));
  }

  template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetEntryPointFromPortableCompiledCodePtrSize(
      const void* entry_point_from_portable_compiled_code, size_t pointer_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    SetFieldPtrWithSize<false, true, kVerifyFlags>(
        EntryPointFromPortableCompiledCodeOffset(pointer_size),
        entry_point_from_portable_compiled_code, pointer_size);
  }
#endif

  template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  const void* GetEntryPointFromQuickCompiledCode() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    CheckObjectSizeEqualsMirrorSize();
    return GetEntryPointFromQuickCompiledCodePtrSize(sizeof(void*));
  }
  template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE const void* GetEntryPointFromQuickCompiledCodePtrSize(size_t pointer_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return GetFieldPtrWithSize<const void*, kVerifyFlags>(
        EntryPointFromQuickCompiledCodeOffset(pointer_size), pointer_size);
  }

  template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetEntryPointFromQuickCompiledCode(const void* entry_point_from_quick_compiled_code)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    CheckObjectSizeEqualsMirrorSize();
    SetEntryPointFromQuickCompiledCodePtrSize(entry_point_from_quick_compiled_code,
                                              sizeof(void*));
  }
  template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(
      const void* entry_point_from_quick_compiled_code, size_t pointer_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    SetFieldPtrWithSize<false, true, kVerifyFlags>(
        EntryPointFromQuickCompiledCodeOffset(pointer_size), entry_point_from_quick_compiled_code,
        pointer_size);
  }

  uint32_t GetCodeSize() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsWithinQuickCode(uintptr_t pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uintptr_t code = reinterpret_cast<uintptr_t>(GetEntryPointFromQuickCompiledCode());
    if (code == 0) {
      return pc == 0;
    }
    /*
     * During a stack walk, a return PC may point past-the-end of the code
     * in the case that the last instruction is a call that isn't expected to
     * return.  Thus, we check <= code + GetCodeSize().
     *
     * NOTE: For Thumb both pc and code are offset by 1 indicating the Thumb state.
     */
    return code <= pc && pc <= code + GetCodeSize();
  }

  void AssertPcIsWithinQuickCode(uintptr_t pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

#if defined(ART_USE_PORTABLE_COMPILER)
  uint32_t GetPortableOatCodeOffset() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void SetPortableOatCodeOffset(uint32_t code_offset) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
#endif
  uint32_t GetQuickOatCodeOffset() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void SetQuickOatCodeOffset(uint32_t code_offset) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

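  // Strips the ISA mode bit from an entry point to recover the code address proper; e.g.
  // (illustrative) a Thumb entry point of (addr | 0x1) signals Thumb state, and masking off
  // bit 0 yields addr.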
  ALWAYS_INLINE static const void* EntryPointToCodePointer(const void* entry_point) {
    uintptr_t code = reinterpret_cast<uintptr_t>(entry_point);
    code &= ~0x1;  // TODO: Make this Thumb2 specific.
    return reinterpret_cast<const void*>(code);
  }

  // Actual entry point pointer to compiled oat code or nullptr.
  const void* GetQuickOatEntryPoint(size_t pointer_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  // Actual pointer to compiled oat code or nullptr.
  const void* GetQuickOatCodePointer(size_t pointer_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Callers should wrap the uint8_t* in a MappingTable instance for convenient access.
  const uint8_t* GetMappingTable(size_t pointer_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  const uint8_t* GetMappingTable(const void* code_pointer, size_t pointer_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Callers should wrap the uint8_t* in a VmapTable instance for convenient access.
  const uint8_t* GetVmapTable(size_t pointer_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  const uint8_t* GetVmapTable(const void* code_pointer, size_t pointer_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Callers should wrap the uint8_t* in a GcMap instance for convenient access.
  const uint8_t* GetNativeGcMap(size_t pointer_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  const uint8_t* GetNativeGcMap(const void* code_pointer, size_t pointer_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template <bool kCheckFrameSize = true>
  uint32_t GetFrameSizeInBytes() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uint32_t result = GetQuickFrameInfo().FrameSizeInBytes();
    if (kCheckFrameSize) {
      DCHECK_LE(static_cast<size_t>(kStackAlignment), result);
    }
    return result;
  }

  QuickMethodFrameInfo GetQuickFrameInfo() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  QuickMethodFrameInfo GetQuickFrameInfo(const void* code_pointer)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  size_t GetReturnPcOffsetInBytes() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return GetReturnPcOffsetInBytes(GetFrameSizeInBytes());
  }

  size_t GetReturnPcOffsetInBytes(uint32_t frame_size_in_bytes)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK_EQ(frame_size_in_bytes, GetFrameSizeInBytes());
    return frame_size_in_bytes - kPointerSize;
  }

  FrameOffset GetHandleScopeOffsetInBytes() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    constexpr size_t handle_scope_offset = sizeof(StackReference<mirror::ArtMethod>);
    DCHECK_LT(handle_scope_offset, GetFrameSizeInBytes());
    return FrameOffset(handle_scope_offset);
  }

  void RegisterNative(Thread* self, const void* native_method, bool is_fast)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void UnregisterNative(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

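  // The offset helpers below work for any target pointer size: OFFSETOF_MEMBER yields the
  // member's offset within PtrSizedFields in terms of the compiling toolchain's pointer size,
  // dividing by sizeof(void*) turns that into a field index, and multiplying by |pointer_size|
  // rescales it to the target. For example, entry_point_from_jni_ is field 1, so it sits 4
  // bytes past PtrSizedFieldsOffset() on 32-bit targets and 8 bytes past it on 64-bit targets.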
  static MemberOffset EntryPointFromInterpreterOffset(size_t pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_interpreter_) / sizeof(void*) * pointer_size);
  }

  static MemberOffset EntryPointFromJniOffset(size_t pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_jni_) / sizeof(void*) * pointer_size);
  }

  static MemberOffset EntryPointFromQuickCompiledCodeOffset(size_t pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_quick_compiled_code_) / sizeof(void*) * pointer_size);
  }

  void* GetEntryPointFromJni() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    CheckObjectSizeEqualsMirrorSize();
    return GetEntryPointFromJniPtrSize(sizeof(void*));
  }
  ALWAYS_INLINE void* GetEntryPointFromJniPtrSize(size_t pointer_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return GetFieldPtrWithSize<void*>(EntryPointFromJniOffset(pointer_size), pointer_size);
  }

  template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetEntryPointFromJni(const void* entrypoint) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    CheckObjectSizeEqualsMirrorSize();
    SetEntryPointFromJniPtrSize<kVerifyFlags>(entrypoint, sizeof(void*));
  }
  template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetEntryPointFromJniPtrSize(const void* entrypoint, size_t pointer_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    SetFieldPtrWithSize<false, true, kVerifyFlags>(
        EntryPointFromJniOffset(pointer_size), entrypoint, pointer_size);
  }

  static MemberOffset GetMethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_);
  }

  // Is this a CalleeSaveMethod or ResolutionMethod and therefore does not adhere to the normal
  // conventions for a method of managed code? Returns false for Proxy methods.
  bool IsRuntimeMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Is this a hand-crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsResolutionMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsImtConflictMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsImtUnimplementedMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  uintptr_t NativePcOffset(const uintptr_t pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  uintptr_t NativePcOffset(const uintptr_t pc, const void* quick_entry_point)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a native PC to a dex PC.
  uint32_t ToDexPc(const uintptr_t pc, bool abort_on_failure = true)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a dex PC to a native PC.
  uintptr_t ToNativePc(const uint32_t dex_pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Find the catch block for the given exception type and dex_pc. When a catch block is found,
  // indicates whether the found catch block is responsible for clearing the exception or whether
  // a move-exception instruction is present.
  static uint32_t FindCatchBlock(Handle<ArtMethod> h_this, Handle<Class> exception_type,
                                 uint32_t dex_pc, bool* has_no_move_exception)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static void SetClass(Class* java_lang_reflect_ArtMethod);

  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  static Class* GetJavaLangReflectArtMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static void ResetClass();

  static void VisitRoots(RootCallback* callback, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const DexFile* GetDexFile() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const char* GetDeclaringClassDescriptor() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const char* GetShorty() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uint32_t unused_length;
    return GetShorty(&unused_length);
  }

  const char* GetShorty(uint32_t* out_length) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const Signature GetSignature() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetName() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const DexFile::CodeItem* GetCodeItem() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsResolvedTypeIdx(uint16_t type_idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  int32_t GetLineNumFromDexPC(uint32_t dex_pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const DexFile::ProtoId& GetPrototype() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const DexFile::TypeList* GetParameterTypeList() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const char* GetDeclaringClassSourceFile() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  uint16_t GetClassDefIndex() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const DexFile::ClassDef& GetClassDef() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const char* GetReturnTypeDescriptor() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const char* GetTypeDescriptorFromTypeIdx(uint16_t type_idx)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  mirror::ClassLoader* GetClassLoader() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  mirror::DexCache* GetDexCache() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetInterfaceMethodIfProxy() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static size_t SizeWithoutPointerFields(size_t pointer_size) {
    size_t total = sizeof(ArtMethod) - sizeof(PtrSizedFields);
#ifdef ART_METHOD_HAS_PADDING_FIELD_ON_64_BIT
    // Add 4 bytes if 64 bit, otherwise 0.
    total += pointer_size - sizeof(uint32_t);
#endif
    return total;
  }

  // Size of an instance of java.lang.reflect.ArtMethod.
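  // For example (illustrative), in a build without ART_USE_PORTABLE_COMPILER, PtrSizedFields
  // holds three pointers, so for a 64-bit target this is SizeWithoutPointerFields(8) + 3 * 8.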
  static size_t InstanceSize(size_t pointer_size) {
    return SizeWithoutPointerFields(pointer_size) +
        (sizeof(PtrSizedFields) / sizeof(void*)) * pointer_size;
  }

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of.
  HeapReference<Class> declaring_class_;

  // Shortcut to declaring_class_->dex_cache_'s resolved methods, for fast compiled code access.
  HeapReference<ObjectArray<ArtMethod>> dex_cache_resolved_methods_;

  // Shortcut to declaring_class_->dex_cache_'s resolved types, for fast compiled code access.
  HeapReference<ObjectArray<Class>> dex_cache_resolved_types_;

  // Access flags; low 16 bits are defined by spec.
  uint32_t access_flags_;

  /* Dex file fields. The defining dex file is available via declaring_class_->dex_cache_ */

  // Offset to the CodeItem.
  uint32_t dex_code_item_offset_;

  // Index into method_ids of the dex file associated with this method.
  uint32_t dex_method_index_;

  /* End of dex file fields. */

  // Entry within a dispatch table for this method. For static/direct methods the index is into
  // the declaringClass.directMethods, for virtual methods the vtable and for interface methods the
  // ifTable.
  uint32_t method_index_;

  // Fake padding field gets inserted here.

  // Must be the last fields in the method.
  struct PACKED(4) PtrSizedFields {
    // Method dispatch from the interpreter invokes this pointer which may cause a bridge into
    // compiled code.
    void* entry_point_from_interpreter_;

    // Pointer to JNI function registered to this method, or a function to resolve the JNI function.
    void* entry_point_from_jni_;

    // Method dispatch from quick compiled code invokes this pointer which may cause bridging into
    // portable compiled code or the interpreter.
    void* entry_point_from_quick_compiled_code_;

    // Method dispatch from portable compiled code invokes this pointer which may cause bridging
    // into quick compiled code or the interpreter. Last to simplify entrypoint logic.
#if defined(ART_USE_PORTABLE_COMPILER)
    void* entry_point_from_portable_compiled_code_;
#endif
  } ptr_sized_fields_;

  static GcRoot<Class> java_lang_reflect_ArtMethod_;

 private:
  ALWAYS_INLINE void CheckObjectSizeEqualsMirrorSize() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ALWAYS_INLINE ObjectArray<ArtMethod>* GetDexCacheResolvedMethods()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ALWAYS_INLINE ObjectArray<Class>* GetDexCacheResolvedTypes()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static size_t PtrSizedFieldsOffset(size_t pointer_size) {
    size_t offset = OFFSETOF_MEMBER(ArtMethod, ptr_sized_fields_);
#ifdef ART_METHOD_HAS_PADDING_FIELD_ON_64_BIT
    // Add 4 bytes if 64 bit, otherwise 0.
    offset += pointer_size - sizeof(uint32_t);
#endif
    return offset;
  }

  friend struct art::ArtMethodOffsets;  // for verifying offset information
  DISALLOW_IMPLICIT_CONSTRUCTORS(ArtMethod);
};

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_ART_METHOD_H_