/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ART_METHOD_H_
#define ART_RUNTIME_ART_METHOD_H_

#include "base/bit_utils.h"
#include "base/casts.h"
#include "dex_file.h"
#include "gc_root.h"
#include "invoke_type.h"
#include "method_reference.h"
#include "modifiers.h"
#include "mirror/object.h"
#include "read_barrier_option.h"
#include "stack.h"
#include "utils.h"

namespace art {

union JValue;
class OatQuickMethodHeader;
class ProfilingInfo;
class ScopedObjectAccessAlreadyRunnable;
class StringPiece;
class ShadowFrame;

namespace mirror {
class Array;
class Class;
class IfTable;
class PointerArray;
}  // namespace mirror

// Table to resolve IMT conflicts at runtime. The table is attached to
// the jni entrypoint of IMT conflict ArtMethods.
// The table contains a list of pairs of { interface_method, implementation_method }
// with the last entry being null to make an assembly implementation of a lookup
// faster.
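//
// For illustration (not part of the runtime contract), a table holding two
// conflicts on a 64-bit image is laid out as six consecutive pointer-sized
// slots:
//   { iface0, impl0, iface1, impl1, nullptr, nullptr }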
class ImtConflictTable {
  enum MethodIndex {
    kMethodInterface,
    kMethodImplementation,
    kMethodCount,  // Number of elements in enum.
  };

 public:
  // Build a new table copying `other` and adding the new entry formed of
  // the pair { `interface_method`, `implementation_method` }
  ImtConflictTable(ImtConflictTable* other,
                   ArtMethod* interface_method,
                   ArtMethod* implementation_method,
                   size_t pointer_size) {
    const size_t count = other->NumEntries(pointer_size);
    for (size_t i = 0; i < count; ++i) {
      SetInterfaceMethod(i, pointer_size, other->GetInterfaceMethod(i, pointer_size));
      SetImplementationMethod(i, pointer_size, other->GetImplementationMethod(i, pointer_size));
    }
    SetInterfaceMethod(count, pointer_size, interface_method);
    SetImplementationMethod(count, pointer_size, implementation_method);
    // Add the null marker.
    SetInterfaceMethod(count + 1, pointer_size, nullptr);
    SetImplementationMethod(count + 1, pointer_size, nullptr);
  }

  // `num_entries` excludes the null sentinel entry appended at the end of the table.
  ImtConflictTable(size_t num_entries, size_t pointer_size) {
    SetInterfaceMethod(num_entries, pointer_size, nullptr);
    SetImplementationMethod(num_entries, pointer_size, nullptr);
  }

  // Set an entry at an index.
  void SetInterfaceMethod(size_t index, size_t pointer_size, ArtMethod* method) {
    SetMethod(index * kMethodCount + kMethodInterface, pointer_size, method);
  }

  void SetImplementationMethod(size_t index, size_t pointer_size, ArtMethod* method) {
    SetMethod(index * kMethodCount + kMethodImplementation, pointer_size, method);
  }

  ArtMethod* GetInterfaceMethod(size_t index, size_t pointer_size) const {
    return GetMethod(index * kMethodCount + kMethodInterface, pointer_size);
  }

  ArtMethod* GetImplementationMethod(size_t index, size_t pointer_size) const {
    return GetMethod(index * kMethodCount + kMethodImplementation, pointer_size);
  }

  // Visit all of the entries.
  // NO_THREAD_SAFETY_ANALYSIS so that this can be called with locks held. The visitor is
  // passed a pair of ArtMethod* and returns an updated pair; the order is
  // <interface, implementation>.
  template<typename Visitor>
  void Visit(const Visitor& visitor, size_t pointer_size) NO_THREAD_SAFETY_ANALYSIS {
    uint32_t table_index = 0;
    for (;;) {
      ArtMethod* interface_method = GetInterfaceMethod(table_index, pointer_size);
      if (interface_method == nullptr) {
        break;
      }
      ArtMethod* implementation_method = GetImplementationMethod(table_index, pointer_size);
      auto input = std::make_pair(interface_method, implementation_method);
      std::pair<ArtMethod*, ArtMethod*> updated = visitor(input);
      if (input.first != updated.first) {
        SetInterfaceMethod(table_index, pointer_size, updated.first);
      }
      if (input.second != updated.second) {
        SetImplementationMethod(table_index, pointer_size, updated.second);
      }
      ++table_index;
    }
  }
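
  // Illustrative sketch only: a visitor that rewrites moved methods, assuming a
  // hypothetical `Relocate` helper mapping old method pointers to new locations:
  //
  //   table->Visit([](const std::pair<ArtMethod*, ArtMethod*>& entry) {
  //     return std::make_pair(Relocate(entry.first), Relocate(entry.second));
  //   }, sizeof(void*));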

  // Lookup the implementation ArtMethod associated with `interface_method`. Returns null
  // if not found.
  ArtMethod* Lookup(ArtMethod* interface_method, size_t pointer_size) const {
    uint32_t table_index = 0;
    for (;;) {
      ArtMethod* current_interface_method = GetInterfaceMethod(table_index, pointer_size);
      if (current_interface_method == nullptr) {
        break;
      }
      if (current_interface_method == interface_method) {
        return GetImplementationMethod(table_index, pointer_size);
      }
      ++table_index;
    }
    return nullptr;
  }

  // Compute the number of entries in this table.
  size_t NumEntries(size_t pointer_size) const {
    uint32_t table_index = 0;
    while (GetInterfaceMethod(table_index, pointer_size) != nullptr) {
      ++table_index;
    }
    return table_index;
  }

  // Compute the size in bytes taken by this table, including the end marker.
  size_t ComputeSize(size_t pointer_size) const {
    return ComputeSize(NumEntries(pointer_size), pointer_size);
  }

  // Compute the size in bytes needed to copy the given `table` and add
  // one more entry.
  static size_t ComputeSizeWithOneMoreEntry(ImtConflictTable* table, size_t pointer_size) {
    return table->ComputeSize(pointer_size) + EntrySize(pointer_size);
  }

  // Compute size with a fixed number of entries.
  static size_t ComputeSize(size_t num_entries, size_t pointer_size) {
    return (num_entries + 1) * EntrySize(pointer_size);  // Add one for the null terminator.
  }

  static size_t EntrySize(size_t pointer_size) {
    return pointer_size * static_cast<size_t>(kMethodCount);
  }
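
  // Worked example: with 8-byte pointers, an entry is 2 * 8 = 16 bytes, so
  // ComputeSize(3, 8u) is (3 + 1) * 16 = 64 bytes including the null sentinel.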

 private:
  ArtMethod* GetMethod(size_t index, size_t pointer_size) const {
    if (pointer_size == 8) {
      return reinterpret_cast<ArtMethod*>(static_cast<uintptr_t>(data64_[index]));
    } else {
      DCHECK_EQ(pointer_size, 4u);
      return reinterpret_cast<ArtMethod*>(static_cast<uintptr_t>(data32_[index]));
    }
  }

  void SetMethod(size_t index, size_t pointer_size, ArtMethod* method) {
    if (pointer_size == 8) {
      data64_[index] = dchecked_integral_cast<uint64_t>(reinterpret_cast<uintptr_t>(method));
    } else {
      DCHECK_EQ(pointer_size, 4u);
      data32_[index] = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(method));
    }
  }

  // Array of entries that the assembly stubs will iterate over. Note that this is
  // not a fixed-size array; the backing storage is allocated before the
  // ImtConflictTable constructor runs.
  union {
    uint32_t data32_[0];
    uint64_t data64_[0];
  };

  DISALLOW_COPY_AND_ASSIGN(ImtConflictTable);
};
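
// A minimal usage sketch (hypothetical caller-managed storage; inside ART the
// runtime allocates these tables from its own allocators). Since the entries
// live in the trailing flexible array, the object is placement-new'ed over a
// buffer of ComputeSize() bytes. Given ArtMethod* iface and impl:
//
//   const size_t ptr_size = sizeof(void*);
//   void* storage = ::operator new(ImtConflictTable::ComputeSize(1, ptr_size));
//   ImtConflictTable* table = new (storage) ImtConflictTable(1, ptr_size);
//   table->SetInterfaceMethod(0, ptr_size, iface);
//   table->SetImplementationMethod(0, ptr_size, impl);
//   ArtMethod* found = table->Lookup(iface, ptr_size);  // == impl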

class ArtMethod FINAL {
 public:
  ArtMethod() : access_flags_(0), dex_code_item_offset_(0), dex_method_index_(0),
      method_index_(0) { }

  ArtMethod(ArtMethod* src, size_t image_pointer_size) {
    CopyFrom(src, image_pointer_size);
  }

  static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
                                        jobject jlr_method)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE mirror::Class* GetDeclaringClass() SHARED_REQUIRES(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE mirror::Class* GetDeclaringClassUnchecked()
      SHARED_REQUIRES(Locks::mutator_lock_);

  void SetDeclaringClass(mirror::Class *new_declaring_class)
      SHARED_REQUIRES(Locks::mutator_lock_);

  bool CASDeclaringClass(mirror::Class* expected_class, mirror::Class* desired_class)
      SHARED_REQUIRES(Locks::mutator_lock_);

  static MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
  }

  // Note: GetAccessFlags acquires the mutator lock in debug mode to check that it is not called for
  // a proxy method.
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE uint32_t GetAccessFlags();

  void SetAccessFlags(uint32_t new_access_flags) {
    // Not called within a transaction.
    access_flags_ = new_access_flags;
  }

  // Approximate what kind of method call would be used for this method.
  InvokeType GetInvokeType() SHARED_REQUIRES(Locks::mutator_lock_);

  // Returns true if the method is declared public.
  bool IsPublic() {
    return (GetAccessFlags() & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() {
    return (GetAccessFlags() & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() {
    return (GetAccessFlags() & kAccStatic) != 0;
  }

  // Returns true if the method is a constructor.
  bool IsConstructor() {
    return (GetAccessFlags() & kAccConstructor) != 0;
  }

  // Returns true if the method is a class initializer.
  bool IsClassInitializer() {
    return IsConstructor() && IsStatic();
  }

  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    constexpr uint32_t direct = kAccStatic | kAccPrivate | kAccConstructor;
    return (access_flags & direct) != 0;
  }

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() {
    constexpr uint32_t synchronized_flags = kAccSynchronized | kAccDeclaredSynchronized;
    return (GetAccessFlags() & synchronized_flags) != 0;
  }

  bool IsFinal() {
    return (GetAccessFlags() & kAccFinal) != 0;
  }

  bool IsCopied() {
    const bool copied = (GetAccessFlags() & kAccCopied) != 0;
    // (IsMiranda() || IsDefaultConflicting()) implies copied
    DCHECK(!(IsMiranda() || IsDefaultConflicting()) || copied)
        << "Miranda or default-conflict methods must always be copied.";
    return copied;
  }

  bool IsMiranda() {
    return (GetAccessFlags() & kAccMiranda) != 0;
  }

  // Returns true if invoking this method will not throw an AbstractMethodError or
  // IncompatibleClassChangeError.
  bool IsInvokable() {
    return !IsAbstract() && !IsDefaultConflicting();
  }

  bool IsCompilable() {
    return (GetAccessFlags() & kAccCompileDontBother) == 0;
  }

  // A default conflict method is a special sentinel method that stands for a conflict between
  // multiple default methods. It cannot be invoked, throwing an IncompatibleClassChangeError if one
  // attempts to do so.
  bool IsDefaultConflicting() {
    return (GetAccessFlags() & kAccDefaultConflict) != 0u;
  }

  // This is set by the class linker.
  bool IsDefault() {
    return (GetAccessFlags() & kAccDefault) != 0;
  }

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsNative() {
    return (GetAccessFlags<kReadBarrierOption>() & kAccNative) != 0;
  }

  bool IsFastNative() {
    constexpr uint32_t mask = kAccFastNative | kAccNative;
    return (GetAccessFlags() & mask) == mask;
  }

  bool IsAbstract() {
    return (GetAccessFlags() & kAccAbstract) != 0;
  }

  bool IsSynthetic() {
    return (GetAccessFlags() & kAccSynthetic) != 0;
  }

  bool IsProxyMethod() SHARED_REQUIRES(Locks::mutator_lock_);

  bool SkipAccessChecks() {
    return (GetAccessFlags() & kAccSkipAccessChecks) != 0;
  }

  void SetSkipAccessChecks() {
    DCHECK(!SkipAccessChecks());
    SetAccessFlags(GetAccessFlags() | kAccSkipAccessChecks);
  }

  // Should this method be run in the interpreter and count locks (e.g., failed structured-
  // locking verification)?
  bool MustCountLocks() {
    return (GetAccessFlags() & kAccMustCountLocks) != 0;
  }

  // Returns true if this method could be overridden by a default method.
  bool IsOverridableByDefaultMethod() SHARED_REQUIRES(Locks::mutator_lock_);

  bool CheckIncompatibleClassChange(InvokeType type) SHARED_REQUIRES(Locks::mutator_lock_);

  // Throws the error that would result from trying to invoke this method (i.e.
  // IncompatibleClassChangeError or AbstractMethodError). Only call if !IsInvokable().
  void ThrowInvocationTimeError() SHARED_REQUIRES(Locks::mutator_lock_);

  uint16_t GetMethodIndex() SHARED_REQUIRES(Locks::mutator_lock_);

  // Doesn't do erroneous / unresolved class checks.
  uint16_t GetMethodIndexDuringLinking() SHARED_REQUIRES(Locks::mutator_lock_);

  size_t GetVtableIndex() SHARED_REQUIRES(Locks::mutator_lock_) {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) SHARED_REQUIRES(Locks::mutator_lock_) {
    // Not called within a transaction.
    method_index_ = new_method_index;
  }

  static MemberOffset DexMethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_method_index_);
  }

  static MemberOffset MethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_);
  }

  uint32_t GetCodeItemOffset() {
    return dex_code_item_offset_;
  }

  void SetCodeItemOffset(uint32_t new_code_off) {
    // Not called within a transaction.
    dex_code_item_offset_ = new_code_off;
  }

  // Number of 32-bit registers that would be required to hold all the arguments.
  static size_t NumArgRegisters(const StringPiece& shorty);

  ALWAYS_INLINE uint32_t GetDexMethodIndex() SHARED_REQUIRES(Locks::mutator_lock_);

  void SetDexMethodIndex(uint32_t new_idx) {
    // Not called within a transaction.
    dex_method_index_ = new_idx;
  }

  ALWAYS_INLINE ArtMethod** GetDexCacheResolvedMethods(size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  ALWAYS_INLINE ArtMethod* GetDexCacheResolvedMethod(uint16_t method_index, size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  ALWAYS_INLINE void SetDexCacheResolvedMethod(uint16_t method_index,
                                               ArtMethod* new_method,
                                               size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  ALWAYS_INLINE void SetDexCacheResolvedMethods(ArtMethod** new_dex_cache_methods, size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool HasDexCacheResolvedMethods(size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedMethods(ArtMethod* other, size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedMethods(ArtMethod** other_cache, size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template <bool kWithCheck = true>
  mirror::Class* GetDexCacheResolvedType(uint32_t type_idx, size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void SetDexCacheResolvedTypes(GcRoot<mirror::Class>* new_dex_cache_types, size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool HasDexCacheResolvedTypes(size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedTypes(ArtMethod* other, size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedTypes(GcRoot<mirror::Class>* other_cache, size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Get the Class* from the type index into this method's dex cache.
  mirror::Class* GetClassFromTypeIndex(uint16_t type_idx, bool resolve, size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Returns true if this method has the same name and signature as the other method.
  bool HasSameNameAndSignature(ArtMethod* other) SHARED_REQUIRES(Locks::mutator_lock_);

  // Find the method that this method overrides.
  ArtMethod* FindOverriddenMethod(size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Find the method index for this method within other_dexfile. If this method isn't present then
  // return DexFile::kDexNoIndex. The name_and_signature_idx MUST refer to a MethodId with the same
  // name and signature in the other_dexfile, such as the method index used to resolve this method
  // in the other_dexfile.
  uint32_t FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
                                            uint32_t name_and_signature_idx)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, const char* shorty)
      SHARED_REQUIRES(Locks::mutator_lock_);

  const void* GetEntryPointFromQuickCompiledCode() {
    return GetEntryPointFromQuickCompiledCodePtrSize(sizeof(void*));
  }
  ALWAYS_INLINE const void* GetEntryPointFromQuickCompiledCodePtrSize(size_t pointer_size) {
    return GetNativePointer<const void*>(
        EntryPointFromQuickCompiledCodeOffset(pointer_size), pointer_size);
  }

  void SetEntryPointFromQuickCompiledCode(const void* entry_point_from_quick_compiled_code) {
    SetEntryPointFromQuickCompiledCodePtrSize(entry_point_from_quick_compiled_code,
                                              sizeof(void*));
  }
  ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(
      const void* entry_point_from_quick_compiled_code, size_t pointer_size) {
    SetNativePointer(EntryPointFromQuickCompiledCodeOffset(pointer_size),
                     entry_point_from_quick_compiled_code, pointer_size);
  }

  void RegisterNative(const void* native_method, bool is_fast)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void UnregisterNative() SHARED_REQUIRES(Locks::mutator_lock_);

  static MemberOffset DexCacheResolvedMethodsOffset(size_t pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, dex_cache_resolved_methods_) / sizeof(void*) * pointer_size);
  }

  static MemberOffset DexCacheResolvedTypesOffset(size_t pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, dex_cache_resolved_types_) / sizeof(void*) * pointer_size);
  }

  static MemberOffset EntryPointFromJniOffset(size_t pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_jni_) / sizeof(void*) * pointer_size);
  }

  static MemberOffset EntryPointFromQuickCompiledCodeOffset(size_t pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_quick_compiled_code_) / sizeof(void*) * pointer_size);
  }

  ProfilingInfo* GetProfilingInfo(size_t pointer_size) {
    return reinterpret_cast<ProfilingInfo*>(GetEntryPointFromJniPtrSize(pointer_size));
  }

  ImtConflictTable* GetImtConflictTable(size_t pointer_size) {
    DCHECK(IsRuntimeMethod());
    return reinterpret_cast<ImtConflictTable*>(GetEntryPointFromJniPtrSize(pointer_size));
  }

  ALWAYS_INLINE void SetImtConflictTable(ImtConflictTable* table, size_t pointer_size) {
    SetEntryPointFromJniPtrSize(table, pointer_size);
  }

  ALWAYS_INLINE void SetProfilingInfo(ProfilingInfo* info) {
    SetEntryPointFromJniPtrSize(info, sizeof(void*));
  }

  ALWAYS_INLINE void SetProfilingInfoPtrSize(ProfilingInfo* info, size_t pointer_size) {
    SetEntryPointFromJniPtrSize(info, pointer_size);
  }

  static MemberOffset ProfilingInfoOffset() {
    return EntryPointFromJniOffset(sizeof(void*));
  }

  void* GetEntryPointFromJni() {
    return GetEntryPointFromJniPtrSize(sizeof(void*));
  }

  ALWAYS_INLINE void* GetEntryPointFromJniPtrSize(size_t pointer_size) {
    return GetNativePointer<void*>(EntryPointFromJniOffset(pointer_size), pointer_size);
  }

  void SetEntryPointFromJni(const void* entrypoint) {
    DCHECK(IsNative());
    SetEntryPointFromJniPtrSize(entrypoint, sizeof(void*));
  }

  ALWAYS_INLINE void SetEntryPointFromJniPtrSize(const void* entrypoint, size_t pointer_size) {
    SetNativePointer(EntryPointFromJniOffset(pointer_size), entrypoint, pointer_size);
  }

  // Is this a CalleeSaveMethod or ResolutionMethod, which therefore doesn't adhere to the
  // normal conventions for a method of managed code? Returns false for proxy methods.
  ALWAYS_INLINE bool IsRuntimeMethod();

  // Is this a hand-crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() SHARED_REQUIRES(Locks::mutator_lock_);

  bool IsResolutionMethod() SHARED_REQUIRES(Locks::mutator_lock_);

  bool IsImtUnimplementedMethod() SHARED_REQUIRES(Locks::mutator_lock_);

  MethodReference ToMethodReference() SHARED_REQUIRES(Locks::mutator_lock_) {
    return MethodReference(GetDexFile(), GetDexMethodIndex());
  }

  // Find the catch block for the given exception type and dex_pc. When a catch block is found,
  // indicates whether the found catch block is responsible for clearing the exception or whether
  // a move-exception instruction is present.
  uint32_t FindCatchBlock(Handle<mirror::Class> exception_type, uint32_t dex_pc,
                          bool* has_no_move_exception)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // NO_THREAD_SAFETY_ANALYSIS since we don't know what the callback requires.
  template<typename RootVisitorType>
  void VisitRoots(RootVisitorType& visitor, size_t pointer_size) NO_THREAD_SAFETY_ANALYSIS;

  const DexFile* GetDexFile() SHARED_REQUIRES(Locks::mutator_lock_);

  const char* GetDeclaringClassDescriptor() SHARED_REQUIRES(Locks::mutator_lock_);

  const char* GetShorty() SHARED_REQUIRES(Locks::mutator_lock_) {
    uint32_t unused_length;
    return GetShorty(&unused_length);
  }

  const char* GetShorty(uint32_t* out_length) SHARED_REQUIRES(Locks::mutator_lock_);

  const Signature GetSignature() SHARED_REQUIRES(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetName() SHARED_REQUIRES(Locks::mutator_lock_);

  mirror::String* GetNameAsString(Thread* self) SHARED_REQUIRES(Locks::mutator_lock_);

  const DexFile::CodeItem* GetCodeItem() SHARED_REQUIRES(Locks::mutator_lock_);

  bool IsResolvedTypeIdx(uint16_t type_idx, size_t ptr_size) SHARED_REQUIRES(Locks::mutator_lock_);

  int32_t GetLineNumFromDexPC(uint32_t dex_pc) SHARED_REQUIRES(Locks::mutator_lock_);

  const DexFile::ProtoId& GetPrototype() SHARED_REQUIRES(Locks::mutator_lock_);

  const DexFile::TypeList* GetParameterTypeList() SHARED_REQUIRES(Locks::mutator_lock_);

  const char* GetDeclaringClassSourceFile() SHARED_REQUIRES(Locks::mutator_lock_);

  uint16_t GetClassDefIndex() SHARED_REQUIRES(Locks::mutator_lock_);

  const DexFile::ClassDef& GetClassDef() SHARED_REQUIRES(Locks::mutator_lock_);

  const char* GetReturnTypeDescriptor() SHARED_REQUIRES(Locks::mutator_lock_);

  const char* GetTypeDescriptorFromTypeIdx(uint16_t type_idx)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // May cause thread suspension, since GetClassFromTypeIdx calls ResolveType; this has
  // caused a large number of bugs at call sites.
  mirror::Class* GetReturnType(bool resolve, size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  mirror::ClassLoader* GetClassLoader() SHARED_REQUIRES(Locks::mutator_lock_);

  mirror::DexCache* GetDexCache() SHARED_REQUIRES(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetInterfaceMethodIfProxy(size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // May cause thread suspension due to class resolution.
  bool EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Size of an instance of this native class.
  static size_t Size(size_t pointer_size) {
    return RoundUp(OFFSETOF_MEMBER(ArtMethod, ptr_sized_fields_), pointer_size) +
        (sizeof(PtrSizedFields) / sizeof(void*)) * pointer_size;
  }
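
  // Worked example: on a 64-bit image (pointer_size == 8), the four entries of
  // PtrSizedFields contribute 4 * 8 = 32 bytes after the rounded-up offset of
  // ptr_sized_fields_.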

  // Alignment of an instance of this native class.
  static size_t Alignment(size_t pointer_size) {
    // The ArtMethod alignment is the same as the image pointer size. This differs from
    // alignof(ArtMethod) if cross-compiling with pointer_size != sizeof(void*).
    return pointer_size;
  }

  void CopyFrom(ArtMethod* src, size_t image_pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  ALWAYS_INLINE GcRoot<mirror::Class>* GetDexCacheResolvedTypes(size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Note, hotness_count_ updates are non-atomic, but they don't need to be precise. Also,
  // given that the counter is only 16 bits wide, we can expect wrap-around in some
  // situations. Consumers of hotness_count_ must be able to deal with that.
  uint16_t IncrementCounter() {
    return ++hotness_count_;
  }

  void ClearCounter() {
    hotness_count_ = 0;
  }

  void SetCounter(int16_t hotness_count) {
    hotness_count_ = hotness_count;
  }

  uint16_t GetCounter() const {
    return hotness_count_;
  }
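
  // Illustrative use only (not an API defined here): an interpreter loop might
  // drive JIT compilation from this counter with a hypothetical threshold:
  //
  //   if (method->IncrementCounter() >= kJitHotThreshold) {
  //     RequestJitCompilation(method);  // hypothetical helper
  //   }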

  const uint8_t* GetQuickenedInfo() SHARED_REQUIRES(Locks::mutator_lock_);

  // Returns the method header for the compiled code containing 'pc'. Note that runtime
  // methods return null here, as they are not oat-based.
  const OatQuickMethodHeader* GetOatQuickMethodHeader(uintptr_t pc)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Returns whether the method has any compiled code, JIT or AOT.
  bool HasAnyCompiledCode() SHARED_REQUIRES(Locks::mutator_lock_);

  // Update heap objects and non-entrypoint pointers with the passed-in visitor for image
  // relocation. Does not use a read barrier.
  template <typename Visitor>
  ALWAYS_INLINE void UpdateObjectsForImageRelocation(const Visitor& visitor, size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Update entry points by passing them through the visitor.
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  ALWAYS_INLINE void UpdateEntrypoints(const Visitor& visitor, size_t pointer_size);

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of.
  GcRoot<mirror::Class> declaring_class_;

  // Access flags; low 16 bits are defined by spec.
  uint32_t access_flags_;

  /* Dex file fields. The defining dex file is available via declaring_class_->dex_cache_ */

  // Offset to the CodeItem.
  uint32_t dex_code_item_offset_;

  // Index into method_ids of the dex file associated with this method.
  uint32_t dex_method_index_;

  /* End of dex file fields. */

  // Entry within a dispatch table for this method. For static/direct methods the index is
  // into declaringClass.directMethods, for virtual methods into the vtable, and for
  // interface methods into the ifTable.
  uint16_t method_index_;

  // The hotness we measure for this method. Managed by the interpreter. Not atomic, as we allow
  // missing increments: if the method is hot, we will see it eventually.
  uint16_t hotness_count_;

  // Fake padding field gets inserted here.

  // Must be the last fields in the method.
  // PACKED(4) is necessary for the correctness of
  // RoundUp(OFFSETOF_MEMBER(ArtMethod, ptr_sized_fields_), pointer_size).
  struct PACKED(4) PtrSizedFields {
    // Shortcut to the declaring_class_->dex_cache_ member for fast compiled-code access.
    ArtMethod** dex_cache_resolved_methods_;

    // Shortcut to the declaring_class_->dex_cache_ member for fast compiled-code access.
    GcRoot<mirror::Class>* dex_cache_resolved_types_;

    // Pointer to JNI function registered to this method, or a function to resolve the JNI function,
    // or the profiling data for non-native methods, or an ImtConflictTable.
    void* entry_point_from_jni_;

    // Method dispatch from quick compiled code invokes this pointer which may cause bridging into
    // the interpreter.
    void* entry_point_from_quick_compiled_code_;
  } ptr_sized_fields_;

 private:
  static size_t PtrSizedFieldsOffset(size_t pointer_size) {
    // Round up to pointer size for padding field.
    return RoundUp(OFFSETOF_MEMBER(ArtMethod, ptr_sized_fields_), pointer_size);
  }

  template<typename T>
  ALWAYS_INLINE T GetNativePointer(MemberOffset offset, size_t pointer_size) const {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    DCHECK(ValidPointerSize(pointer_size)) << pointer_size;
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == sizeof(uint32_t)) {
      return reinterpret_cast<T>(*reinterpret_cast<const uint32_t*>(addr));
    } else {
      auto v = *reinterpret_cast<const uint64_t*>(addr);
      return reinterpret_cast<T>(dchecked_integral_cast<uintptr_t>(v));
    }
  }

  template<typename T>
  ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, size_t pointer_size) {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    DCHECK(ValidPointerSize(pointer_size)) << pointer_size;
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == sizeof(uint32_t)) {
      uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
      *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr);
    } else {
      *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value);
    }
  }

  DISALLOW_COPY_AND_ASSIGN(ArtMethod);  // Need to use CopyFrom to deal with 32 vs 64 bits.
};

}  // namespace art

#endif  // ART_RUNTIME_ART_METHOD_H_