/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_ART_METHOD_H_
#define ART_RUNTIME_MIRROR_ART_METHOD_H_

#include "class.h"
#include "dex_file.h"
#include "invoke_type.h"
#include "locks.h"
#include "modifiers.h"
#include "object.h"

namespace art {

struct ArtMethodOffsets;
struct ConstructorMethodOffsets;
union JValue;
struct MethodClassOffsets;
class MethodHelper;
class StringPiece;
class ShadowFrame;

namespace mirror {

class StaticStorageBase;

typedef void (EntryPointFromInterpreter)(Thread* self, MethodHelper& mh,
    const DexFile::CodeItem* code_item, ShadowFrame* shadow_frame, JValue* result);
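
// A minimal sketch of a conforming entry point (illustrative only; the
// function name below is hypothetical, not part of the runtime):
//
//   void SampleInterpreterBridge(Thread* self, MethodHelper& mh,
//                                const DexFile::CodeItem* code_item,
//                                ShadowFrame* shadow_frame, JValue* result) {
//     // Interpret code_item's bytecode against shadow_frame and write the
//     // return value into *result.
//   }
//
// An ArtMethod would then be pointed at it via
// SetEntryPointFromInterpreter(SampleInterpreterBridge).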

// C++ mirror of java.lang.reflect.Method and java.lang.reflect.Constructor
class MANAGED ArtMethod : public Object {
 public:
  Class* GetDeclaringClass() const;

  void SetDeclaringClass(Class* new_declaring_class) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
  }

  static MemberOffset EntryPointFromCompiledCodeOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, entry_point_from_compiled_code_));
  }

  uint32_t GetAccessFlags() const;

  void SetAccessFlags(uint32_t new_access_flags) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, access_flags_), new_access_flags, false);
  }

  // Approximate what kind of method call would be used for this method.
  InvokeType GetInvokeType() const;

  // Returns true if the method is declared public.
  bool IsPublic() const {
    return (GetAccessFlags() & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() const {
    return (GetAccessFlags() & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() const {
    return (GetAccessFlags() & kAccStatic) != 0;
  }

  // Returns true if the method is a constructor.
  bool IsConstructor() const {
    return (GetAccessFlags() & kAccConstructor) != 0;
  }

  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() const {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    return (access_flags & (kAccStatic | kAccPrivate | kAccConstructor)) != 0;
  }
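
  // For example, a private instance method (kAccPrivate set) and a class
  // initializer (kAccStatic and kAccConstructor set) are both direct, while
  // a public instance method resolves through the vtable and is not.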

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() const {
    uint32_t synchronized_flags = kAccSynchronized | kAccDeclaredSynchronized;
    return (GetAccessFlags() & synchronized_flags) != 0;
  }

  bool IsFinal() const {
    return (GetAccessFlags() & kAccFinal) != 0;
  }

  bool IsMiranda() const {
    return (GetAccessFlags() & kAccMiranda) != 0;
  }

  bool IsNative() const {
    return (GetAccessFlags() & kAccNative) != 0;
  }

  bool IsAbstract() const {
    return (GetAccessFlags() & kAccAbstract) != 0;
  }

  bool IsSynthetic() const {
    return (GetAccessFlags() & kAccSynthetic) != 0;
  }

  bool IsProxyMethod() const;

  bool IsPreverified() const {
    return (GetAccessFlags() & kAccPreverified) != 0;
  }

  void SetPreverified() {
    SetAccessFlags(GetAccessFlags() | kAccPreverified);
  }

  bool CheckIncompatibleClassChange(InvokeType type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  uint16_t GetMethodIndex() const;

  size_t GetVtableIndex() const {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_), new_method_index, false);
  }

  static MemberOffset MethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_);
  }

  uint32_t GetCodeItemOffset() const {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, code_item_offset_), false);
  }

  void SetCodeItemOffset(uint32_t new_code_off) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, code_item_offset_), new_code_off, false);
  }

  // Number of 32-bit registers that would be required to hold all the arguments.
  static size_t NumArgRegisters(const StringPiece& shorty);
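
  // Illustrative example, assuming the standard shorty encoding (the first
  // character is the return type and is skipped; 'J' and 'D' each occupy two
  // 32-bit registers): for void f(long, int) the shorty is "VJI", so
  // NumArgRegisters("VJI") would return 3.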

  uint32_t GetDexMethodIndex() const;

  void SetDexMethodIndex(uint32_t new_idx) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_dex_index_), new_idx, false);
  }

  ObjectArray<String>* GetDexCacheStrings() const;
  void SetDexCacheStrings(ObjectArray<String>* new_dex_cache_strings)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset DexCacheStringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_strings_);
  }

  static MemberOffset DexCacheResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_methods_);
  }

  static MemberOffset DexCacheResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_types_);
  }

  static MemberOffset DexCacheInitializedStaticStorageOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod,
        dex_cache_initialized_static_storage_);
  }

  ObjectArray<ArtMethod>* GetDexCacheResolvedMethods() const;
  void SetDexCacheResolvedMethods(ObjectArray<ArtMethod>* new_dex_cache_methods)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ObjectArray<Class>* GetDexCacheResolvedTypes() const;
  void SetDexCacheResolvedTypes(ObjectArray<Class>* new_dex_cache_types)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ObjectArray<StaticStorageBase>* GetDexCacheInitializedStaticStorage() const;
  void SetDexCacheInitializedStaticStorage(ObjectArray<StaticStorageBase>* new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Find the method that this method overrides.
  ArtMethod* FindOverriddenMethod() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, char result_type)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  EntryPointFromInterpreter* GetEntryPointFromInterpreter() const {
    return GetFieldPtr<EntryPointFromInterpreter*>(
        OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_interpreter_), false);
  }

  void SetEntryPointFromInterpreter(EntryPointFromInterpreter* entry_point_from_interpreter) {
    SetFieldPtr<EntryPointFromInterpreter*>(
        OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_interpreter_),
        entry_point_from_interpreter, false);
  }

  const void* GetEntryPointFromCompiledCode() const {
    return GetFieldPtr<const void*>(
        OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_compiled_code_), false);
  }

  void SetEntryPointFromCompiledCode(const void* entry_point_from_compiled_code) {
    SetFieldPtr<const void*>(
        OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_compiled_code_),
        entry_point_from_compiled_code, false);
  }

  uint32_t GetCodeSize() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsWithinCode(uintptr_t pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uintptr_t code = reinterpret_cast<uintptr_t>(GetEntryPointFromCompiledCode());
    if (code == 0) {
      return pc == 0;
    }
    /*
     * During a stack walk, a return PC may point to the end of the code + 1
     * (in the case that the last instruction is a call that isn't expected to
     * return). Thus, we check <= code + GetCodeSize().
     */
    return (code <= pc && pc <= code + GetCodeSize());
  }

  void AssertPcIsWithinCode(uintptr_t pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  uint32_t GetOatCodeOffset() const;

  void SetOatCodeOffset(uint32_t code_offset);

  static MemberOffset GetEntryPointFromCompiledCodeOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_compiled_code_);
  }

  // Callers should wrap the uint8_t* in a MappingTable instance for convenient access.
  const uint8_t* GetMappingTable() const {
    return GetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, mapping_table_), false);
  }

  void SetMappingTable(const uint8_t* mapping_table) {
    SetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, mapping_table_),
                                mapping_table, false);
  }

  uint32_t GetOatMappingTableOffset() const;

  void SetOatMappingTableOffset(uint32_t mapping_table_offset);

  // Callers should wrap the uint8_t* in a VmapTable instance for convenient access.
  const uint8_t* GetVmapTable() const {
    return GetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, vmap_table_), false);
  }

  void SetVmapTable(const uint8_t* vmap_table) {
    SetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, vmap_table_), vmap_table, false);
  }

  uint32_t GetOatVmapTableOffset() const;

  void SetOatVmapTableOffset(uint32_t vmap_table_offset);

  const uint8_t* GetNativeGcMap() const {
    return GetFieldPtr<uint8_t*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, gc_map_), false);
  }
  void SetNativeGcMap(const uint8_t* data) {
    SetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, gc_map_), data, false);
  }

  // When building the oat, we need a convenient place to stuff the offset of the native GC map.
  void SetOatNativeGcMapOffset(uint32_t gc_map_offset);
  uint32_t GetOatNativeGcMapOffset() const;

  size_t GetFrameSizeInBytes() const {
    DCHECK_EQ(sizeof(size_t), sizeof(uint32_t));
    size_t result = GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, frame_size_in_bytes_), false);
    DCHECK_LE(static_cast<size_t>(kStackAlignment), result);
    return result;
  }

  void SetFrameSizeInBytes(size_t new_frame_size_in_bytes) {
    DCHECK_EQ(sizeof(size_t), sizeof(uint32_t));
    SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, frame_size_in_bytes_),
               new_frame_size_in_bytes, false);
  }

  size_t GetReturnPcOffsetInBytes() const {
    return GetFrameSizeInBytes() - kPointerSize;
  }
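
  // A sketch of the layout assumed above (illustrative, not normative): the
  // return PC occupies the last pointer-sized slot of the frame, so a
  // 32-byte frame with 4-byte pointers gives GetReturnPcOffsetInBytes() == 28.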

  size_t GetSirtOffsetInBytes() const {
    CHECK(IsNative());
    return kPointerSize;
  }

  bool IsRegistered() const;

  void RegisterNative(Thread* self, const void* native_method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void UnregisterNative(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset NativeMethodOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, native_method_);
  }

  const void* GetNativeMethod() const {
    return reinterpret_cast<const void*>(GetField32(NativeMethodOffset(), false));
  }

  void SetNativeMethod(const void*);

  static MemberOffset GetMethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_);
  }

  uint32_t GetCoreSpillMask() const {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, core_spill_mask_), false);
  }

  void SetCoreSpillMask(uint32_t core_spill_mask) {
    // Computed during compilation.
    SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, core_spill_mask_), core_spill_mask, false);
  }

  uint32_t GetFpSpillMask() const {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, fp_spill_mask_), false);
  }

  void SetFpSpillMask(uint32_t fp_spill_mask) {
    // Computed during compilation.
    SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, fp_spill_mask_), fp_spill_mask, false);
  }

  // Is this a CalleeSaveMethod or ResolutionMethod and therefore doesn't adhere to the normal
  // conventions for a method of managed code? Returns false for Proxy methods.
  bool IsRuntimeMethod() const;

  // Is this a hand-crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() const;

  bool IsResolutionMethod() const;

  uintptr_t NativePcOffset(const uintptr_t pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a native PC to a dex PC.
  uint32_t ToDexPc(const uintptr_t pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a dex PC to a native PC.
  uintptr_t ToNativePc(const uint32_t dex_pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Find the catch block for the given exception type and dex_pc. When a catch block is found,
  // the has_no_move_exception out-parameter indicates whether the found catch block is
  // responsible for clearing the exception or whether a move-exception instruction is present.
  uint32_t FindCatchBlock(Class* exception_type, uint32_t dex_pc, bool* has_no_move_exception) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
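
  // A hedged usage sketch (exception_class and throw_dex_pc are assumed to come
  // from the surrounding exception-delivery code; DexFile::kDexNoIndex is
  // assumed to be the not-found sentinel):
  //
  //   bool has_no_move_exception = false;
  //   uint32_t handler_dex_pc =
  //       method->FindCatchBlock(exception_class, throw_dex_pc, &has_no_move_exception);
  //   if (handler_dex_pc != DexFile::kDexNoIndex) {
  //     // Resume at the handler; clear the exception here only if
  //     // has_no_move_exception is true.
  //   }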

  static void SetClass(Class* java_lang_reflect_ArtMethod);

  static Class* GetJavaLangReflectArtMethod() {
    return java_lang_reflect_ArtMethod_;
  }

  static void ResetClass();

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of.
  Class* declaring_class_;

  // Short cut to declaring_class_->dex_cache_ member for fast compiled code access.
  ObjectArray<StaticStorageBase>* dex_cache_initialized_static_storage_;

  // Short cut to declaring_class_->dex_cache_ member for fast compiled code access.
  ObjectArray<ArtMethod>* dex_cache_resolved_methods_;

  // Short cut to declaring_class_->dex_cache_ member for fast compiled code access.
  ObjectArray<Class>* dex_cache_resolved_types_;

  // Short cut to declaring_class_->dex_cache_ member for fast compiled code access.
  ObjectArray<String>* dex_cache_strings_;

  // Access flags; low 16 bits are defined by spec.
  uint32_t access_flags_;

  // Offset to the CodeItem.
  uint32_t code_item_offset_;

  // Architecture-dependent register spill mask.
  uint32_t core_spill_mask_;

  // Compiled code associated with this method for callers from managed code.
  // May be compiled managed code or a bridge for invoking a native method.
  // TODO: Break apart this into portable and quick.
  const void* entry_point_from_compiled_code_;

  // Called by the interpreter to execute this method.
  EntryPointFromInterpreter* entry_point_from_interpreter_;

  // Architecture-dependent register spill mask.
  uint32_t fp_spill_mask_;

  // Total size in bytes of the frame.
  size_t frame_size_in_bytes_;

  // Garbage collection map of native PC offsets (quick) or dex PCs (portable) to reference bitmaps.
  const uint8_t* gc_map_;

  // Mapping from native PC to dex PC.
  const uint32_t* mapping_table_;

  // Index into method_ids of the dex file associated with this method.
  uint32_t method_dex_index_;

  // For concrete virtual methods, this is the offset of the method in Class::vtable_.
  //
  // For abstract methods in an interface class, this is the offset of the method in
  // "iftable_->Get(n)->GetMethodArray()".
  //
  // For static and direct methods this is the index in the direct methods table.
  uint32_t method_index_;

  // The target native method registered with this method.
  const void* native_method_;

  // When a dex register is promoted into a physical register, the spill mask holds which
  // physical registers hold dex registers. The first promoted register's corresponding dex
  // register is vmap_table_[1], the Nth is vmap_table_[N]. vmap_table_[0] holds the length of
  // the table.
  const uint16_t* vmap_table_;
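
  // Worked example of the layout above (illustrative): a table {2, 5, 13}
  // records two promoted registers; the first promoted register maps to dex
  // register vmap_table_[1] == 5 and the second to vmap_table_[2] == 13,
  // while vmap_table_[0] == 2 holds the length.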

  static Class* java_lang_reflect_ArtMethod_;

 private:
  friend struct art::ArtMethodOffsets;  // for verifying offset information
  DISALLOW_IMPLICIT_CONSTRUCTORS(ArtMethod);
};

class MANAGED ArtMethodClass : public Class {
 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(ArtMethodClass);
};

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_ART_METHOD_H_