/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_OBJECT_H_
#define ART_RUNTIME_MIRROR_OBJECT_H_

#include "object_reference.h"
#include "offsets.h"
#include "verify_object.h"

namespace art {

class ImageWriter;
class LockWord;
class Monitor;
struct ObjectOffsets;
class Thread;
class VoidFunctor;

namespace mirror {

class ArtField;
class ArtMethod;
class Array;
class Class;
class FinalizerReference;
template<class T> class ObjectArray;
template<class T> class PrimitiveArray;
typedef PrimitiveArray<uint8_t> BooleanArray;
typedef PrimitiveArray<int8_t> ByteArray;
typedef PrimitiveArray<uint16_t> CharArray;
typedef PrimitiveArray<double> DoubleArray;
typedef PrimitiveArray<float> FloatArray;
typedef PrimitiveArray<int32_t> IntArray;
typedef PrimitiveArray<int64_t> LongArray;
typedef PrimitiveArray<int16_t> ShortArray;
class Reference;
class String;
class Throwable;

// Fields within mirror objects aren't accessed directly so that the appropriate amount of
// handshaking is done with GC (for example, read and write barriers). This macro is used to
// compute an offset for the Set/Get methods defined in Object that can safely access fields.
#define OFFSET_OF_OBJECT_MEMBER(type, field) \
    MemberOffset(OFFSETOF_MEMBER(type, field))
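// Illustrative use (a minimal sketch, not additional API): ClassOffset() and MonitorOffset()
// below simply expand to OFFSET_OF_OBJECT_MEMBER(Object, klass_) and
// OFFSET_OF_OBJECT_MEMBER(Object, monitor_), yielding MemberOffsets that the Get/Set field
// accessors in this class consume.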

// Checks that we don't do field assignments which violate the typing system.
static constexpr bool kCheckFieldAssignments = false;

// C++ mirror of java.lang.Object
class MANAGED LOCKABLE Object {
 public:
  // The number of vtable entries in java.lang.Object.
  static constexpr size_t kVTableLength = 11;

  // The size of the java.lang.Class representing a java.lang.Object.
  static uint32_t ClassSize();

  // Size of an instance of java.lang.Object.
  static constexpr uint32_t InstanceSize() {
    return sizeof(Object);
  }

  static MemberOffset ClassOffset() {
    return OFFSET_OF_OBJECT_MEMBER(Object, klass_);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE Class* GetClass() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetClass(Class* new_klass) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  Object* GetReadBarrierPointer() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void SetReadBarrierPointer(Object* rb_ptr) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  bool AtomicSetReadBarrierPointer(Object* expected_rb_ptr, Object* rb_ptr)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void AssertReadBarrierPointer() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // The verifier treats all interfaces as java.lang.Object and relies on runtime checks in
  // invoke-interface to detect incompatible interface types.
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool VerifierInstanceOf(Class* klass) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool InstanceOf(Class* klass) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
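  // Illustrative only (a minimal sketch; string_class is a hypothetical variable, not part of
  // this header): a caller might guard a downcast with these checks, e.g.
  //   if (obj->InstanceOf(string_class)) {
  //     String* s = obj->AsString();
  //   }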

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  size_t SizeOf() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  Object* Clone(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  int32_t IdentityHashCode() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset MonitorOffset() {
    return OFFSET_OF_OBJECT_MEMBER(Object, monitor_);
  }

  // The as_volatile parameter can be false if the mutators are suspended. This is an
  // optimization since it avoids the barriers.
  LockWord GetLockWord(bool as_volatile) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void SetLockWord(LockWord new_val, bool as_volatile) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  bool CasLockWordWeakSequentiallyConsistent(LockWord old_val, LockWord new_val)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  bool CasLockWordWeakRelaxed(LockWord old_val, LockWord new_val)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  uint32_t GetLockOwnerThreadId();

  mirror::Object* MonitorEnter(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCK_FUNCTION();
  bool MonitorExit(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      UNLOCK_FUNCTION();
  void Notify(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void NotifyAll(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void Wait(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void Wait(Thread* self, int64_t timeout, int32_t nanos) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
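  // Illustrative only (a minimal sketch, not additional API): these entry points mirror Java
  // monitor semantics, so a caller typically pairs them, roughly:
  //   obj->MonitorEnter(self);
  //   obj->Wait(self);        // releases the monitor while waiting, reacquires before returning
  //   obj->MonitorExit(self);
  // The actual locking machinery (thin locks, monitor inflation) lives in Monitor and LockWord.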

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsClass() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  Class* AsClass() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsObjectArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ObjectArray<T>* AsObjectArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsArrayInstance() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  Array* AsArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  BooleanArray* AsBooleanArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ByteArray* AsByteArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ByteArray* AsByteSizedArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  CharArray* AsCharArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ShortArray* AsShortArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ShortArray* AsShortSizedArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  IntArray* AsIntArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  LongArray* AsLongArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  FloatArray* AsFloatArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  DoubleArray* AsDoubleArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  String* AsString() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  Throwable* AsThrowable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsArtMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ArtMethod* AsArtMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsArtField() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ArtField* AsArtField() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsReferenceInstance() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  Reference* AsReference() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsWeakReferenceInstance() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsSoftReferenceInstance() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsFinalizerReferenceInstance() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  FinalizerReference* AsFinalizerReference() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsPhantomReferenceInstance() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
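  // Illustrative only (a sketch of the intended pattern, not additional API): the Is*/As*
  // pairs above are meant to be used together, e.g.
  //   if (obj->IsArrayInstance()) {
  //     Array* array = obj->AsArray();
  //     ...
  //   }
  // Assumption: the As* helpers (defined in the corresponding -inl.h) debug-check the matching
  // Is* predicate, so calling them on an object of the wrong type is a programming error.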

  // Accessor for Java type fields.
  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
      ReadBarrierOption kReadBarrierOption = kWithReadBarrier, bool kIsVolatile = false>
  ALWAYS_INLINE T* GetFieldObject(MemberOffset field_offset)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
      ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE T* GetFieldObjectVolatile(MemberOffset field_offset)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldObjectWithoutWriteBarrier(MemberOffset field_offset, Object* new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldObject(MemberOffset field_offset, Object* new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetFieldObjectVolatile(MemberOffset field_offset, Object* new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
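  // Illustrative only (a minimal sketch; Foo, Bar and bar_ are hypothetical names, not part of
  // the runtime): callers pair these accessors with OFFSET_OF_OBJECT_MEMBER, e.g.
  //   foo->SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(Foo, bar_), value);
  //   Bar* bar = foo->GetFieldObject<Bar>(OFFSET_OF_OBJECT_MEMBER(Foo, bar_));
  // The leading bool template argument of the setters is kTransactionActive.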

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakSequentiallyConsistentObject(MemberOffset field_offset, Object* old_value,
                                                Object* new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongSequentiallyConsistentObject(MemberOffset field_offset, Object* old_value,
                                                  Object* new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  HeapReference<Object>* GetFieldObjectReferenceAddr(MemberOffset field_offset);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE int32_t GetField32(MemberOffset field_offset)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE int32_t GetField32Volatile(MemberOffset field_offset)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetField32(MemberOffset field_offset, int32_t new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetField32Volatile(MemberOffset field_offset, int32_t new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE bool CasFieldWeakSequentiallyConsistent32(MemberOffset field_offset,
                                                          int32_t old_value, int32_t new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakRelaxed32(MemberOffset field_offset, int32_t old_value,
                             int32_t new_value) ALWAYS_INLINE
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongSequentiallyConsistent32(MemberOffset field_offset, int32_t old_value,
                                              int32_t new_value) ALWAYS_INLINE
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
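  // Illustrative only (a minimal sketch, not additional API): the weak CAS variants may fail
  // spuriously, so callers typically retry in a loop, e.g.
  //   int32_t old_value;
  //   do {
  //     old_value = obj->GetField32Volatile(offset);
  //   } while (!obj->CasFieldWeakSequentiallyConsistent32<false>(offset, old_value, old_value + 1));
  // The strong variants only fail when the field no longer holds old_value.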

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE int64_t GetField64(MemberOffset field_offset)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE int64_t GetField64Volatile(MemberOffset field_offset)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetField64(MemberOffset field_offset, int64_t new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetField64Volatile(MemberOffset field_offset, int64_t new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakSequentiallyConsistent64(MemberOffset field_offset, int64_t old_value,
                                            int64_t new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongSequentiallyConsistent64(MemberOffset field_offset, int64_t old_value,
                                              int64_t new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, typename T>
  void SetFieldPtr(MemberOffset field_offset, T new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    SetFieldPtrWithSize<kTransactionActive, kCheckTransaction, kVerifyFlags>(
        field_offset, new_value, sizeof(void*));
  }

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, typename T>
  ALWAYS_INLINE void SetFieldPtrWithSize(MemberOffset field_offset, T new_value,
                                         size_t pointer_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(pointer_size == 4 || pointer_size == 8) << pointer_size;
    if (pointer_size == 4) {
      intptr_t ptr = reinterpret_cast<intptr_t>(new_value);
      DCHECK_EQ(static_cast<int32_t>(ptr), ptr);  // Check that we don't lose any non-zero bits.
      SetField32<kTransactionActive, kCheckTransaction, kVerifyFlags>(
          field_offset, static_cast<int32_t>(ptr));
    } else {
      SetField64<kTransactionActive, kCheckTransaction, kVerifyFlags>(
          field_offset, static_cast<int64_t>(reinterpret_cast<intptr_t>(new_value)));
    }
  }
  // TODO: Fix the thread safety analysis broken by the use of templates. This should be
  // SHARED_LOCKS_REQUIRED(Locks::mutator_lock_).
  template <const bool kVisitClass, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
      typename Visitor, typename JavaLangRefVisitor = VoidFunctor>
  void VisitReferences(const Visitor& visitor, const JavaLangRefVisitor& ref_visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  // Used by object_test.
  static void SetHashCodeSeed(uint32_t new_seed);
  // Generate an identity hash code. Public for object test.
  static uint32_t GenerateIdentityHashCode();

 protected:
  // Accessors for non-Java type fields.
  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  T GetFieldPtr(MemberOffset field_offset)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return GetFieldPtrWithSize<T, kVerifyFlags, kIsVolatile>(field_offset, sizeof(void*));
  }

  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE T GetFieldPtrWithSize(MemberOffset field_offset, size_t pointer_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(pointer_size == 4 || pointer_size == 8) << pointer_size;
    if (pointer_size == 4) {
      return reinterpret_cast<T>(GetField32<kVerifyFlags, kIsVolatile>(field_offset));
    } else {
      int64_t v = GetField64<kVerifyFlags, kIsVolatile>(field_offset);
      // Check that we don't lose any non-zero bits.
      DCHECK_EQ(reinterpret_cast<int64_t>(reinterpret_cast<T>(v)), v);
      return reinterpret_cast<T>(v);
    }
  }

  // TODO: Fix this when annotalysis works with visitors.
  template<bool kVisitClass, bool kIsStatic, typename Visitor>
  void VisitFieldsReferences(uint32_t ref_offsets, const Visitor& visitor) HOT_ATTR
      NO_THREAD_SAFETY_ANALYSIS;
  template<bool kVisitClass, typename Visitor>
  void VisitInstanceFieldsReferences(mirror::Class* klass, const Visitor& visitor) HOT_ATTR
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template<bool kVisitClass, typename Visitor>
  void VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor) HOT_ATTR
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  // Verify the type correctness of stores to fields.
  // TODO: This can cause thread suspension and isn't moving GC safe.
  void CheckFieldAssignmentImpl(MemberOffset field_offset, Object* new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void CheckFieldAssignment(MemberOffset field_offset, Object* new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    if (kCheckFieldAssignments) {
      CheckFieldAssignmentImpl(field_offset, new_value);
    }
  }

  // A utility function that copies an object in a read barrier and
  // write barrier-aware way. This is internally used by Clone() and
  // Class::CopyOf().
  static Object* CopyObject(Thread* self, mirror::Object* dest, mirror::Object* src,
                            size_t num_bytes)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static Atomic<uint32_t> hash_code_seed;

  // The Class representing the type of the object.
  HeapReference<Class> klass_;
  // Monitor and hash code information.
  uint32_t monitor_;

#ifdef USE_BAKER_OR_BROOKS_READ_BARRIER
  // Note: names use an 'x' prefix and x_rb_ptr_ is of type int
  // instead of Object to go with the alphabetical/by-type field order
  // on the Java side.
  uint32_t x_rb_ptr_;      // For the Baker or Brooks pointer.
  uint32_t x_xpadding_;    // For 8-byte alignment. TODO: get rid of this.
#endif

  friend class art::ImageWriter;
  friend class art::Monitor;
  friend struct art::ObjectOffsets;  // for verifying offset information
  friend class CopyObjectVisitor;  // for CopyObject().
  friend class CopyClassVisitor;   // for CopyObject().
  DISALLOW_IMPLICIT_CONSTRUCTORS(Object);
};

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_OBJECT_H_