/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_OBJECT_H_
#define ART_RUNTIME_MIRROR_OBJECT_H_

#include "base/casts.h"
#include "globals.h"
#include "object_reference.h"
#include "offsets.h"
#include "verify_object.h"

namespace art {

class ArtField;
class ArtMethod;
class ImageWriter;
class LockWord;
class Monitor;
struct ObjectOffsets;
class Thread;
class VoidFunctor;

namespace mirror {

class Array;
class Class;
class ClassLoader;
class DexCache;
class FinalizerReference;
template<class T> class ObjectArray;
template<class T> class PrimitiveArray;
typedef PrimitiveArray<uint8_t> BooleanArray;
typedef PrimitiveArray<int8_t> ByteArray;
typedef PrimitiveArray<uint16_t> CharArray;
typedef PrimitiveArray<double> DoubleArray;
typedef PrimitiveArray<float> FloatArray;
typedef PrimitiveArray<int32_t> IntArray;
typedef PrimitiveArray<int64_t> LongArray;
typedef PrimitiveArray<int16_t> ShortArray;
class Reference;
class String;
class Throwable;

// Fields within mirror objects aren't accessed directly so that the appropriate amount of
// handshaking is done with GC (for example, read and write barriers). This macro is used to
// compute an offset for the Set/Get methods defined in Object that can safely access fields.
#define OFFSET_OF_OBJECT_MEMBER(type, field) \
    MemberOffset(OFFSETOF_MEMBER(type, field))

// Checks that we don't do field assignments which violate the typing system.
static constexpr bool kCheckFieldAssignments = false;

// Size of Object.
static constexpr uint32_t kObjectHeaderSize = kUseBrooksReadBarrier ? 16 : 8;
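
// Example (illustrative sketch, not part of this header): a mirror class typically pairs
// OFFSET_OF_OBJECT_MEMBER with the Object::GetField*/SetField* accessors declared below,
// e.g. for a hypothetical reference field 'value_' in a hypothetical 'MyMirrorClass':
//
//   static MemberOffset ValueOffset() {
//     return OFFSET_OF_OBJECT_MEMBER(MyMirrorClass, value_);
//   }
//   mirror::Object* GetValue() SHARED_REQUIRES(Locks::mutator_lock_) {
//     return GetFieldObject<mirror::Object>(ValueOffset());
//   }
//
// 'MyMirrorClass' and 'value_' are placeholder names used only for illustration.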

// C++ mirror of java.lang.Object
class MANAGED LOCKABLE Object {
 public:
  // The number of vtable entries in java.lang.Object.
  static constexpr size_t kVTableLength = 11;

  // The size of the java.lang.Class representing a java.lang.Object.
  static uint32_t ClassSize(size_t pointer_size);

  // Size of an instance of java.lang.Object.
  static constexpr uint32_t InstanceSize() {
    return sizeof(Object);
  }

  static MemberOffset ClassOffset() {
    return OFFSET_OF_OBJECT_MEMBER(Object, klass_);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE Class* GetClass() SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetClass(Class* new_klass) SHARED_REQUIRES(Locks::mutator_lock_);

  Object* GetReadBarrierPointer() SHARED_REQUIRES(Locks::mutator_lock_);

#ifndef USE_BAKER_OR_BROOKS_READ_BARRIER
  NO_RETURN
#endif
  void SetReadBarrierPointer(Object* rb_ptr) SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kCasRelease = false>
  ALWAYS_INLINE bool AtomicSetReadBarrierPointer(Object* expected_rb_ptr, Object* rb_ptr)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void AssertReadBarrierPointer() const SHARED_REQUIRES(Locks::mutator_lock_);

  // The verifier treats all interfaces as java.lang.Object and relies on runtime checks in
  // invoke-interface to detect incompatible interface types.
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool VerifierInstanceOf(Class* klass) SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE bool InstanceOf(Class* klass) SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  size_t SizeOf() SHARED_REQUIRES(Locks::mutator_lock_);

  Object* Clone(Thread* self) SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Roles::uninterruptible_);

  int32_t IdentityHashCode() const
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::thread_list_lock_, !Locks::thread_suspend_count_lock_);
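
  // Example (illustrative sketch): typical use of the accessors above from runtime code that
  // already holds the mutator lock; 'obj' and 'other_klass' are placeholder names.
  //
  //   mirror::Class* klass = obj->GetClass();
  //   if (obj->InstanceOf(other_klass)) {
  //     size_t byte_count = obj->SizeOf();
  //     int32_t hash = obj->IdentityHashCode();
  //   }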

  static MemberOffset MonitorOffset() {
    return OFFSET_OF_OBJECT_MEMBER(Object, monitor_);
  }

  // As_volatile can be false if the mutators are suspended. This is an optimization since it
  // avoids the barriers.
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  LockWord GetLockWord(bool as_volatile) SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetLockWord(LockWord new_val, bool as_volatile) SHARED_REQUIRES(Locks::mutator_lock_);
  bool CasLockWordWeakSequentiallyConsistent(LockWord old_val, LockWord new_val)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool CasLockWordWeakRelaxed(LockWord old_val, LockWord new_val)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool CasLockWordWeakRelease(LockWord old_val, LockWord new_val)
      SHARED_REQUIRES(Locks::mutator_lock_);
  uint32_t GetLockOwnerThreadId();

  mirror::Object* MonitorEnter(Thread* self)
      EXCLUSIVE_LOCK_FUNCTION()
      REQUIRES(!Roles::uninterruptible_)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool MonitorExit(Thread* self)
      REQUIRES(!Roles::uninterruptible_)
      SHARED_REQUIRES(Locks::mutator_lock_)
      UNLOCK_FUNCTION();
  void Notify(Thread* self) SHARED_REQUIRES(Locks::mutator_lock_);
  void NotifyAll(Thread* self) SHARED_REQUIRES(Locks::mutator_lock_);
  void Wait(Thread* self) SHARED_REQUIRES(Locks::mutator_lock_);
  void Wait(Thread* self, int64_t timeout, int32_t nanos) SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsClass() SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  Class* AsClass() SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsObjectArray() SHARED_REQUIRES(Locks::mutator_lock_);
  template<class T,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ObjectArray<T>* AsObjectArray() SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsClassLoader() SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ClassLoader* AsClassLoader() SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsDexCache() SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  DexCache* AsDexCache() SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsArrayInstance() SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  Array* AsArray() SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  BooleanArray* AsBooleanArray() SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ByteArray* AsByteArray() SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ByteArray* AsByteSizedArray() SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  CharArray* AsCharArray() SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ShortArray* AsShortArray() SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ShortArray* AsShortSizedArray() SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsIntArray() SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  IntArray* AsIntArray() SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsLongArray() SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  LongArray* AsLongArray() SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsFloatArray() SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  FloatArray* AsFloatArray() SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsDoubleArray() SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  DoubleArray* AsDoubleArray() SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsString() SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  String* AsString() SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  Throwable* AsThrowable() SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsReferenceInstance() SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  Reference* AsReference() SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsWeakReferenceInstance() SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsSoftReferenceInstance() SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsFinalizerReferenceInstance() SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  FinalizerReference* AsFinalizerReference() SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsPhantomReferenceInstance() SHARED_REQUIRES(Locks::mutator_lock_);
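
  // Example (illustrative sketch): the Is*/As* helpers above are normally used as a
  // checked down-cast pair; 'obj' is a placeholder name.
  //
  //   if (obj->IsString()) {
  //     mirror::String* s = obj->AsString();
  //   }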

  // Accessor for Java type fields.
  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier, bool kIsVolatile = false>
  ALWAYS_INLINE T* GetFieldObject(MemberOffset field_offset)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE T* GetFieldObjectVolatile(MemberOffset field_offset)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldObjectWithoutWriteBarrier(MemberOffset field_offset, Object* new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldObject(MemberOffset field_offset, Object* new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetFieldObjectVolatile(MemberOffset field_offset, Object* new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakSequentiallyConsistentObject(MemberOffset field_offset, Object* old_value,
                                                Object* new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);
  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakSequentiallyConsistentObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                                   Object* old_value,
                                                                   Object* new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);
  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongSequentiallyConsistentObject(MemberOffset field_offset, Object* old_value,
                                                  Object* new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);
  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongSequentiallyConsistentObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                                     Object* old_value,
                                                                     Object* new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);
  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakRelaxedObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                    Object* old_value,
                                                    Object* new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);
  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongRelaxedObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                      Object* old_value,
                                                      Object* new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  HeapReference<Object>* GetFieldObjectReferenceAddr(MemberOffset field_offset);
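
  // Example (illustrative sketch): the weak CAS variants above may fail spuriously and are
  // normally retried in a loop; 'obj', 'offset', 'expected' and 'desired' are placeholders.
  //
  //   do {
  //     expected = obj->GetFieldObject<mirror::Object>(offset);
  //   } while (!obj->CasFieldWeakSequentiallyConsistentObject</*kTransactionActive=*/false>(
  //       offset, expected, desired));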

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE uint8_t GetFieldBoolean(MemberOffset field_offset)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE int8_t GetFieldByte(MemberOffset field_offset)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE uint8_t GetFieldBooleanVolatile(MemberOffset field_offset)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE int8_t GetFieldByteVolatile(MemberOffset field_offset)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldBoolean(MemberOffset field_offset, uint8_t new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldByte(MemberOffset field_offset, int8_t new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetFieldBooleanVolatile(MemberOffset field_offset, uint8_t new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetFieldByteVolatile(MemberOffset field_offset, int8_t new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE uint16_t GetFieldChar(MemberOffset field_offset)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE int16_t GetFieldShort(MemberOffset field_offset)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE uint16_t GetFieldCharVolatile(MemberOffset field_offset)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE int16_t GetFieldShortVolatile(MemberOffset field_offset)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldChar(MemberOffset field_offset, uint16_t new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldShort(MemberOffset field_offset, int16_t new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetFieldCharVolatile(MemberOffset field_offset, uint16_t new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetFieldShortVolatile(MemberOffset field_offset, int16_t new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE int32_t GetField32(MemberOffset field_offset)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE int32_t GetField32Volatile(MemberOffset field_offset)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetField32(MemberOffset field_offset, int32_t new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetField32Volatile(MemberOffset field_offset, int32_t new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE bool CasFieldWeakSequentiallyConsistent32(MemberOffset field_offset,
                                                          int32_t old_value, int32_t new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakRelaxed32(MemberOffset field_offset, int32_t old_value,
                             int32_t new_value) ALWAYS_INLINE
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakRelease32(MemberOffset field_offset, int32_t old_value,
                             int32_t new_value) ALWAYS_INLINE
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongSequentiallyConsistent32(MemberOffset field_offset, int32_t old_value,
                                              int32_t new_value) ALWAYS_INLINE
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE int64_t GetField64(MemberOffset field_offset)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE int64_t GetField64Volatile(MemberOffset field_offset)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetField64(MemberOffset field_offset, int64_t new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetField64Volatile(MemberOffset field_offset, int64_t new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);
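
  // Example (illustrative sketch): the first template argument of the setters selects
  // transaction support and has no default, so callers spell it out explicitly;
  // 'obj' and 'offset' are placeholder names.
  //
  //   obj->SetField32</*kTransactionActive=*/false>(offset, 42);
  //   int32_t v = obj->GetField32(offset);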

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakSequentiallyConsistent64(MemberOffset field_offset, int64_t old_value,
                                            int64_t new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongSequentiallyConsistent64(MemberOffset field_offset, int64_t old_value,
                                              int64_t new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, typename T>
  void SetFieldPtr(MemberOffset field_offset, T new_value)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    SetFieldPtrWithSize<kTransactionActive, kCheckTransaction, kVerifyFlags>(
        field_offset, new_value, sizeof(void*));
  }
  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, typename T>
  void SetFieldPtr64(MemberOffset field_offset, T new_value)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    SetFieldPtrWithSize<kTransactionActive, kCheckTransaction, kVerifyFlags>(
        field_offset, new_value, 8u);
  }

  template<bool kTransactionActive, bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, typename T>
  ALWAYS_INLINE void SetFieldPtrWithSize(MemberOffset field_offset, T new_value,
                                         size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(pointer_size == 4 || pointer_size == 8) << pointer_size;
    if (pointer_size == 4) {
      intptr_t ptr = reinterpret_cast<intptr_t>(new_value);
      DCHECK_EQ(static_cast<int32_t>(ptr), ptr);  // Check that we don't lose any non-zero bits.
      SetField32<kTransactionActive, kCheckTransaction, kVerifyFlags>(
          field_offset, static_cast<int32_t>(ptr));
    } else {
      SetField64<kTransactionActive, kCheckTransaction, kVerifyFlags>(
          field_offset, reinterpret_cast64<int64_t>(new_value));
    }
  }

  // TODO: Fix the thread safety analysis broken by the use of templates. This should be
  // SHARED_REQUIRES(Locks::mutator_lock_).
  template <bool kVisitNativeRoots = true,
            VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
            ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
            typename Visitor,
            typename JavaLangRefVisitor = VoidFunctor>
  void VisitReferences(const Visitor& visitor, const JavaLangRefVisitor& ref_visitor)
      NO_THREAD_SAFETY_ANALYSIS;
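
  // Example (illustrative sketch): VisitReferences() accepts any callable visitor; the exact
  // signature it invokes is defined by the implementation in object-inl.h, but a commonly
  // assumed shape, with placeholder names, looks like:
  //
  //   struct CountingVisitor {
  //     void operator()(mirror::Object* obj, MemberOffset offset, bool is_static) const
  //         SHARED_REQUIRES(Locks::mutator_lock_) {
  //       ++count;
  //     }
  //     mutable size_t count = 0;
  //   };
  //   CountingVisitor visitor;
  //   obj->VisitReferences(visitor, VoidFunctor());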

  ArtField* FindFieldByOffset(MemberOffset offset) SHARED_REQUIRES(Locks::mutator_lock_);

  // Used by object_test.
  static void SetHashCodeSeed(uint32_t new_seed);
  // Generate an identity hash code. Public for object test.
  static uint32_t GenerateIdentityHashCode();

 protected:
  // Accessors for non-Java type fields.
  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  T GetFieldPtr(MemberOffset field_offset)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    return GetFieldPtrWithSize<T, kVerifyFlags, kIsVolatile>(field_offset, sizeof(void*));
  }
  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  T GetFieldPtr64(MemberOffset field_offset)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    return GetFieldPtrWithSize<T, kVerifyFlags, kIsVolatile>(field_offset, 8u);
  }

  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE T GetFieldPtrWithSize(MemberOffset field_offset, size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(pointer_size == 4 || pointer_size == 8) << pointer_size;
    if (pointer_size == 4) {
      return reinterpret_cast<T>(GetField32<kVerifyFlags, kIsVolatile>(field_offset));
    } else {
      int64_t v = GetField64<kVerifyFlags, kIsVolatile>(field_offset);
      return reinterpret_cast64<T>(v);
    }
  }

  // TODO: Fix this when annotalysis works with visitors.
  template<bool kIsStatic,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
           typename Visitor>
  void VisitFieldsReferences(uint32_t ref_offsets, const Visitor& visitor) HOT_ATTR
      NO_THREAD_SAFETY_ANALYSIS;
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
           typename Visitor>
  void VisitInstanceFieldsReferences(mirror::Class* klass, const Visitor& visitor) HOT_ATTR
      SHARED_REQUIRES(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
           typename Visitor>
  void VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor) HOT_ATTR
      SHARED_REQUIRES(Locks::mutator_lock_);

 private:
  template<typename kSize, bool kIsVolatile>
  ALWAYS_INLINE void SetField(MemberOffset field_offset, kSize new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);
  template<typename kSize, bool kIsVolatile>
  ALWAYS_INLINE kSize GetField(MemberOffset field_offset)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Verify the type correctness of stores to fields.
  // TODO: This can cause thread suspension and isn't moving GC safe.
  void CheckFieldAssignmentImpl(MemberOffset field_offset, Object* new_value)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void CheckFieldAssignment(MemberOffset field_offset, Object* new_value)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (kCheckFieldAssignments) {
      CheckFieldAssignmentImpl(field_offset, new_value);
    }
  }

  // A utility function that copies an object in a read barrier and
  // write barrier-aware way. This is internally used by Clone() and
  // Class::CopyOf().
  static Object* CopyObject(Thread* self, mirror::Object* dest, mirror::Object* src,
                            size_t num_bytes)
      SHARED_REQUIRES(Locks::mutator_lock_);

  static Atomic<uint32_t> hash_code_seed;

  // The Class representing the type of the object.
  HeapReference<Class> klass_;
  // Monitor and hash code information.
  uint32_t monitor_;

#ifdef USE_BROOKS_READ_BARRIER
  // Note: names use an 'x' prefix and the x_rb_ptr_ is of type int
  // instead of Object to go with the alphabetical/by-type field order
  // on the Java side.
  uint32_t x_rb_ptr_;      // For the Brooks pointer.
  uint32_t x_xpadding_;    // For 8-byte alignment. TODO: get rid of this.
#endif

  friend class art::ImageWriter;
  friend class art::Monitor;
  friend struct art::ObjectOffsets;  // for verifying offset information
  friend class CopyObjectVisitor;  // for CopyObject().
  friend class CopyClassVisitor;   // for CopyObject().
  DISALLOW_ALLOCATION();
  DISALLOW_IMPLICIT_CONSTRUCTORS(Object);
};

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_OBJECT_H_