1 /* 2 * Copyright (C) 2012 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 #include "reg_type.h" 18 19 #include <set> 20 21 #include "base/bit_vector.h" 22 #include "base/casts.h" 23 #include "base/scoped_arena_allocator.h" 24 #include "common_runtime_test.h" 25 #include "compiler_callbacks.h" 26 #include "reg_type-inl.h" 27 #include "reg_type_cache-inl.h" 28 #include "scoped_thread_state_change-inl.h" 29 #include "thread-current-inl.h" 30 31 namespace art { 32 namespace verifier { 33 34 class RegTypeTest : public CommonRuntimeTest {}; 35 36 TEST_F(RegTypeTest, ConstLoHi) { 37 // Tests creating primitive types types. 
38 ArenaStack stack(Runtime::Current()->GetArenaPool()); 39 ScopedArenaAllocator allocator(&stack); 40 ScopedObjectAccess soa(Thread::Current()); 41 RegTypeCache cache(true, allocator); 42 const RegType& ref_type_const_0 = cache.FromCat1Const(10, true); 43 const RegType& ref_type_const_1 = cache.FromCat1Const(10, true); 44 const RegType& ref_type_const_2 = cache.FromCat1Const(30, true); 45 const RegType& ref_type_const_3 = cache.FromCat1Const(30, false); 46 EXPECT_TRUE(ref_type_const_0.Equals(ref_type_const_1)); 47 EXPECT_FALSE(ref_type_const_0.Equals(ref_type_const_2)); 48 EXPECT_FALSE(ref_type_const_0.Equals(ref_type_const_3)); 49 50 const RegType& ref_type_const_wide_0 = cache.FromCat2ConstHi(50, true); 51 const RegType& ref_type_const_wide_1 = cache.FromCat2ConstHi(50, true); 52 EXPECT_TRUE(ref_type_const_wide_0.Equals(ref_type_const_wide_1)); 53 54 const RegType& ref_type_const_wide_2 = cache.FromCat2ConstLo(50, true); 55 const RegType& ref_type_const_wide_3 = cache.FromCat2ConstLo(50, true); 56 const RegType& ref_type_const_wide_4 = cache.FromCat2ConstLo(55, true); 57 EXPECT_TRUE(ref_type_const_wide_2.Equals(ref_type_const_wide_3)); 58 EXPECT_FALSE(ref_type_const_wide_2.Equals(ref_type_const_wide_4)); 59 } 60 61 TEST_F(RegTypeTest, Pairs) { 62 ArenaStack stack(Runtime::Current()->GetArenaPool()); 63 ScopedArenaAllocator allocator(&stack); 64 ScopedObjectAccess soa(Thread::Current()); 65 RegTypeCache cache(true, allocator); 66 int64_t val = static_cast<int32_t>(1234); 67 const RegType& precise_lo = cache.FromCat2ConstLo(static_cast<int32_t>(val), true); 68 const RegType& precise_hi = cache.FromCat2ConstHi(static_cast<int32_t>(val >> 32), true); 69 const RegType& precise_const = cache.FromCat1Const(static_cast<int32_t>(val >> 32), true); 70 const RegType& long_lo = cache.LongLo(); 71 const RegType& long_hi = cache.LongHi(); 72 // Check sanity of types. 
73 EXPECT_TRUE(precise_lo.IsLowHalf()); 74 EXPECT_FALSE(precise_hi.IsLowHalf()); 75 EXPECT_FALSE(precise_lo.IsHighHalf()); 76 EXPECT_TRUE(precise_hi.IsHighHalf()); 77 EXPECT_TRUE(long_hi.IsLongHighTypes()); 78 EXPECT_TRUE(precise_hi.IsLongHighTypes()); 79 // Check Pairing. 80 EXPECT_FALSE(precise_lo.CheckWidePair(precise_const)); 81 EXPECT_TRUE(precise_lo.CheckWidePair(precise_hi)); 82 // Test Merging. 83 EXPECT_TRUE((long_lo.Merge(precise_lo, &cache, /* verifier */ nullptr)).IsLongTypes()); 84 EXPECT_TRUE((long_hi.Merge(precise_hi, &cache, /* verifier */ nullptr)).IsLongHighTypes()); 85 } 86 87 TEST_F(RegTypeTest, Primitives) { 88 ArenaStack stack(Runtime::Current()->GetArenaPool()); 89 ScopedArenaAllocator allocator(&stack); 90 ScopedObjectAccess soa(Thread::Current()); 91 RegTypeCache cache(true, allocator); 92 93 const RegType& bool_reg_type = cache.Boolean(); 94 EXPECT_FALSE(bool_reg_type.IsUndefined()); 95 EXPECT_FALSE(bool_reg_type.IsConflict()); 96 EXPECT_FALSE(bool_reg_type.IsZero()); 97 EXPECT_FALSE(bool_reg_type.IsOne()); 98 EXPECT_FALSE(bool_reg_type.IsLongConstant()); 99 EXPECT_TRUE(bool_reg_type.IsBoolean()); 100 EXPECT_FALSE(bool_reg_type.IsByte()); 101 EXPECT_FALSE(bool_reg_type.IsChar()); 102 EXPECT_FALSE(bool_reg_type.IsShort()); 103 EXPECT_FALSE(bool_reg_type.IsInteger()); 104 EXPECT_FALSE(bool_reg_type.IsLong()); 105 EXPECT_FALSE(bool_reg_type.IsFloat()); 106 EXPECT_FALSE(bool_reg_type.IsDouble()); 107 EXPECT_FALSE(bool_reg_type.IsReference()); 108 EXPECT_FALSE(bool_reg_type.IsLowHalf()); 109 EXPECT_FALSE(bool_reg_type.IsHighHalf()); 110 EXPECT_FALSE(bool_reg_type.IsLongOrDoubleTypes()); 111 EXPECT_FALSE(bool_reg_type.IsReferenceTypes()); 112 EXPECT_TRUE(bool_reg_type.IsCategory1Types()); 113 EXPECT_FALSE(bool_reg_type.IsCategory2Types()); 114 EXPECT_TRUE(bool_reg_type.IsBooleanTypes()); 115 EXPECT_TRUE(bool_reg_type.IsByteTypes()); 116 EXPECT_TRUE(bool_reg_type.IsShortTypes()); 117 EXPECT_TRUE(bool_reg_type.IsCharTypes()); 118 
EXPECT_TRUE(bool_reg_type.IsIntegralTypes()); 119 EXPECT_FALSE(bool_reg_type.IsFloatTypes()); 120 EXPECT_FALSE(bool_reg_type.IsLongTypes()); 121 EXPECT_FALSE(bool_reg_type.IsDoubleTypes()); 122 EXPECT_TRUE(bool_reg_type.IsArrayIndexTypes()); 123 EXPECT_FALSE(bool_reg_type.IsNonZeroReferenceTypes()); 124 EXPECT_TRUE(bool_reg_type.HasClass()); 125 126 const RegType& byte_reg_type = cache.Byte(); 127 EXPECT_FALSE(byte_reg_type.IsUndefined()); 128 EXPECT_FALSE(byte_reg_type.IsConflict()); 129 EXPECT_FALSE(byte_reg_type.IsZero()); 130 EXPECT_FALSE(byte_reg_type.IsOne()); 131 EXPECT_FALSE(byte_reg_type.IsLongConstant()); 132 EXPECT_FALSE(byte_reg_type.IsBoolean()); 133 EXPECT_TRUE(byte_reg_type.IsByte()); 134 EXPECT_FALSE(byte_reg_type.IsChar()); 135 EXPECT_FALSE(byte_reg_type.IsShort()); 136 EXPECT_FALSE(byte_reg_type.IsInteger()); 137 EXPECT_FALSE(byte_reg_type.IsLong()); 138 EXPECT_FALSE(byte_reg_type.IsFloat()); 139 EXPECT_FALSE(byte_reg_type.IsDouble()); 140 EXPECT_FALSE(byte_reg_type.IsReference()); 141 EXPECT_FALSE(byte_reg_type.IsLowHalf()); 142 EXPECT_FALSE(byte_reg_type.IsHighHalf()); 143 EXPECT_FALSE(byte_reg_type.IsLongOrDoubleTypes()); 144 EXPECT_FALSE(byte_reg_type.IsReferenceTypes()); 145 EXPECT_TRUE(byte_reg_type.IsCategory1Types()); 146 EXPECT_FALSE(byte_reg_type.IsCategory2Types()); 147 EXPECT_FALSE(byte_reg_type.IsBooleanTypes()); 148 EXPECT_TRUE(byte_reg_type.IsByteTypes()); 149 EXPECT_TRUE(byte_reg_type.IsShortTypes()); 150 EXPECT_FALSE(byte_reg_type.IsCharTypes()); 151 EXPECT_TRUE(byte_reg_type.IsIntegralTypes()); 152 EXPECT_FALSE(byte_reg_type.IsFloatTypes()); 153 EXPECT_FALSE(byte_reg_type.IsLongTypes()); 154 EXPECT_FALSE(byte_reg_type.IsDoubleTypes()); 155 EXPECT_TRUE(byte_reg_type.IsArrayIndexTypes()); 156 EXPECT_FALSE(byte_reg_type.IsNonZeroReferenceTypes()); 157 EXPECT_TRUE(byte_reg_type.HasClass()); 158 159 const RegType& char_reg_type = cache.Char(); 160 EXPECT_FALSE(char_reg_type.IsUndefined()); 161 EXPECT_FALSE(char_reg_type.IsConflict()); 
162 EXPECT_FALSE(char_reg_type.IsZero()); 163 EXPECT_FALSE(char_reg_type.IsOne()); 164 EXPECT_FALSE(char_reg_type.IsLongConstant()); 165 EXPECT_FALSE(char_reg_type.IsBoolean()); 166 EXPECT_FALSE(char_reg_type.IsByte()); 167 EXPECT_TRUE(char_reg_type.IsChar()); 168 EXPECT_FALSE(char_reg_type.IsShort()); 169 EXPECT_FALSE(char_reg_type.IsInteger()); 170 EXPECT_FALSE(char_reg_type.IsLong()); 171 EXPECT_FALSE(char_reg_type.IsFloat()); 172 EXPECT_FALSE(char_reg_type.IsDouble()); 173 EXPECT_FALSE(char_reg_type.IsReference()); 174 EXPECT_FALSE(char_reg_type.IsLowHalf()); 175 EXPECT_FALSE(char_reg_type.IsHighHalf()); 176 EXPECT_FALSE(char_reg_type.IsLongOrDoubleTypes()); 177 EXPECT_FALSE(char_reg_type.IsReferenceTypes()); 178 EXPECT_TRUE(char_reg_type.IsCategory1Types()); 179 EXPECT_FALSE(char_reg_type.IsCategory2Types()); 180 EXPECT_FALSE(char_reg_type.IsBooleanTypes()); 181 EXPECT_FALSE(char_reg_type.IsByteTypes()); 182 EXPECT_FALSE(char_reg_type.IsShortTypes()); 183 EXPECT_TRUE(char_reg_type.IsCharTypes()); 184 EXPECT_TRUE(char_reg_type.IsIntegralTypes()); 185 EXPECT_FALSE(char_reg_type.IsFloatTypes()); 186 EXPECT_FALSE(char_reg_type.IsLongTypes()); 187 EXPECT_FALSE(char_reg_type.IsDoubleTypes()); 188 EXPECT_TRUE(char_reg_type.IsArrayIndexTypes()); 189 EXPECT_FALSE(char_reg_type.IsNonZeroReferenceTypes()); 190 EXPECT_TRUE(char_reg_type.HasClass()); 191 192 const RegType& short_reg_type = cache.Short(); 193 EXPECT_FALSE(short_reg_type.IsUndefined()); 194 EXPECT_FALSE(short_reg_type.IsConflict()); 195 EXPECT_FALSE(short_reg_type.IsZero()); 196 EXPECT_FALSE(short_reg_type.IsOne()); 197 EXPECT_FALSE(short_reg_type.IsLongConstant()); 198 EXPECT_FALSE(short_reg_type.IsBoolean()); 199 EXPECT_FALSE(short_reg_type.IsByte()); 200 EXPECT_FALSE(short_reg_type.IsChar()); 201 EXPECT_TRUE(short_reg_type.IsShort()); 202 EXPECT_FALSE(short_reg_type.IsInteger()); 203 EXPECT_FALSE(short_reg_type.IsLong()); 204 EXPECT_FALSE(short_reg_type.IsFloat()); 205 
EXPECT_FALSE(short_reg_type.IsDouble()); 206 EXPECT_FALSE(short_reg_type.IsReference()); 207 EXPECT_FALSE(short_reg_type.IsLowHalf()); 208 EXPECT_FALSE(short_reg_type.IsHighHalf()); 209 EXPECT_FALSE(short_reg_type.IsLongOrDoubleTypes()); 210 EXPECT_FALSE(short_reg_type.IsReferenceTypes()); 211 EXPECT_TRUE(short_reg_type.IsCategory1Types()); 212 EXPECT_FALSE(short_reg_type.IsCategory2Types()); 213 EXPECT_FALSE(short_reg_type.IsBooleanTypes()); 214 EXPECT_FALSE(short_reg_type.IsByteTypes()); 215 EXPECT_TRUE(short_reg_type.IsShortTypes()); 216 EXPECT_FALSE(short_reg_type.IsCharTypes()); 217 EXPECT_TRUE(short_reg_type.IsIntegralTypes()); 218 EXPECT_FALSE(short_reg_type.IsFloatTypes()); 219 EXPECT_FALSE(short_reg_type.IsLongTypes()); 220 EXPECT_FALSE(short_reg_type.IsDoubleTypes()); 221 EXPECT_TRUE(short_reg_type.IsArrayIndexTypes()); 222 EXPECT_FALSE(short_reg_type.IsNonZeroReferenceTypes()); 223 EXPECT_TRUE(short_reg_type.HasClass()); 224 225 const RegType& int_reg_type = cache.Integer(); 226 EXPECT_FALSE(int_reg_type.IsUndefined()); 227 EXPECT_FALSE(int_reg_type.IsConflict()); 228 EXPECT_FALSE(int_reg_type.IsZero()); 229 EXPECT_FALSE(int_reg_type.IsOne()); 230 EXPECT_FALSE(int_reg_type.IsLongConstant()); 231 EXPECT_FALSE(int_reg_type.IsBoolean()); 232 EXPECT_FALSE(int_reg_type.IsByte()); 233 EXPECT_FALSE(int_reg_type.IsChar()); 234 EXPECT_FALSE(int_reg_type.IsShort()); 235 EXPECT_TRUE(int_reg_type.IsInteger()); 236 EXPECT_FALSE(int_reg_type.IsLong()); 237 EXPECT_FALSE(int_reg_type.IsFloat()); 238 EXPECT_FALSE(int_reg_type.IsDouble()); 239 EXPECT_FALSE(int_reg_type.IsReference()); 240 EXPECT_FALSE(int_reg_type.IsLowHalf()); 241 EXPECT_FALSE(int_reg_type.IsHighHalf()); 242 EXPECT_FALSE(int_reg_type.IsLongOrDoubleTypes()); 243 EXPECT_FALSE(int_reg_type.IsReferenceTypes()); 244 EXPECT_TRUE(int_reg_type.IsCategory1Types()); 245 EXPECT_FALSE(int_reg_type.IsCategory2Types()); 246 EXPECT_FALSE(int_reg_type.IsBooleanTypes()); 247 EXPECT_FALSE(int_reg_type.IsByteTypes()); 248 
EXPECT_FALSE(int_reg_type.IsShortTypes()); 249 EXPECT_FALSE(int_reg_type.IsCharTypes()); 250 EXPECT_TRUE(int_reg_type.IsIntegralTypes()); 251 EXPECT_FALSE(int_reg_type.IsFloatTypes()); 252 EXPECT_FALSE(int_reg_type.IsLongTypes()); 253 EXPECT_FALSE(int_reg_type.IsDoubleTypes()); 254 EXPECT_TRUE(int_reg_type.IsArrayIndexTypes()); 255 EXPECT_FALSE(int_reg_type.IsNonZeroReferenceTypes()); 256 EXPECT_TRUE(int_reg_type.HasClass()); 257 258 const RegType& long_reg_type = cache.LongLo(); 259 EXPECT_FALSE(long_reg_type.IsUndefined()); 260 EXPECT_FALSE(long_reg_type.IsConflict()); 261 EXPECT_FALSE(long_reg_type.IsZero()); 262 EXPECT_FALSE(long_reg_type.IsOne()); 263 EXPECT_FALSE(long_reg_type.IsLongConstant()); 264 EXPECT_FALSE(long_reg_type.IsBoolean()); 265 EXPECT_FALSE(long_reg_type.IsByte()); 266 EXPECT_FALSE(long_reg_type.IsChar()); 267 EXPECT_FALSE(long_reg_type.IsShort()); 268 EXPECT_FALSE(long_reg_type.IsInteger()); 269 EXPECT_TRUE(long_reg_type.IsLong()); 270 EXPECT_FALSE(long_reg_type.IsFloat()); 271 EXPECT_FALSE(long_reg_type.IsDouble()); 272 EXPECT_FALSE(long_reg_type.IsReference()); 273 EXPECT_TRUE(long_reg_type.IsLowHalf()); 274 EXPECT_FALSE(long_reg_type.IsHighHalf()); 275 EXPECT_TRUE(long_reg_type.IsLongOrDoubleTypes()); 276 EXPECT_FALSE(long_reg_type.IsReferenceTypes()); 277 EXPECT_FALSE(long_reg_type.IsCategory1Types()); 278 EXPECT_TRUE(long_reg_type.IsCategory2Types()); 279 EXPECT_FALSE(long_reg_type.IsBooleanTypes()); 280 EXPECT_FALSE(long_reg_type.IsByteTypes()); 281 EXPECT_FALSE(long_reg_type.IsShortTypes()); 282 EXPECT_FALSE(long_reg_type.IsCharTypes()); 283 EXPECT_FALSE(long_reg_type.IsIntegralTypes()); 284 EXPECT_FALSE(long_reg_type.IsFloatTypes()); 285 EXPECT_TRUE(long_reg_type.IsLongTypes()); 286 EXPECT_FALSE(long_reg_type.IsDoubleTypes()); 287 EXPECT_FALSE(long_reg_type.IsArrayIndexTypes()); 288 EXPECT_FALSE(long_reg_type.IsNonZeroReferenceTypes()); 289 EXPECT_TRUE(long_reg_type.HasClass()); 290 291 const RegType& float_reg_type = cache.Float(); 
292 EXPECT_FALSE(float_reg_type.IsUndefined()); 293 EXPECT_FALSE(float_reg_type.IsConflict()); 294 EXPECT_FALSE(float_reg_type.IsZero()); 295 EXPECT_FALSE(float_reg_type.IsOne()); 296 EXPECT_FALSE(float_reg_type.IsLongConstant()); 297 EXPECT_FALSE(float_reg_type.IsBoolean()); 298 EXPECT_FALSE(float_reg_type.IsByte()); 299 EXPECT_FALSE(float_reg_type.IsChar()); 300 EXPECT_FALSE(float_reg_type.IsShort()); 301 EXPECT_FALSE(float_reg_type.IsInteger()); 302 EXPECT_FALSE(float_reg_type.IsLong()); 303 EXPECT_TRUE(float_reg_type.IsFloat()); 304 EXPECT_FALSE(float_reg_type.IsDouble()); 305 EXPECT_FALSE(float_reg_type.IsReference()); 306 EXPECT_FALSE(float_reg_type.IsLowHalf()); 307 EXPECT_FALSE(float_reg_type.IsHighHalf()); 308 EXPECT_FALSE(float_reg_type.IsLongOrDoubleTypes()); 309 EXPECT_FALSE(float_reg_type.IsReferenceTypes()); 310 EXPECT_TRUE(float_reg_type.IsCategory1Types()); 311 EXPECT_FALSE(float_reg_type.IsCategory2Types()); 312 EXPECT_FALSE(float_reg_type.IsBooleanTypes()); 313 EXPECT_FALSE(float_reg_type.IsByteTypes()); 314 EXPECT_FALSE(float_reg_type.IsShortTypes()); 315 EXPECT_FALSE(float_reg_type.IsCharTypes()); 316 EXPECT_FALSE(float_reg_type.IsIntegralTypes()); 317 EXPECT_TRUE(float_reg_type.IsFloatTypes()); 318 EXPECT_FALSE(float_reg_type.IsLongTypes()); 319 EXPECT_FALSE(float_reg_type.IsDoubleTypes()); 320 EXPECT_FALSE(float_reg_type.IsArrayIndexTypes()); 321 EXPECT_FALSE(float_reg_type.IsNonZeroReferenceTypes()); 322 EXPECT_TRUE(float_reg_type.HasClass()); 323 324 const RegType& double_reg_type = cache.DoubleLo(); 325 EXPECT_FALSE(double_reg_type.IsUndefined()); 326 EXPECT_FALSE(double_reg_type.IsConflict()); 327 EXPECT_FALSE(double_reg_type.IsZero()); 328 EXPECT_FALSE(double_reg_type.IsOne()); 329 EXPECT_FALSE(double_reg_type.IsLongConstant()); 330 EXPECT_FALSE(double_reg_type.IsBoolean()); 331 EXPECT_FALSE(double_reg_type.IsByte()); 332 EXPECT_FALSE(double_reg_type.IsChar()); 333 EXPECT_FALSE(double_reg_type.IsShort()); 334 
EXPECT_FALSE(double_reg_type.IsInteger()); 335 EXPECT_FALSE(double_reg_type.IsLong()); 336 EXPECT_FALSE(double_reg_type.IsFloat()); 337 EXPECT_TRUE(double_reg_type.IsDouble()); 338 EXPECT_FALSE(double_reg_type.IsReference()); 339 EXPECT_TRUE(double_reg_type.IsLowHalf()); 340 EXPECT_FALSE(double_reg_type.IsHighHalf()); 341 EXPECT_TRUE(double_reg_type.IsLongOrDoubleTypes()); 342 EXPECT_FALSE(double_reg_type.IsReferenceTypes()); 343 EXPECT_FALSE(double_reg_type.IsCategory1Types()); 344 EXPECT_TRUE(double_reg_type.IsCategory2Types()); 345 EXPECT_FALSE(double_reg_type.IsBooleanTypes()); 346 EXPECT_FALSE(double_reg_type.IsByteTypes()); 347 EXPECT_FALSE(double_reg_type.IsShortTypes()); 348 EXPECT_FALSE(double_reg_type.IsCharTypes()); 349 EXPECT_FALSE(double_reg_type.IsIntegralTypes()); 350 EXPECT_FALSE(double_reg_type.IsFloatTypes()); 351 EXPECT_FALSE(double_reg_type.IsLongTypes()); 352 EXPECT_TRUE(double_reg_type.IsDoubleTypes()); 353 EXPECT_FALSE(double_reg_type.IsArrayIndexTypes()); 354 EXPECT_FALSE(double_reg_type.IsNonZeroReferenceTypes()); 355 EXPECT_TRUE(double_reg_type.HasClass()); 356 } 357 358 class RegTypeReferenceTest : public CommonRuntimeTest {}; 359 360 TEST_F(RegTypeReferenceTest, JavalangObjectImprecise) { 361 // Tests matching precisions. A reference type that was created precise doesn't 362 // match the one that is imprecise. 
363 ArenaStack stack(Runtime::Current()->GetArenaPool()); 364 ScopedArenaAllocator allocator(&stack); 365 ScopedObjectAccess soa(Thread::Current()); 366 RegTypeCache cache(true, allocator); 367 const RegType& imprecise_obj = cache.JavaLangObject(false); 368 const RegType& precise_obj = cache.JavaLangObject(true); 369 const RegType& precise_obj_2 = cache.FromDescriptor(nullptr, "Ljava/lang/Object;", true); 370 371 EXPECT_TRUE(precise_obj.Equals(precise_obj_2)); 372 EXPECT_FALSE(imprecise_obj.Equals(precise_obj)); 373 EXPECT_FALSE(imprecise_obj.Equals(precise_obj)); 374 EXPECT_FALSE(imprecise_obj.Equals(precise_obj_2)); 375 } 376 377 TEST_F(RegTypeReferenceTest, UnresolvedType) { 378 // Tests creating unresolved types. Miss for the first time asking the cache and 379 // a hit second time. 380 ArenaStack stack(Runtime::Current()->GetArenaPool()); 381 ScopedArenaAllocator allocator(&stack); 382 ScopedObjectAccess soa(Thread::Current()); 383 RegTypeCache cache(true, allocator); 384 const RegType& ref_type_0 = cache.FromDescriptor(nullptr, "Ljava/lang/DoesNotExist;", true); 385 EXPECT_TRUE(ref_type_0.IsUnresolvedReference()); 386 EXPECT_TRUE(ref_type_0.IsNonZeroReferenceTypes()); 387 388 const RegType& ref_type_1 = cache.FromDescriptor(nullptr, "Ljava/lang/DoesNotExist;", true); 389 EXPECT_TRUE(ref_type_0.Equals(ref_type_1)); 390 391 const RegType& unresolved_super_class = cache.FromUnresolvedSuperClass(ref_type_0); 392 EXPECT_TRUE(unresolved_super_class.IsUnresolvedSuperClass()); 393 EXPECT_TRUE(unresolved_super_class.IsNonZeroReferenceTypes()); 394 } 395 396 TEST_F(RegTypeReferenceTest, UnresolvedUnintializedType) { 397 // Tests creating types uninitialized types from unresolved types. 
398 ArenaStack stack(Runtime::Current()->GetArenaPool()); 399 ScopedArenaAllocator allocator(&stack); 400 ScopedObjectAccess soa(Thread::Current()); 401 RegTypeCache cache(true, allocator); 402 const RegType& ref_type_0 = cache.FromDescriptor(nullptr, "Ljava/lang/DoesNotExist;", true); 403 EXPECT_TRUE(ref_type_0.IsUnresolvedReference()); 404 const RegType& ref_type = cache.FromDescriptor(nullptr, "Ljava/lang/DoesNotExist;", true); 405 EXPECT_TRUE(ref_type_0.Equals(ref_type)); 406 // Create an uninitialized type of this unresolved type 407 const RegType& unresolved_unintialised = cache.Uninitialized(ref_type, 1101ull); 408 EXPECT_TRUE(unresolved_unintialised.IsUnresolvedAndUninitializedReference()); 409 EXPECT_TRUE(unresolved_unintialised.IsUninitializedTypes()); 410 EXPECT_TRUE(unresolved_unintialised.IsNonZeroReferenceTypes()); 411 // Create an uninitialized type of this unresolved type with different PC 412 const RegType& ref_type_unresolved_unintialised_1 = cache.Uninitialized(ref_type, 1102ull); 413 EXPECT_TRUE(unresolved_unintialised.IsUnresolvedAndUninitializedReference()); 414 EXPECT_FALSE(unresolved_unintialised.Equals(ref_type_unresolved_unintialised_1)); 415 // Create an uninitialized type of this unresolved type with the same PC 416 const RegType& unresolved_unintialised_2 = cache.Uninitialized(ref_type, 1101ull); 417 EXPECT_TRUE(unresolved_unintialised.Equals(unresolved_unintialised_2)); 418 } 419 420 TEST_F(RegTypeReferenceTest, Dump) { 421 // Tests types for proper Dump messages. 
422 ArenaStack stack(Runtime::Current()->GetArenaPool()); 423 ScopedArenaAllocator allocator(&stack); 424 ScopedObjectAccess soa(Thread::Current()); 425 RegTypeCache cache(true, allocator); 426 const RegType& unresolved_ref = cache.FromDescriptor(nullptr, "Ljava/lang/DoesNotExist;", true); 427 const RegType& unresolved_ref_another = cache.FromDescriptor(nullptr, "Ljava/lang/DoesNotExistEither;", true); 428 const RegType& resolved_ref = cache.JavaLangString(); 429 const RegType& resolved_unintialiesd = cache.Uninitialized(resolved_ref, 10); 430 const RegType& unresolved_unintialized = cache.Uninitialized(unresolved_ref, 12); 431 const RegType& unresolved_merged = cache.FromUnresolvedMerge( 432 unresolved_ref, unresolved_ref_another, /* verifier */ nullptr); 433 434 std::string expected = "Unresolved Reference: java.lang.DoesNotExist"; 435 EXPECT_EQ(expected, unresolved_ref.Dump()); 436 expected = "Precise Reference: java.lang.String"; 437 EXPECT_EQ(expected, resolved_ref.Dump()); 438 expected ="Uninitialized Reference: java.lang.String Allocation PC: 10"; 439 EXPECT_EQ(expected, resolved_unintialiesd.Dump()); 440 expected = "Unresolved And Uninitialized Reference: java.lang.DoesNotExist Allocation PC: 12"; 441 EXPECT_EQ(expected, unresolved_unintialized.Dump()); 442 expected = "UnresolvedMergedReferences(Zero/null | Unresolved Reference: java.lang.DoesNotExist, Unresolved Reference: java.lang.DoesNotExistEither)"; 443 EXPECT_EQ(expected, unresolved_merged.Dump()); 444 } 445 446 TEST_F(RegTypeReferenceTest, JavalangString) { 447 // Add a class to the cache then look for the same class and make sure it is a 448 // Hit the second time. Then check for the same effect when using 449 // The JavaLangObject method instead of FromDescriptor. String class is final. 
450 ArenaStack stack(Runtime::Current()->GetArenaPool()); 451 ScopedArenaAllocator allocator(&stack); 452 ScopedObjectAccess soa(Thread::Current()); 453 RegTypeCache cache(true, allocator); 454 const RegType& ref_type = cache.JavaLangString(); 455 const RegType& ref_type_2 = cache.JavaLangString(); 456 const RegType& ref_type_3 = cache.FromDescriptor(nullptr, "Ljava/lang/String;", true); 457 458 EXPECT_TRUE(ref_type.Equals(ref_type_2)); 459 EXPECT_TRUE(ref_type_2.Equals(ref_type_3)); 460 EXPECT_TRUE(ref_type.IsPreciseReference()); 461 462 // Create an uninitialized type out of this: 463 const RegType& ref_type_unintialized = cache.Uninitialized(ref_type, 0110ull); 464 EXPECT_TRUE(ref_type_unintialized.IsUninitializedReference()); 465 EXPECT_FALSE(ref_type_unintialized.IsUnresolvedAndUninitializedReference()); 466 } 467 468 TEST_F(RegTypeReferenceTest, JavalangObject) { 469 // Add a class to the cache then look for the same class and make sure it is a 470 // Hit the second time. Then I am checking for the same effect when using 471 // The JavaLangObject method instead of FromDescriptor. Object Class in not final. 472 ArenaStack stack(Runtime::Current()->GetArenaPool()); 473 ScopedArenaAllocator allocator(&stack); 474 ScopedObjectAccess soa(Thread::Current()); 475 RegTypeCache cache(true, allocator); 476 const RegType& ref_type = cache.JavaLangObject(true); 477 const RegType& ref_type_2 = cache.JavaLangObject(true); 478 const RegType& ref_type_3 = cache.FromDescriptor(nullptr, "Ljava/lang/Object;", true); 479 480 EXPECT_TRUE(ref_type.Equals(ref_type_2)); 481 EXPECT_TRUE(ref_type_3.Equals(ref_type_2)); 482 EXPECT_EQ(ref_type.GetId(), ref_type_3.GetId()); 483 } 484 TEST_F(RegTypeReferenceTest, Merging) { 485 // Tests merging logic 486 // String and object , LUB is object. 
487 ScopedObjectAccess soa(Thread::Current()); 488 ArenaStack stack(Runtime::Current()->GetArenaPool()); 489 ScopedArenaAllocator allocator(&stack); 490 RegTypeCache cache_new(true, allocator); 491 const RegType& string = cache_new.JavaLangString(); 492 const RegType& Object = cache_new.JavaLangObject(true); 493 EXPECT_TRUE(string.Merge(Object, &cache_new, /* verifier */ nullptr).IsJavaLangObject()); 494 // Merge two unresolved types. 495 const RegType& ref_type_0 = cache_new.FromDescriptor(nullptr, "Ljava/lang/DoesNotExist;", true); 496 EXPECT_TRUE(ref_type_0.IsUnresolvedReference()); 497 const RegType& ref_type_1 = cache_new.FromDescriptor(nullptr, "Ljava/lang/DoesNotExistToo;", true); 498 EXPECT_FALSE(ref_type_0.Equals(ref_type_1)); 499 500 const RegType& merged = ref_type_1.Merge(ref_type_0, &cache_new, /* verifier */ nullptr); 501 EXPECT_TRUE(merged.IsUnresolvedMergedReference()); 502 RegType& merged_nonconst = const_cast<RegType&>(merged); 503 504 const BitVector& unresolved_parts = 505 down_cast<UnresolvedMergedType*>(&merged_nonconst)->GetUnresolvedTypes(); 506 EXPECT_TRUE(unresolved_parts.IsBitSet(ref_type_0.GetId())); 507 EXPECT_TRUE(unresolved_parts.IsBitSet(ref_type_1.GetId())); 508 } 509 510 TEST_F(RegTypeTest, MergingFloat) { 511 // Testing merging logic with float and float constants. 512 ArenaStack stack(Runtime::Current()->GetArenaPool()); 513 ScopedArenaAllocator allocator(&stack); 514 ScopedObjectAccess soa(Thread::Current()); 515 RegTypeCache cache_new(true, allocator); 516 517 constexpr int32_t kTestConstantValue = 10; 518 const RegType& float_type = cache_new.Float(); 519 const RegType& precise_cst = cache_new.FromCat1Const(kTestConstantValue, true); 520 const RegType& imprecise_cst = cache_new.FromCat1Const(kTestConstantValue, false); 521 { 522 // float MERGE precise cst => float. 
523 const RegType& merged = float_type.Merge(precise_cst, &cache_new, /* verifier */ nullptr); 524 EXPECT_TRUE(merged.IsFloat()); 525 } 526 { 527 // precise cst MERGE float => float. 528 const RegType& merged = precise_cst.Merge(float_type, &cache_new, /* verifier */ nullptr); 529 EXPECT_TRUE(merged.IsFloat()); 530 } 531 { 532 // float MERGE imprecise cst => float. 533 const RegType& merged = float_type.Merge(imprecise_cst, &cache_new, /* verifier */ nullptr); 534 EXPECT_TRUE(merged.IsFloat()); 535 } 536 { 537 // imprecise cst MERGE float => float. 538 const RegType& merged = imprecise_cst.Merge(float_type, &cache_new, /* verifier */ nullptr); 539 EXPECT_TRUE(merged.IsFloat()); 540 } 541 } 542 543 TEST_F(RegTypeTest, MergingLong) { 544 // Testing merging logic with long and long constants. 545 ArenaStack stack(Runtime::Current()->GetArenaPool()); 546 ScopedArenaAllocator allocator(&stack); 547 ScopedObjectAccess soa(Thread::Current()); 548 RegTypeCache cache_new(true, allocator); 549 550 constexpr int32_t kTestConstantValue = 10; 551 const RegType& long_lo_type = cache_new.LongLo(); 552 const RegType& long_hi_type = cache_new.LongHi(); 553 const RegType& precise_cst_lo = cache_new.FromCat2ConstLo(kTestConstantValue, true); 554 const RegType& imprecise_cst_lo = cache_new.FromCat2ConstLo(kTestConstantValue, false); 555 const RegType& precise_cst_hi = cache_new.FromCat2ConstHi(kTestConstantValue, true); 556 const RegType& imprecise_cst_hi = cache_new.FromCat2ConstHi(kTestConstantValue, false); 557 { 558 // lo MERGE precise cst lo => lo. 559 const RegType& merged = long_lo_type.Merge(precise_cst_lo, &cache_new, /* verifier */ nullptr); 560 EXPECT_TRUE(merged.IsLongLo()); 561 } 562 { 563 // precise cst lo MERGE lo => lo. 564 const RegType& merged = precise_cst_lo.Merge(long_lo_type, &cache_new, /* verifier */ nullptr); 565 EXPECT_TRUE(merged.IsLongLo()); 566 } 567 { 568 // lo MERGE imprecise cst lo => lo. 
569 const RegType& merged = long_lo_type.Merge( 570 imprecise_cst_lo, &cache_new, /* verifier */ nullptr); 571 EXPECT_TRUE(merged.IsLongLo()); 572 } 573 { 574 // imprecise cst lo MERGE lo => lo. 575 const RegType& merged = imprecise_cst_lo.Merge( 576 long_lo_type, &cache_new, /* verifier */ nullptr); 577 EXPECT_TRUE(merged.IsLongLo()); 578 } 579 { 580 // hi MERGE precise cst hi => hi. 581 const RegType& merged = long_hi_type.Merge(precise_cst_hi, &cache_new, /* verifier */ nullptr); 582 EXPECT_TRUE(merged.IsLongHi()); 583 } 584 { 585 // precise cst hi MERGE hi => hi. 586 const RegType& merged = precise_cst_hi.Merge(long_hi_type, &cache_new, /* verifier */ nullptr); 587 EXPECT_TRUE(merged.IsLongHi()); 588 } 589 { 590 // hi MERGE imprecise cst hi => hi. 591 const RegType& merged = long_hi_type.Merge( 592 imprecise_cst_hi, &cache_new, /* verifier */ nullptr); 593 EXPECT_TRUE(merged.IsLongHi()); 594 } 595 { 596 // imprecise cst hi MERGE hi => hi. 597 const RegType& merged = imprecise_cst_hi.Merge( 598 long_hi_type, &cache_new, /* verifier */ nullptr); 599 EXPECT_TRUE(merged.IsLongHi()); 600 } 601 } 602 603 TEST_F(RegTypeTest, MergingDouble) { 604 // Testing merging logic with double and double constants. 605 ArenaStack stack(Runtime::Current()->GetArenaPool()); 606 ScopedArenaAllocator allocator(&stack); 607 ScopedObjectAccess soa(Thread::Current()); 608 RegTypeCache cache_new(true, allocator); 609 610 constexpr int32_t kTestConstantValue = 10; 611 const RegType& double_lo_type = cache_new.DoubleLo(); 612 const RegType& double_hi_type = cache_new.DoubleHi(); 613 const RegType& precise_cst_lo = cache_new.FromCat2ConstLo(kTestConstantValue, true); 614 const RegType& imprecise_cst_lo = cache_new.FromCat2ConstLo(kTestConstantValue, false); 615 const RegType& precise_cst_hi = cache_new.FromCat2ConstHi(kTestConstantValue, true); 616 const RegType& imprecise_cst_hi = cache_new.FromCat2ConstHi(kTestConstantValue, false); 617 { 618 // lo MERGE precise cst lo => lo. 
619 const RegType& merged = double_lo_type.Merge( 620 precise_cst_lo, &cache_new, /* verifier */ nullptr); 621 EXPECT_TRUE(merged.IsDoubleLo()); 622 } 623 { 624 // precise cst lo MERGE lo => lo. 625 const RegType& merged = precise_cst_lo.Merge( 626 double_lo_type, &cache_new, /* verifier */ nullptr); 627 EXPECT_TRUE(merged.IsDoubleLo()); 628 } 629 { 630 // lo MERGE imprecise cst lo => lo. 631 const RegType& merged = double_lo_type.Merge( 632 imprecise_cst_lo, &cache_new, /* verifier */ nullptr); 633 EXPECT_TRUE(merged.IsDoubleLo()); 634 } 635 { 636 // imprecise cst lo MERGE lo => lo. 637 const RegType& merged = imprecise_cst_lo.Merge( 638 double_lo_type, &cache_new, /* verifier */ nullptr); 639 EXPECT_TRUE(merged.IsDoubleLo()); 640 } 641 { 642 // hi MERGE precise cst hi => hi. 643 const RegType& merged = double_hi_type.Merge( 644 precise_cst_hi, &cache_new, /* verifier */ nullptr); 645 EXPECT_TRUE(merged.IsDoubleHi()); 646 } 647 { 648 // precise cst hi MERGE hi => hi. 649 const RegType& merged = precise_cst_hi.Merge( 650 double_hi_type, &cache_new, /* verifier */ nullptr); 651 EXPECT_TRUE(merged.IsDoubleHi()); 652 } 653 { 654 // hi MERGE imprecise cst hi => hi. 655 const RegType& merged = double_hi_type.Merge( 656 imprecise_cst_hi, &cache_new, /* verifier */ nullptr); 657 EXPECT_TRUE(merged.IsDoubleHi()); 658 } 659 { 660 // imprecise cst hi MERGE hi => hi. 661 const RegType& merged = imprecise_cst_hi.Merge( 662 double_hi_type, &cache_new, /* verifier */ nullptr); 663 EXPECT_TRUE(merged.IsDoubleHi()); 664 } 665 } 666 667 TEST_F(RegTypeTest, MergeSemiLatticeRef) { 668 // (Incomplete) semilattice: 669 // 670 // Excluded for now: * category-2 types 671 // * interfaces 672 // * all of category-1 primitive types, including constants. 673 // This is to demonstrate/codify the reference side, mostly. 674 // 675 // Note: It is not a real semilattice because int = float makes this wonky. 
  //
  // Covering lattice exercised below (reconstruction — the original ASCII
  // diagram's alignment was lost; the ADD_EDGE list further down is the
  // authoritative covering relation):
  //
  //  Conflict
  //    |
  //  #---------#--------------------------#-----------------------------#
  //  |         |                          |
  //  |         |                        Object
  //  |         |                          |
  // int  uninit types     #---------------#--------#------------------#---------#
  //  |         |          |                        |                  |         |
  //  |  unresolved-merge-types                  Object[]           char[]    byte[]
  //  |         |          |       |                |                  |         |
  //  |  unresolved-types  |       #------Number    #---------#        |         |
  //  |         |          |       |       |        |         |        |         |
  //  |         |          #--------Integer    Number[]  Number[][]    |         |
  //  |         |                  |        |       |        |         |         |
  //  |         #---------------#--------#---------#--------#---------#
  //  |                         |
  //  |                       null
  //  |                         |
  //  #--------------------------#----------------------------#
  //                             |
  //                             0

  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());

  // We cannot allow moving GC. Otherwise we'd have to ensure the reg types are updated (reference
  // reg types store a class pointer in a GCRoot, which is normally updated through active verifiers
  // being registered with their thread), which is unnecessarily complex.
  Runtime::Current()->GetHeap()->IncrementDisableMovingGC(soa.Self());

  RegTypeCache cache(true, allocator);

  // Basic lattice elements: bottom (0/zero), null, top (Conflict), and int.
  const RegType& conflict = cache.Conflict();
  const RegType& zero = cache.Zero();
  const RegType& null = cache.Null();
  const RegType& int_type = cache.Integer();

  const RegType& obj = cache.JavaLangObject(false);
  const RegType& obj_arr = cache.From(nullptr, "[Ljava/lang/Object;", false);
  ASSERT_FALSE(obj_arr.IsUnresolvedReference());

  // Two distinct unresolved references and their merged form.
  const RegType& unresolved_a = cache.From(nullptr, "Ldoes/not/resolve/A;", false);
  ASSERT_TRUE(unresolved_a.IsUnresolvedReference());
  const RegType& unresolved_b = cache.From(nullptr, "Ldoes/not/resolve/B;", false);
  ASSERT_TRUE(unresolved_b.IsUnresolvedReference());
  const RegType& unresolved_ab = cache.FromUnresolvedMerge(unresolved_a, unresolved_b, nullptr);
  ASSERT_TRUE(unresolved_ab.IsUnresolvedMergedReference());

  // Uninitialized (pre-<init>) types for resolved classes, distinguished by
  // allocation pc.
  const RegType& uninit_this = cache.UninitializedThisArgument(obj);
  const RegType& uninit_obj_0 = cache.Uninitialized(obj, 0u);
  const RegType& uninit_obj_1 = cache.Uninitialized(obj, 1u);

  // Uninitialized types for unresolved classes.
  const RegType& uninit_unres_this = cache.UninitializedThisArgument(unresolved_a);
  const RegType& uninit_unres_a_0 = cache.Uninitialized(unresolved_a, 0);
  const RegType& uninit_unres_b_0 = cache.Uninitialized(unresolved_b, 0);

  // Resolved classes in a known hierarchy: Integer <: Number <: Object.
  const RegType& number = cache.From(nullptr, "Ljava/lang/Number;", false);
  ASSERT_FALSE(number.IsUnresolvedReference());
  const RegType& integer = cache.From(nullptr, "Ljava/lang/Integer;", false);
  ASSERT_FALSE(integer.IsUnresolvedReference());

  const RegType& uninit_number_0 = cache.Uninitialized(number, 0u);
  const RegType& uninit_integer_0 = cache.Uninitialized(integer, 0u);

  // Reference arrays (covariant) and primitive arrays (not covariant).
  const RegType& number_arr = cache.From(nullptr, "[Ljava/lang/Number;", false);
  ASSERT_FALSE(number_arr.IsUnresolvedReference());
  const RegType& integer_arr = cache.From(nullptr, "[Ljava/lang/Integer;", false);
  ASSERT_FALSE(integer_arr.IsUnresolvedReference());

  const RegType& number_arr_arr = cache.From(nullptr, "[[Ljava/lang/Number;", false);
  ASSERT_FALSE(number_arr_arr.IsUnresolvedReference());

  const RegType& char_arr = cache.From(nullptr, "[C", false);
  ASSERT_FALSE(char_arr.IsUnresolvedReference());
  const RegType& byte_arr = cache.From(nullptr, "[B", false);
  ASSERT_FALSE(byte_arr.IsUnresolvedReference());

  // Merges of unresolved types with resolved classes (Number/Integer).
  const RegType& unresolved_a_num = cache.FromUnresolvedMerge(unresolved_a, number, nullptr);
  ASSERT_TRUE(unresolved_a_num.IsUnresolvedMergedReference());
  const RegType& unresolved_b_num = cache.FromUnresolvedMerge(unresolved_b, number, nullptr);
  ASSERT_TRUE(unresolved_b_num.IsUnresolvedMergedReference());
  const RegType& unresolved_ab_num = cache.FromUnresolvedMerge(unresolved_ab, number, nullptr);
  ASSERT_TRUE(unresolved_ab_num.IsUnresolvedMergedReference());

  const RegType& unresolved_a_int = cache.FromUnresolvedMerge(unresolved_a, integer, nullptr);
  ASSERT_TRUE(unresolved_a_int.IsUnresolvedMergedReference());
  const RegType& unresolved_b_int = cache.FromUnresolvedMerge(unresolved_b, integer, nullptr);
  ASSERT_TRUE(unresolved_b_int.IsUnresolvedMergedReference());
  const RegType& unresolved_ab_int = cache.FromUnresolvedMerge(unresolved_ab, integer, nullptr);
  ASSERT_TRUE(unresolved_ab_int.IsUnresolvedMergedReference());

  // Group the types so the "all" universe can be assembled below.
  std::vector<const RegType*> uninitialized_types = {
      &uninit_this, &uninit_obj_0, &uninit_obj_1, &uninit_number_0, &uninit_integer_0
  };
  std::vector<const RegType*> unresolved_types = {
      &unresolved_a,
      &unresolved_b,
      &unresolved_ab,
      &unresolved_a_num,
      &unresolved_b_num,
      &unresolved_ab_num,
      &unresolved_a_int,
      &unresolved_b_int,
      &unresolved_ab_int
  };
  std::vector<const RegType*> uninit_unresolved_types = {
      &uninit_unres_this, &uninit_unres_a_0, &uninit_unres_b_0
  };
  std::vector<const RegType*> plain_nonobj_classes = { &number, &integer };
  std::vector<const RegType*> plain_nonobj_arr_classes = {
      &number_arr,
      &number_arr_arr,
      &integer_arr,
      &char_arr,
  };
  // std::vector<const RegType*> others = { &conflict, &zero, &null, &obj, &int_type };

  std::vector<const RegType*> all_minus_uninit_conflict;
  all_minus_uninit_conflict.insert(all_minus_uninit_conflict.end(),
                                   unresolved_types.begin(),
                                   unresolved_types.end());
  all_minus_uninit_conflict.insert(all_minus_uninit_conflict.end(),
                                   plain_nonobj_classes.begin(),
                                   plain_nonobj_classes.end());
  all_minus_uninit_conflict.insert(all_minus_uninit_conflict.end(),
                                   plain_nonobj_arr_classes.begin(),
                                   plain_nonobj_arr_classes.end());
  all_minus_uninit_conflict.push_back(&zero);
  all_minus_uninit_conflict.push_back(&null);
  all_minus_uninit_conflict.push_back(&obj);

  std::vector<const RegType*> all_minus_uninit;
  all_minus_uninit.insert(all_minus_uninit.end(),
                          all_minus_uninit_conflict.begin(),
                          all_minus_uninit_conflict.end());
  all_minus_uninit.push_back(&conflict);


  std::vector<const RegType*> all;
  all.insert(all.end(), uninitialized_types.begin(), uninitialized_types.end());
  all.insert(all.end(), uninit_unresolved_types.begin(), uninit_unresolved_types.end());
  all.insert(all.end(), all_minus_uninit.begin(), all_minus_uninit.end());
  all.push_back(&int_type);

  // Check that merging in1 with in2 yields exactly expected_out. Pointer
  // comparison is intentional: the cache canonicalizes reg types.
  auto check = [&](const RegType& in1, const RegType& in2, const RegType& expected_out)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    const RegType& merge_result = in1.SafeMerge(in2, &cache, nullptr);
    EXPECT_EQ(&expected_out, &merge_result)
        << in1.Dump() << " x " << in2.Dump() << " = " << merge_result.Dump()
        << " != " << expected_out.Dump();
  };

  // Identity.
  {
    for (auto r : all) {
      check(*r, *r, *r);
    }
  }

  // Define a covering relation through a list of Edges. We'll then derive LUBs from this and
  // create checks for every pair of types.

  // Directed edge from -> to: "to" directly covers "from" in the lattice.
  struct Edge {
    const RegType& from;
    const RegType& to;

    Edge(const RegType& from_, const RegType& to_) : from(from_), to(to_) {}
  };
  std::vector<Edge> edges;
#define ADD_EDGE(from, to) edges.emplace_back((from), (to))

  // To Conflict.
  {
    for (auto r : uninitialized_types) {
      ADD_EDGE(*r, conflict);
    }
    for (auto r : uninit_unresolved_types) {
      ADD_EDGE(*r, conflict);
    }
    ADD_EDGE(obj, conflict);
    ADD_EDGE(int_type, conflict);
  }

  ADD_EDGE(zero, null);

  // Unresolved.
  {
    ADD_EDGE(null, unresolved_a);
    ADD_EDGE(null, unresolved_b);
    ADD_EDGE(unresolved_a, unresolved_ab);
    ADD_EDGE(unresolved_b, unresolved_ab);

    ADD_EDGE(number, unresolved_a_num);
    ADD_EDGE(unresolved_a, unresolved_a_num);
    ADD_EDGE(number, unresolved_b_num);
    ADD_EDGE(unresolved_b, unresolved_b_num);
    ADD_EDGE(number, unresolved_ab_num);
    ADD_EDGE(unresolved_a_num, unresolved_ab_num);
    ADD_EDGE(unresolved_b_num, unresolved_ab_num);
    ADD_EDGE(unresolved_ab, unresolved_ab_num);

    ADD_EDGE(integer, unresolved_a_int);
    ADD_EDGE(unresolved_a, unresolved_a_int);
    ADD_EDGE(integer, unresolved_b_int);
    ADD_EDGE(unresolved_b, unresolved_b_int);
    ADD_EDGE(integer, unresolved_ab_int);
    ADD_EDGE(unresolved_a_int, unresolved_ab_int);
    ADD_EDGE(unresolved_b_int, unresolved_ab_int);
    ADD_EDGE(unresolved_ab, unresolved_ab_int);

    ADD_EDGE(unresolved_a_int, unresolved_a_num);
    ADD_EDGE(unresolved_b_int, unresolved_b_num);
    ADD_EDGE(unresolved_ab_int, unresolved_ab_num);

    ADD_EDGE(unresolved_ab_num, obj);
  }

  // Classes.
  {
    ADD_EDGE(null, integer);
    ADD_EDGE(integer, number);
    ADD_EDGE(number, obj);
  }

  // Arrays.
  {
    ADD_EDGE(integer_arr, number_arr);
    ADD_EDGE(number_arr, obj_arr);
    ADD_EDGE(obj_arr, obj);
    ADD_EDGE(number_arr_arr, obj_arr);

    ADD_EDGE(char_arr, obj);
    ADD_EDGE(byte_arr, obj);

    ADD_EDGE(null, integer_arr);
    ADD_EDGE(null, number_arr_arr);
    ADD_EDGE(null, char_arr);
    ADD_EDGE(null, byte_arr);
  }

  // Primitive.
  {
    ADD_EDGE(zero, int_type);
  }
#undef ADD_EDGE

  // Create merge triples by using the covering relation established by edges to derive the
  // expected merge for any pair of types.

  // Expect merge(in1, in2) == out.
  struct MergeExpectation {
    const RegType& in1;
    const RegType& in2;
    const RegType& out;

    MergeExpectation(const RegType& in1_, const RegType& in2_, const RegType& out_)
        : in1(in1_), in2(in2_), out(out_) {}
  };
  std::vector<MergeExpectation> expectations;

  for (auto r1 : all) {
    for (auto r2 : all) {
      if (r1 == r2) {
        continue;
      }

      // Very simple algorithm here that is usually used with adjacency lists. Our graph is
      // small, it didn't make sense to have lists per node. Thus, the regular guarantees
      // of O(n + |e|) don't apply, but that is acceptable.
      //
      // To compute r1 lub r2 = merge(r1, r2):
      //   1) Generate the reachable set of r1, name it grey.
      //   2) Mark all grey reachable nodes of r2 as black.
      //   3) Find black nodes with no in-edges from other black nodes.
      //   4) If |3)| == 1, that's the lub.

      // Generic BFS of the graph induced by edges, starting at start. new_node will be called
      // with any discovered node, in order.
      auto bfs = [&](auto new_node, const RegType* start) {
        std::unordered_set<const RegType*> seen;
        std::queue<const RegType*> work_list;
        work_list.push(start);
        while (!work_list.empty()) {
          const RegType* cur = work_list.front();
          work_list.pop();
          auto it = seen.find(cur);
          if (it != seen.end()) {
            continue;
          }
          seen.insert(cur);
          new_node(cur);

          // Linear scan over all edges stands in for an adjacency list (see
          // note above).
          for (const Edge& edge : edges) {
            if (&edge.from == cur) {
              work_list.push(&edge.to);
            }
          }
        }
      };

      std::unordered_set<const RegType*> grey;
      auto compute_grey = [&](const RegType* cur) {
        grey.insert(cur);  // Mark discovered node as grey.
      };
      bfs(compute_grey, r1);

      std::set<const RegType*> black;
      auto compute_black = [&](const RegType* cur) {
        // Mark discovered grey node as black.
        if (grey.find(cur) != grey.end()) {
          black.insert(cur);
        }
      };
      bfs(compute_black, r2);

      std::set<const RegType*> no_in_edge(black);  // Copy of black, remove nodes with in-edges.
      for (auto r : black) {
        for (Edge& e : edges) {
          if (&e.from == r) {
            no_in_edge.erase(&e.to);  // It doesn't matter whether "to" is black or not, just
                                      // attempt to remove it.
          }
        }
      }

      // Helper to print sets when something went wrong.
      auto print_set = [](auto& container) REQUIRES_SHARED(Locks::mutator_lock_) {
        std::string result;
        for (auto r : container) {
          result.append(" + ");
          result.append(r->Dump());
        }
        return result;
      };
      // Exactly one minimal common reachable node means the LUB is unique.
      ASSERT_EQ(no_in_edge.size(), 1u) << r1->Dump() << " u " << r2->Dump()
                                       << " grey=" << print_set(grey)
                                       << " black=" << print_set(black)
                                       << " no-in-edge=" << print_set(no_in_edge);
      expectations.emplace_back(*r1, *r2, **no_in_edge.begin());
    }
  }

  // Evaluate merge expectations. The merge is expected to be commutative.
  for (auto& triple : expectations) {
    check(triple.in1, triple.in2, triple.out);
    check(triple.in2, triple.in1, triple.out);
  }

  Runtime::Current()->GetHeap()->DecrementDisableMovingGC(soa.Self());
}

TEST_F(RegTypeTest, ConstPrecision) {
  // Tests that precise and imprecise constants of the same value are distinct
  // reg types.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache_new(true, allocator);
  const RegType& imprecise_const = cache_new.FromCat1Const(10, false);
  const RegType& precise_const = cache_new.FromCat1Const(10, true);

  EXPECT_TRUE(imprecise_const.IsImpreciseConstant());
  EXPECT_TRUE(precise_const.IsPreciseConstant());
  EXPECT_FALSE(imprecise_const.Equals(precise_const));
}

// Fixture that configures the runtime so the heap can be filled to provoke
// OOM situations during verification.
class RegTypeOOMTest : public RegTypeTest {
 protected:
  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    SetUpRuntimeOptionsForFillHeap(options);

    // We must not appear to be a compiler, or we'll abort on the host.
    callbacks_.reset();
  }
};

TEST_F(RegTypeOOMTest, ClassJoinOOM) {
  // TODO: Figure out why FillHeap isn't good enough under CMS.
  TEST_DISABLED_WITHOUT_BAKER_READ_BARRIERS();

  // Tests that we don't abort with OOMs.

  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());

  // We cannot allow moving GC. Otherwise we'd have to ensure the reg types are updated (reference
  // reg types store a class pointer in a GCRoot, which is normally updated through active verifiers
  // being registered with their thread), which is unnecessarily complex.
  Runtime::Current()->GetHeap()->IncrementDisableMovingGC(soa.Self());

  // We merge nested array of primitive wrappers. These have a join type of an array of Number of
  // the same depth. We start with depth five, as we want at least two newly created classes to
  // test recursion (it's just more likely that nobody uses such deep arrays in runtime bringup).
  constexpr const char* kIntArrayFive = "[[[[[Ljava/lang/Integer;";
  constexpr const char* kFloatArrayFive = "[[[[[Ljava/lang/Float;";
  constexpr const char* kNumberArrayFour = "[[[[Ljava/lang/Number;";
  constexpr const char* kNumberArrayFive = "[[[[[Ljava/lang/Number;";

  RegTypeCache cache(true, allocator);
  const RegType& int_array_array = cache.From(nullptr, kIntArrayFive, false);
  ASSERT_TRUE(int_array_array.HasClass());
  const RegType& float_array_array = cache.From(nullptr, kFloatArrayFive, false);
  ASSERT_TRUE(float_array_array.HasClass());

  // Check assumptions: the joined classes don't exist, yet.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ASSERT_TRUE(class_linker->LookupClass(soa.Self(), kNumberArrayFour, nullptr) == nullptr);
  ASSERT_TRUE(class_linker->LookupClass(soa.Self(), kNumberArrayFive, nullptr) == nullptr);

  // Fill the heap so the class-join allocation below cannot succeed.
  VariableSizedHandleScope hs(soa.Self());
  FillHeap(soa.Self(), class_linker, &hs);

  // With the heap full, the join must degrade to an unresolved reference
  // instead of aborting.
  const RegType& join_type = int_array_array.Merge(float_array_array, &cache, nullptr);
  ASSERT_TRUE(join_type.IsUnresolvedReference());

  Runtime::Current()->GetHeap()->DecrementDisableMovingGC(soa.Self());
}

}  // namespace verifier
}  // namespace art