/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "stack_map.h"

#include "art_method.h"
#include "base/arena_bit_vector.h"
#include "base/malloc_arena_pool.h"
#include "stack_map_stream.h"

#include "gtest/gtest.h"

namespace art {

// Check that the stack mask of the given stack map is identical
// to the given bit vector. Returns true if they are the same.
static bool CheckStackMask(
    const CodeInfo& code_info,
    const StackMap& stack_map,
    const BitVector& bit_vector) {
  BitMemoryRegion stack_mask = code_info.GetStackMaskOf(stack_map);
  if (bit_vector.GetNumberOfBits() > stack_mask.size_in_bits()) {
    return false;
  }
  for (size_t i = 0; i < stack_mask.size_in_bits(); ++i) {
    if (stack_mask.LoadBit(i) != bit_vector.IsBitSet(i)) {
      return false;
    }
  }
  return true;
}

using Kind = DexRegisterLocation::Kind;

constexpr static uint32_t kPcAlign = GetInstructionSetInstructionAlignment(kRuntimeISA);

TEST(StackMapTest, Test1) {
  MallocArenaPool pool;
  ArenaStack arena_stack(&pool);
  ScopedArenaAllocator allocator(&arena_stack);
  StackMapStream stream(&allocator, kRuntimeISA);
  stream.BeginMethod(32, 0, 0, 2);

  ArenaBitVector sp_mask(&allocator, 0, false);
  size_t number_of_dex_registers = 2;
  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask);
  stream.AddDexRegisterEntry(Kind::kInStack, 0);     // Short location.
  stream.AddDexRegisterEntry(Kind::kConstant, -2);   // Short location.
  stream.EndStackMapEntry();

  stream.EndMethod();
  ScopedArenaVector<uint8_t> memory = stream.Encode();

  CodeInfo code_info(memory.data());
  ASSERT_EQ(1u, code_info.GetNumberOfStackMaps());

  uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
  ASSERT_EQ(2u, number_of_catalog_entries);

  StackMap stack_map = code_info.GetStackMapAt(0);
  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
  ASSERT_EQ(0u, stack_map.GetDexPc());
  ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
  ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));

  ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask));

  ASSERT_TRUE(stack_map.HasDexRegisterMap());
  DexRegisterMap dex_register_map = code_info.GetDexRegisterMapOf(stack_map);
  ASSERT_EQ(number_of_dex_registers, dex_register_map.size());
  ASSERT_TRUE(dex_register_map[0].IsLive());
  ASSERT_TRUE(dex_register_map[1].IsLive());
  ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());

  ASSERT_EQ(Kind::kInStack, dex_register_map[0].GetKind());
  ASSERT_EQ(Kind::kConstant, dex_register_map[1].GetKind());
  ASSERT_EQ(0, dex_register_map[0].GetStackOffsetInBytes());
  ASSERT_EQ(-2, dex_register_map[1].GetConstant());

  DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(0);
  DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(1);
  ASSERT_EQ(Kind::kInStack, location0.GetKind());
  ASSERT_EQ(Kind::kConstant, location1.GetKind());
  ASSERT_EQ(0, location0.GetValue());
  ASSERT_EQ(-2, location1.GetValue());

  ASSERT_FALSE(stack_map.HasInlineInfo());
}

TEST(StackMapTest, Test2) {
  MallocArenaPool pool;
  ArenaStack arena_stack(&pool);
  ScopedArenaAllocator allocator(&arena_stack);
  StackMapStream stream(&allocator, kRuntimeISA);
  stream.BeginMethod(32, 0, 0, 2);
  ArtMethod art_method;

  ArenaBitVector sp_mask1(&allocator, 0, true);
  sp_mask1.SetBit(2);
  sp_mask1.SetBit(4);
  size_t number_of_dex_registers = 2;
  size_t number_of_dex_registers_in_inline_info = 0;
  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask1);
  stream.AddDexRegisterEntry(Kind::kInStack, 0);     // Short location.
  stream.AddDexRegisterEntry(Kind::kConstant, -2);   // Large location.
  stream.BeginInlineInfoEntry(&art_method, 3, number_of_dex_registers_in_inline_info);
  stream.EndInlineInfoEntry();
  stream.BeginInlineInfoEntry(&art_method, 2, number_of_dex_registers_in_inline_info);
  stream.EndInlineInfoEntry();
  stream.EndStackMapEntry();

  ArenaBitVector sp_mask2(&allocator, 0, true);
  sp_mask2.SetBit(3);
  sp_mask2.SetBit(8);
  stream.BeginStackMapEntry(1, 128 * kPcAlign, 0xFF, &sp_mask2);
  stream.AddDexRegisterEntry(Kind::kInRegister, 18);     // Short location.
  stream.AddDexRegisterEntry(Kind::kInFpuRegister, 3);   // Short location.
  stream.EndStackMapEntry();

  ArenaBitVector sp_mask3(&allocator, 0, true);
  sp_mask3.SetBit(1);
  sp_mask3.SetBit(5);
  stream.BeginStackMapEntry(2, 192 * kPcAlign, 0xAB, &sp_mask3);
  stream.AddDexRegisterEntry(Kind::kInRegister, 6);       // Short location.
  stream.AddDexRegisterEntry(Kind::kInRegisterHigh, 8);   // Short location.
  stream.EndStackMapEntry();

  ArenaBitVector sp_mask4(&allocator, 0, true);
  sp_mask4.SetBit(6);
  sp_mask4.SetBit(7);
  stream.BeginStackMapEntry(3, 256 * kPcAlign, 0xCD, &sp_mask4);
  stream.AddDexRegisterEntry(Kind::kInFpuRegister, 3);      // Short location, same in stack map 2.
  stream.AddDexRegisterEntry(Kind::kInFpuRegisterHigh, 1);  // Short location.
  stream.EndStackMapEntry();

  stream.EndMethod();
  ScopedArenaVector<uint8_t> memory = stream.Encode();

  CodeInfo code_info(memory.data());
  ASSERT_EQ(4u, code_info.GetNumberOfStackMaps());

  uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
  ASSERT_EQ(7u, number_of_catalog_entries);

  // First stack map.
  {
    StackMap stack_map = code_info.GetStackMapAt(0);
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
    ASSERT_EQ(0u, stack_map.GetDexPc());
    ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
    ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));

    ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask1));

    ASSERT_TRUE(stack_map.HasDexRegisterMap());
    DexRegisterMap dex_register_map = code_info.GetDexRegisterMapOf(stack_map);
    ASSERT_EQ(number_of_dex_registers, dex_register_map.size());
    ASSERT_TRUE(dex_register_map[0].IsLive());
    ASSERT_TRUE(dex_register_map[1].IsLive());
    ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());

    ASSERT_EQ(Kind::kInStack, dex_register_map[0].GetKind());
    ASSERT_EQ(Kind::kConstant, dex_register_map[1].GetKind());
    ASSERT_EQ(0, dex_register_map[0].GetStackOffsetInBytes());
    ASSERT_EQ(-2, dex_register_map[1].GetConstant());

    DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(0);
    DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(1);
    ASSERT_EQ(Kind::kInStack, location0.GetKind());
    ASSERT_EQ(Kind::kConstant, location1.GetKind());
    ASSERT_EQ(0, location0.GetValue());
    ASSERT_EQ(-2, location1.GetValue());

    ASSERT_TRUE(stack_map.HasInlineInfo());
    auto inline_infos = code_info.GetInlineInfosOf(stack_map);
    ASSERT_EQ(2u, inline_infos.size());
    ASSERT_EQ(3u, inline_infos[0].GetDexPc());
    ASSERT_EQ(2u, inline_infos[1].GetDexPc());
    ASSERT_TRUE(inline_infos[0].EncodesArtMethod());
    ASSERT_TRUE(inline_infos[1].EncodesArtMethod());
  }

  // Second stack map.
  {
    StackMap stack_map = code_info.GetStackMapAt(1);
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1u)));
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(128u * kPcAlign)));
    ASSERT_EQ(1u, stack_map.GetDexPc());
    ASSERT_EQ(128u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
    ASSERT_EQ(0xFFu, code_info.GetRegisterMaskOf(stack_map));

    ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask2));

    ASSERT_TRUE(stack_map.HasDexRegisterMap());
    DexRegisterMap dex_register_map = code_info.GetDexRegisterMapOf(stack_map);
    ASSERT_EQ(number_of_dex_registers, dex_register_map.size());
    ASSERT_TRUE(dex_register_map[0].IsLive());
    ASSERT_TRUE(dex_register_map[1].IsLive());
    ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());

    ASSERT_EQ(Kind::kInRegister, dex_register_map[0].GetKind());
    ASSERT_EQ(Kind::kInFpuRegister, dex_register_map[1].GetKind());
    ASSERT_EQ(18, dex_register_map[0].GetMachineRegister());
    ASSERT_EQ(3, dex_register_map[1].GetMachineRegister());

    DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(2);
    DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(3);
    ASSERT_EQ(Kind::kInRegister, location0.GetKind());
    ASSERT_EQ(Kind::kInFpuRegister, location1.GetKind());
    ASSERT_EQ(18, location0.GetValue());
    ASSERT_EQ(3, location1.GetValue());

    ASSERT_FALSE(stack_map.HasInlineInfo());
  }

  // Third stack map.
  {
    StackMap stack_map = code_info.GetStackMapAt(2);
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(2u)));
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(192u * kPcAlign)));
    ASSERT_EQ(2u, stack_map.GetDexPc());
    ASSERT_EQ(192u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
    ASSERT_EQ(0xABu, code_info.GetRegisterMaskOf(stack_map));

    ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask3));

    ASSERT_TRUE(stack_map.HasDexRegisterMap());
    DexRegisterMap dex_register_map = code_info.GetDexRegisterMapOf(stack_map);
    ASSERT_EQ(number_of_dex_registers, dex_register_map.size());
    ASSERT_TRUE(dex_register_map[0].IsLive());
    ASSERT_TRUE(dex_register_map[1].IsLive());
    ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());

    ASSERT_EQ(Kind::kInRegister, dex_register_map[0].GetKind());
    ASSERT_EQ(Kind::kInRegisterHigh, dex_register_map[1].GetKind());
    ASSERT_EQ(6, dex_register_map[0].GetMachineRegister());
    ASSERT_EQ(8, dex_register_map[1].GetMachineRegister());

    DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(4);
    DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(5);
    ASSERT_EQ(Kind::kInRegister, location0.GetKind());
    ASSERT_EQ(Kind::kInRegisterHigh, location1.GetKind());
    ASSERT_EQ(6, location0.GetValue());
    ASSERT_EQ(8, location1.GetValue());

    ASSERT_FALSE(stack_map.HasInlineInfo());
  }

  // Fourth stack map.
  {
    StackMap stack_map = code_info.GetStackMapAt(3);
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(3u)));
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(256u * kPcAlign)));
    ASSERT_EQ(3u, stack_map.GetDexPc());
    ASSERT_EQ(256u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
    ASSERT_EQ(0xCDu, code_info.GetRegisterMaskOf(stack_map));

    ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask4));

    ASSERT_TRUE(stack_map.HasDexRegisterMap());
    DexRegisterMap dex_register_map = code_info.GetDexRegisterMapOf(stack_map);
    ASSERT_EQ(number_of_dex_registers, dex_register_map.size());
    ASSERT_TRUE(dex_register_map[0].IsLive());
    ASSERT_TRUE(dex_register_map[1].IsLive());
    ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());

    ASSERT_EQ(Kind::kInFpuRegister, dex_register_map[0].GetKind());
    ASSERT_EQ(Kind::kInFpuRegisterHigh, dex_register_map[1].GetKind());
    ASSERT_EQ(3, dex_register_map[0].GetMachineRegister());
    ASSERT_EQ(1, dex_register_map[1].GetMachineRegister());

    DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(3);
    DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(6);
    ASSERT_EQ(Kind::kInFpuRegister, location0.GetKind());
    ASSERT_EQ(Kind::kInFpuRegisterHigh, location1.GetKind());
    ASSERT_EQ(3, location0.GetValue());
    ASSERT_EQ(1, location1.GetValue());

    ASSERT_FALSE(stack_map.HasInlineInfo());
  }
}

TEST(StackMapTest, TestDeduplicateInlineInfoDexRegisterMap) {
  MallocArenaPool pool;
  ArenaStack arena_stack(&pool);
  ScopedArenaAllocator allocator(&arena_stack);
  StackMapStream stream(&allocator, kRuntimeISA);
  stream.BeginMethod(32, 0, 0, 2);
  ArtMethod art_method;

  ArenaBitVector sp_mask1(&allocator, 0, true);
  sp_mask1.SetBit(2);
  sp_mask1.SetBit(4);
  const size_t number_of_dex_registers = 2;
  const size_t number_of_dex_registers_in_inline_info = 2;
  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask1);
  stream.AddDexRegisterEntry(Kind::kInStack, 0);     // Short location.
  stream.AddDexRegisterEntry(Kind::kConstant, -2);   // Large location.
  stream.BeginInlineInfoEntry(&art_method, 3, number_of_dex_registers_in_inline_info);
  stream.AddDexRegisterEntry(Kind::kInStack, 0);     // Short location.
  stream.AddDexRegisterEntry(Kind::kConstant, -2);   // Large location.
  stream.EndInlineInfoEntry();
  stream.EndStackMapEntry();

  stream.EndMethod();
  ScopedArenaVector<uint8_t> memory = stream.Encode();

  CodeInfo code_info(memory.data());
  ASSERT_EQ(1u, code_info.GetNumberOfStackMaps());

  uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
  ASSERT_EQ(2u, number_of_catalog_entries);

  // First stack map.
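  // The inlined frame recorded the same two locations as the outer frame, so
  // the location catalog is expected to stay at the two deduplicated entries
  // asserted above.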
  {
    StackMap stack_map = code_info.GetStackMapAt(0);
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
    ASSERT_EQ(0u, stack_map.GetDexPc());
    ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
    ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));

    ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask1));

    ASSERT_TRUE(stack_map.HasDexRegisterMap());
    DexRegisterMap map(code_info.GetDexRegisterMapOf(stack_map));
    ASSERT_EQ(number_of_dex_registers, map.size());
    ASSERT_TRUE(map[0].IsLive());
    ASSERT_TRUE(map[1].IsLive());
    ASSERT_EQ(2u, map.GetNumberOfLiveDexRegisters());

    ASSERT_EQ(Kind::kInStack, map[0].GetKind());
    ASSERT_EQ(Kind::kConstant, map[1].GetKind());
    ASSERT_EQ(0, map[0].GetStackOffsetInBytes());
    ASSERT_EQ(-2, map[1].GetConstant());

    DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(0);
    DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(1);
    ASSERT_EQ(Kind::kInStack, location0.GetKind());
    ASSERT_EQ(Kind::kConstant, location1.GetKind());
    ASSERT_EQ(0, location0.GetValue());
    ASSERT_EQ(-2, location1.GetValue());
  }
}

TEST(StackMapTest, TestNonLiveDexRegisters) {
  MallocArenaPool pool;
  ArenaStack arena_stack(&pool);
  ScopedArenaAllocator allocator(&arena_stack);
  StackMapStream stream(&allocator, kRuntimeISA);
  stream.BeginMethod(32, 0, 0, 2);

  ArenaBitVector sp_mask(&allocator, 0, false);
  uint32_t number_of_dex_registers = 2;
  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask);
  stream.AddDexRegisterEntry(Kind::kNone, 0);        // No location.
  stream.AddDexRegisterEntry(Kind::kConstant, -2);   // Large location.
  stream.EndStackMapEntry();

  stream.EndMethod();
  ScopedArenaVector<uint8_t> memory = stream.Encode();

  CodeInfo code_info(memory.data());
  ASSERT_EQ(1u, code_info.GetNumberOfStackMaps());

  uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
  ASSERT_EQ(1u, number_of_catalog_entries);

  StackMap stack_map = code_info.GetStackMapAt(0);
  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
  ASSERT_EQ(0u, stack_map.GetDexPc());
  ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
  ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));

  ASSERT_TRUE(stack_map.HasDexRegisterMap());
  DexRegisterMap dex_register_map = code_info.GetDexRegisterMapOf(stack_map);
  ASSERT_EQ(number_of_dex_registers, dex_register_map.size());
  ASSERT_FALSE(dex_register_map[0].IsLive());
  ASSERT_TRUE(dex_register_map[1].IsLive());
  ASSERT_EQ(1u, dex_register_map.GetNumberOfLiveDexRegisters());

  ASSERT_EQ(Kind::kNone, dex_register_map[0].GetKind());
  ASSERT_EQ(Kind::kConstant, dex_register_map[1].GetKind());
  ASSERT_EQ(-2, dex_register_map[1].GetConstant());

  DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(0);
  ASSERT_EQ(Kind::kConstant, location1.GetKind());
  ASSERT_EQ(-2, location1.GetValue());

  ASSERT_FALSE(stack_map.HasInlineInfo());
}

TEST(StackMapTest, TestShareDexRegisterMap) {
  MallocArenaPool pool;
  ArenaStack arena_stack(&pool);
  ScopedArenaAllocator allocator(&arena_stack);
  StackMapStream stream(&allocator, kRuntimeISA);
  stream.BeginMethod(32, 0, 0, 2);

  ArenaBitVector sp_mask(&allocator, 0, false);
  uint32_t number_of_dex_registers = 2;
  // First stack map.
  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask);
  stream.AddDexRegisterEntry(Kind::kInRegister, 0);   // Short location.
  stream.AddDexRegisterEntry(Kind::kConstant, -2);    // Large location.
  stream.EndStackMapEntry();
  // Second stack map, which should share the same dex register map.
  stream.BeginStackMapEntry(0, 65 * kPcAlign, 0x3, &sp_mask);
  stream.AddDexRegisterEntry(Kind::kInRegister, 0);   // Short location.
  stream.AddDexRegisterEntry(Kind::kConstant, -2);    // Large location.
  stream.EndStackMapEntry();
  // Third stack map (doesn't share the dex register map).
  stream.BeginStackMapEntry(0, 66 * kPcAlign, 0x3, &sp_mask);
  stream.AddDexRegisterEntry(Kind::kInRegister, 2);   // Short location.
  stream.AddDexRegisterEntry(Kind::kConstant, -2);    // Large location.
  stream.EndStackMapEntry();

  stream.EndMethod();
  ScopedArenaVector<uint8_t> memory = stream.Encode();

  CodeInfo ci(memory.data());

  // Verify first stack map.
  StackMap sm0 = ci.GetStackMapAt(0);
  DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm0);
  ASSERT_EQ(number_of_dex_registers, dex_registers0.size());
  ASSERT_EQ(0, dex_registers0[0].GetMachineRegister());
  ASSERT_EQ(-2, dex_registers0[1].GetConstant());

  // Verify second stack map.
  StackMap sm1 = ci.GetStackMapAt(1);
  DexRegisterMap dex_registers1 = ci.GetDexRegisterMapOf(sm1);
  ASSERT_EQ(number_of_dex_registers, dex_registers1.size());
  ASSERT_EQ(0, dex_registers1[0].GetMachineRegister());
  ASSERT_EQ(-2, dex_registers1[1].GetConstant());

  // Verify third stack map.
  StackMap sm2 = ci.GetStackMapAt(2);
  DexRegisterMap dex_registers2 = ci.GetDexRegisterMapOf(sm2);
  ASSERT_EQ(number_of_dex_registers, dex_registers2.size());
  ASSERT_EQ(2, dex_registers2[0].GetMachineRegister());
  ASSERT_EQ(-2, dex_registers2[1].GetConstant());

  // Verify dex register mask offsets.
  ASSERT_FALSE(sm1.HasDexRegisterMaskIndex());  // No delta.
  ASSERT_TRUE(sm2.HasDexRegisterMaskIndex());   // Has delta.
}

TEST(StackMapTest, TestNoDexRegisterMap) {
  MallocArenaPool pool;
  ArenaStack arena_stack(&pool);
  ScopedArenaAllocator allocator(&arena_stack);
  StackMapStream stream(&allocator, kRuntimeISA);
  stream.BeginMethod(32, 0, 0, 1);

  ArenaBitVector sp_mask(&allocator, 0, false);
  uint32_t number_of_dex_registers = 0;
  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask);
  stream.EndStackMapEntry();

  number_of_dex_registers = 1;
  stream.BeginStackMapEntry(1, 68 * kPcAlign, 0x4, &sp_mask);
  stream.AddDexRegisterEntry(Kind::kNone, 0);
  stream.EndStackMapEntry();

  stream.EndMethod();
  ScopedArenaVector<uint8_t> memory = stream.Encode();

  CodeInfo code_info(memory.data());
  ASSERT_EQ(2u, code_info.GetNumberOfStackMaps());

  uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
  ASSERT_EQ(0u, number_of_catalog_entries);

  StackMap stack_map = code_info.GetStackMapAt(0);
  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
  ASSERT_EQ(0u, stack_map.GetDexPc());
  ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
  ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));

  ASSERT_FALSE(stack_map.HasDexRegisterMap());
  ASSERT_FALSE(stack_map.HasInlineInfo());

  stack_map = code_info.GetStackMapAt(1);
  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1)));
  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(68 * kPcAlign)));
  ASSERT_EQ(1u, stack_map.GetDexPc());
  ASSERT_EQ(68u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
  ASSERT_EQ(0x4u, code_info.GetRegisterMaskOf(stack_map));

  ASSERT_TRUE(stack_map.HasDexRegisterMap());
  ASSERT_FALSE(stack_map.HasInlineInfo());
}

TEST(StackMapTest, InlineTest) {
  MallocArenaPool pool;
  ArenaStack arena_stack(&pool);
  ScopedArenaAllocator allocator(&arena_stack);
  StackMapStream stream(&allocator, kRuntimeISA);
  stream.BeginMethod(32, 0, 0, 2);
  ArtMethod art_method;

  ArenaBitVector sp_mask1(&allocator, 0, true);
  sp_mask1.SetBit(2);
  sp_mask1.SetBit(4);

  // First stack map.
  stream.BeginStackMapEntry(0, 10 * kPcAlign, 0x3, &sp_mask1);
  stream.AddDexRegisterEntry(Kind::kInStack, 0);
  stream.AddDexRegisterEntry(Kind::kConstant, 4);

  stream.BeginInlineInfoEntry(&art_method, 2, 1);
  stream.AddDexRegisterEntry(Kind::kInStack, 8);
  stream.EndInlineInfoEntry();
  stream.BeginInlineInfoEntry(&art_method, 3, 3);
  stream.AddDexRegisterEntry(Kind::kInStack, 16);
  stream.AddDexRegisterEntry(Kind::kConstant, 20);
  stream.AddDexRegisterEntry(Kind::kInRegister, 15);
  stream.EndInlineInfoEntry();

  stream.EndStackMapEntry();

  // Second stack map.
  stream.BeginStackMapEntry(2, 22 * kPcAlign, 0x3, &sp_mask1);
  stream.AddDexRegisterEntry(Kind::kInStack, 56);
  stream.AddDexRegisterEntry(Kind::kConstant, 0);

  stream.BeginInlineInfoEntry(&art_method, 2, 1);
  stream.AddDexRegisterEntry(Kind::kInStack, 12);
  stream.EndInlineInfoEntry();
  stream.BeginInlineInfoEntry(&art_method, 3, 3);
  stream.AddDexRegisterEntry(Kind::kInStack, 80);
  stream.AddDexRegisterEntry(Kind::kConstant, 10);
  stream.AddDexRegisterEntry(Kind::kInRegister, 5);
  stream.EndInlineInfoEntry();
  stream.BeginInlineInfoEntry(&art_method, 5, 0);
  stream.EndInlineInfoEntry();

  stream.EndStackMapEntry();

  // Third stack map.
  stream.BeginStackMapEntry(4, 56 * kPcAlign, 0x3, &sp_mask1);
  stream.AddDexRegisterEntry(Kind::kNone, 0);
  stream.AddDexRegisterEntry(Kind::kConstant, 4);
  stream.EndStackMapEntry();

  // Fourth stack map.
  stream.BeginStackMapEntry(6, 78 * kPcAlign, 0x3, &sp_mask1);
  stream.AddDexRegisterEntry(Kind::kInStack, 56);
  stream.AddDexRegisterEntry(Kind::kConstant, 0);

  stream.BeginInlineInfoEntry(&art_method, 2, 0);
  stream.EndInlineInfoEntry();
  stream.BeginInlineInfoEntry(&art_method, 5, 1);
  stream.AddDexRegisterEntry(Kind::kInRegister, 2);
  stream.EndInlineInfoEntry();
  stream.BeginInlineInfoEntry(&art_method, 10, 2);
  stream.AddDexRegisterEntry(Kind::kNone, 0);
  stream.AddDexRegisterEntry(Kind::kInRegister, 3);
  stream.EndInlineInfoEntry();

  stream.EndStackMapEntry();

  stream.EndMethod();
  ScopedArenaVector<uint8_t> memory = stream.Encode();

  CodeInfo ci(memory.data());

  {
    // Verify first stack map.
    StackMap sm0 = ci.GetStackMapAt(0);

    DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm0);
    ASSERT_EQ(2u, dex_registers0.size());
    ASSERT_EQ(0, dex_registers0[0].GetStackOffsetInBytes());
    ASSERT_EQ(4, dex_registers0[1].GetConstant());

    auto inline_infos = ci.GetInlineInfosOf(sm0);
    ASSERT_EQ(2u, inline_infos.size());
    ASSERT_EQ(2u, inline_infos[0].GetDexPc());
    ASSERT_TRUE(inline_infos[0].EncodesArtMethod());
    ASSERT_EQ(3u, inline_infos[1].GetDexPc());
    ASSERT_TRUE(inline_infos[1].EncodesArtMethod());

    DexRegisterMap dex_registers1 = ci.GetInlineDexRegisterMapOf(sm0, inline_infos[0]);
    ASSERT_EQ(1u, dex_registers1.size());
    ASSERT_EQ(8, dex_registers1[0].GetStackOffsetInBytes());

    DexRegisterMap dex_registers2 = ci.GetInlineDexRegisterMapOf(sm0, inline_infos[1]);
    ASSERT_EQ(3u, dex_registers2.size());
    ASSERT_EQ(16, dex_registers2[0].GetStackOffsetInBytes());
    ASSERT_EQ(20, dex_registers2[1].GetConstant());
    ASSERT_EQ(15, dex_registers2[2].GetMachineRegister());
  }

  {
    // Verify second stack map.
    StackMap sm1 = ci.GetStackMapAt(1);

    DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm1);
    ASSERT_EQ(2u, dex_registers0.size());
    ASSERT_EQ(56, dex_registers0[0].GetStackOffsetInBytes());
    ASSERT_EQ(0, dex_registers0[1].GetConstant());

    auto inline_infos = ci.GetInlineInfosOf(sm1);
    ASSERT_EQ(3u, inline_infos.size());
    ASSERT_EQ(2u, inline_infos[0].GetDexPc());
    ASSERT_TRUE(inline_infos[0].EncodesArtMethod());
    ASSERT_EQ(3u, inline_infos[1].GetDexPc());
    ASSERT_TRUE(inline_infos[1].EncodesArtMethod());
    ASSERT_EQ(5u, inline_infos[2].GetDexPc());
    ASSERT_TRUE(inline_infos[2].EncodesArtMethod());

    DexRegisterMap dex_registers1 = ci.GetInlineDexRegisterMapOf(sm1, inline_infos[0]);
    ASSERT_EQ(1u, dex_registers1.size());
    ASSERT_EQ(12, dex_registers1[0].GetStackOffsetInBytes());

    DexRegisterMap dex_registers2 = ci.GetInlineDexRegisterMapOf(sm1, inline_infos[1]);
    ASSERT_EQ(3u, dex_registers2.size());
    ASSERT_EQ(80, dex_registers2[0].GetStackOffsetInBytes());
    ASSERT_EQ(10, dex_registers2[1].GetConstant());
    ASSERT_EQ(5, dex_registers2[2].GetMachineRegister());
  }

  {
    // Verify third stack map.
    StackMap sm2 = ci.GetStackMapAt(2);

    DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm2);
    ASSERT_EQ(2u, dex_registers0.size());
    ASSERT_FALSE(dex_registers0[0].IsLive());
    ASSERT_EQ(4, dex_registers0[1].GetConstant());
    ASSERT_FALSE(sm2.HasInlineInfo());
  }

  {
    // Verify fourth stack map.
    StackMap sm3 = ci.GetStackMapAt(3);

    DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm3);
    ASSERT_EQ(2u, dex_registers0.size());
    ASSERT_EQ(56, dex_registers0[0].GetStackOffsetInBytes());
    ASSERT_EQ(0, dex_registers0[1].GetConstant());

    auto inline_infos = ci.GetInlineInfosOf(sm3);
    ASSERT_EQ(3u, inline_infos.size());
    ASSERT_EQ(2u, inline_infos[0].GetDexPc());
    ASSERT_TRUE(inline_infos[0].EncodesArtMethod());
    ASSERT_EQ(5u, inline_infos[1].GetDexPc());
    ASSERT_TRUE(inline_infos[1].EncodesArtMethod());
    ASSERT_EQ(10u, inline_infos[2].GetDexPc());
    ASSERT_TRUE(inline_infos[2].EncodesArtMethod());

    DexRegisterMap dex_registers1 = ci.GetInlineDexRegisterMapOf(sm3, inline_infos[1]);
    ASSERT_EQ(1u, dex_registers1.size());
    ASSERT_EQ(2, dex_registers1[0].GetMachineRegister());

    DexRegisterMap dex_registers2 = ci.GetInlineDexRegisterMapOf(sm3, inline_infos[2]);
    ASSERT_EQ(2u, dex_registers2.size());
    ASSERT_FALSE(dex_registers2[0].IsLive());
    ASSERT_EQ(3, dex_registers2[1].GetMachineRegister());
  }
}

TEST(StackMapTest, PackedNativePcTest) {
  // Test minimum alignments, and decoding.
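  // Each case packs the ISA's minimum instruction alignment and expects the
  // pack/unpack pair to round-trip it exactly, i.e. the smallest representable
  // native PC step must survive encoding for every ISA.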
  uint32_t packed_thumb2 =
      StackMap::PackNativePc(kThumb2InstructionAlignment, InstructionSet::kThumb2);
  uint32_t packed_arm64 =
      StackMap::PackNativePc(kArm64InstructionAlignment, InstructionSet::kArm64);
  uint32_t packed_x86 =
      StackMap::PackNativePc(kX86InstructionAlignment, InstructionSet::kX86);
  uint32_t packed_x86_64 =
      StackMap::PackNativePc(kX86_64InstructionAlignment, InstructionSet::kX86_64);
  uint32_t packed_mips =
      StackMap::PackNativePc(kMipsInstructionAlignment, InstructionSet::kMips);
  uint32_t packed_mips64 =
      StackMap::PackNativePc(kMips64InstructionAlignment, InstructionSet::kMips64);
  EXPECT_EQ(StackMap::UnpackNativePc(packed_thumb2, InstructionSet::kThumb2),
            kThumb2InstructionAlignment);
  EXPECT_EQ(StackMap::UnpackNativePc(packed_arm64, InstructionSet::kArm64),
            kArm64InstructionAlignment);
  EXPECT_EQ(StackMap::UnpackNativePc(packed_x86, InstructionSet::kX86),
            kX86InstructionAlignment);
  EXPECT_EQ(StackMap::UnpackNativePc(packed_x86_64, InstructionSet::kX86_64),
            kX86_64InstructionAlignment);
  EXPECT_EQ(StackMap::UnpackNativePc(packed_mips, InstructionSet::kMips),
            kMipsInstructionAlignment);
  EXPECT_EQ(StackMap::UnpackNativePc(packed_mips64, InstructionSet::kMips64),
            kMips64InstructionAlignment);
}

TEST(StackMapTest, TestDeduplicateStackMask) {
  MallocArenaPool pool;
  ArenaStack arena_stack(&pool);
  ScopedArenaAllocator allocator(&arena_stack);
  StackMapStream stream(&allocator, kRuntimeISA);
  stream.BeginMethod(32, 0, 0, 0);

  ArenaBitVector sp_mask(&allocator, 0, true);
  sp_mask.SetBit(1);
  sp_mask.SetBit(4);
  stream.BeginStackMapEntry(0, 4 * kPcAlign, 0x3, &sp_mask);
  stream.EndStackMapEntry();
  stream.BeginStackMapEntry(0, 8 * kPcAlign, 0x3, &sp_mask);
  stream.EndStackMapEntry();

  stream.EndMethod();
  ScopedArenaVector<uint8_t> memory = stream.Encode();

  CodeInfo code_info(memory.data());
  ASSERT_EQ(2u, code_info.GetNumberOfStackMaps());

  StackMap stack_map1 = code_info.GetStackMapForNativePcOffset(4 * kPcAlign);
  StackMap stack_map2 = code_info.GetStackMapForNativePcOffset(8 * kPcAlign);
  EXPECT_EQ(stack_map1.GetStackMaskIndex(),
            stack_map2.GetStackMaskIndex());
}

TEST(StackMapTest, TestDedupeBitTables) {
  MallocArenaPool pool;
  ArenaStack arena_stack(&pool);
  ScopedArenaAllocator allocator(&arena_stack);
  StackMapStream stream(&allocator, kRuntimeISA);
  stream.BeginMethod(32, 0, 0, 2);

  stream.BeginStackMapEntry(0, 64 * kPcAlign);
  stream.AddDexRegisterEntry(Kind::kInStack, 0);
  stream.AddDexRegisterEntry(Kind::kConstant, -2);
  stream.EndStackMapEntry();

  stream.EndMethod();
  ScopedArenaVector<uint8_t> memory = stream.Encode();

  std::vector<uint8_t> out;
  CodeInfo::Deduper deduper(&out);
  size_t deduped1 = deduper.Dedupe(memory.data());
  size_t deduped2 = deduper.Dedupe(memory.data());

  for (size_t deduped : { deduped1, deduped2 }) {
    CodeInfo code_info(out.data() + deduped);
    ASSERT_EQ(1u, code_info.GetNumberOfStackMaps());

    StackMap stack_map = code_info.GetStackMapAt(0);
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
    ASSERT_EQ(0u, stack_map.GetDexPc());
    ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));

    ASSERT_TRUE(stack_map.HasDexRegisterMap());
    DexRegisterMap dex_register_map = code_info.GetDexRegisterMapOf(stack_map);

    ASSERT_EQ(Kind::kInStack, dex_register_map[0].GetKind());
    ASSERT_EQ(Kind::kConstant, dex_register_map[1].GetKind());
    ASSERT_EQ(0, dex_register_map[0].GetStackOffsetInBytes());
    ASSERT_EQ(-2, dex_register_map[1].GetConstant());
  }

  // The second copy should have been deduplicated against the first, so the
  // combined output stays below the size of two independent encodings.
  ASSERT_GT(memory.size() * 2, out.size());
}

}  // namespace art