Home | History | Annotate | Download | only in runtime
      1 /*
      2  * Copyright (C) 2014 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #ifndef ART_RUNTIME_STACK_MAP_H_
     18 #define ART_RUNTIME_STACK_MAP_H_
     19 
     20 #include <limits>
     21 
     22 #include "arch/code_offset.h"
     23 #include "base/bit_utils.h"
     24 #include "base/bit_vector.h"
     25 #include "base/leb128.h"
     26 #include "bit_memory_region.h"
     27 #include "dex/dex_file_types.h"
     28 #include "memory_region.h"
     29 #include "method_info.h"
     30 
     31 namespace art {
     32 
     33 class VariableIndentationOutputStream;
     34 
     35 // Size of a frame slot, in bytes.  This constant is a signed value,
     36 // to please the compiler in arithmetic operations involving int32_t
     37 // (signed) values.
     38 static constexpr ssize_t kFrameSlotSize = 4;
     39 
     40 // Size of Dex virtual registers.
     41 static constexpr size_t kVRegSize = 4;
     42 
     43 class ArtMethod;
     44 class CodeInfo;
     45 class StackMapEncoding;
     46 struct CodeInfoEncoding;
     47 
     48 /**
     49  * Classes in the following file are wrapper on stack map information backed
     50  * by a MemoryRegion. As such they read and write to the region, they don't have
     51  * their own fields.
     52  */
     53 
     54 // Dex register location container used by DexRegisterMap and StackMapStream.
     55 class DexRegisterLocation {
     56  public:
     57   /*
     58    * The location kind used to populate the Dex register information in a
     59    * StackMapStream can either be:
     60    * - kStack: vreg stored on the stack, value holds the stack offset;
     61    * - kInRegister: vreg stored in low 32 bits of a core physical register,
     62    *                value holds the register number;
     63    * - kInRegisterHigh: vreg stored in high 32 bits of a core physical register,
     64    *                    value holds the register number;
     65    * - kInFpuRegister: vreg stored in low 32 bits of an FPU register,
     66    *                   value holds the register number;
     67    * - kInFpuRegisterHigh: vreg stored in high 32 bits of an FPU register,
     68    *                       value holds the register number;
     69    * - kConstant: value holds the constant;
     70    *
     71    * In addition, DexRegisterMap also uses these values:
     72    * - kInStackLargeOffset: value holds a "large" stack offset (greater than
     73    *   or equal to 128 bytes);
     74    * - kConstantLargeValue: value holds a "large" constant (lower than 0, or
     75    *   or greater than or equal to 32);
     76    * - kNone: the register has no location, meaning it has not been set.
     77    */
     78   enum class Kind : uint8_t {
     79     // Short location kinds, for entries fitting on one byte (3 bits
     80     // for the kind, 5 bits for the value) in a DexRegisterMap.
     81     kInStack = 0,             // 0b000
     82     kInRegister = 1,          // 0b001
     83     kInRegisterHigh = 2,      // 0b010
     84     kInFpuRegister = 3,       // 0b011
     85     kInFpuRegisterHigh = 4,   // 0b100
     86     kConstant = 5,            // 0b101
     87 
     88     // Large location kinds, requiring a 5-byte encoding (1 byte for the
     89     // kind, 4 bytes for the value).
     90 
     91     // Stack location at a large offset, meaning that the offset value
     92     // divided by the stack frame slot size (4 bytes) cannot fit on a
     93     // 5-bit unsigned integer (i.e., this offset value is greater than
     94     // or equal to 2^5 * 4 = 128 bytes).
     95     kInStackLargeOffset = 6,  // 0b110
     96 
     97     // Large constant, that cannot fit on a 5-bit signed integer (i.e.,
     98     // lower than 0, or greater than or equal to 2^5 = 32).
     99     kConstantLargeValue = 7,  // 0b111
    100 
    101     // Entries with no location are not stored and do not need own marker.
    102     kNone = static_cast<uint8_t>(-1),
    103 
    104     kLastLocationKind = kConstantLargeValue
    105   };
    106 
    107   static_assert(
    108       sizeof(Kind) == 1u,
    109       "art::DexRegisterLocation::Kind has a size different from one byte.");
    110 
    111   static bool IsShortLocationKind(Kind kind) {
    112     switch (kind) {
    113       case Kind::kInStack:
    114       case Kind::kInRegister:
    115       case Kind::kInRegisterHigh:
    116       case Kind::kInFpuRegister:
    117       case Kind::kInFpuRegisterHigh:
    118       case Kind::kConstant:
    119         return true;
    120 
    121       case Kind::kInStackLargeOffset:
    122       case Kind::kConstantLargeValue:
    123         return false;
    124 
    125       case Kind::kNone:
    126         LOG(FATAL) << "Unexpected location kind";
    127     }
    128     UNREACHABLE();
    129   }
    130 
    131   // Convert `kind` to a "surface" kind, i.e. one that doesn't include
    132   // any value with a "large" qualifier.
    133   // TODO: Introduce another enum type for the surface kind?
    134   static Kind ConvertToSurfaceKind(Kind kind) {
    135     switch (kind) {
    136       case Kind::kInStack:
    137       case Kind::kInRegister:
    138       case Kind::kInRegisterHigh:
    139       case Kind::kInFpuRegister:
    140       case Kind::kInFpuRegisterHigh:
    141       case Kind::kConstant:
    142         return kind;
    143 
    144       case Kind::kInStackLargeOffset:
    145         return Kind::kInStack;
    146 
    147       case Kind::kConstantLargeValue:
    148         return Kind::kConstant;
    149 
    150       case Kind::kNone:
    151         return kind;
    152     }
    153     UNREACHABLE();
    154   }
    155 
    156   // Required by art::StackMapStream::LocationCatalogEntriesIndices.
    157   DexRegisterLocation() : kind_(Kind::kNone), value_(0) {}
    158 
    159   DexRegisterLocation(Kind kind, int32_t value) : kind_(kind), value_(value) {}
    160 
    161   static DexRegisterLocation None() {
    162     return DexRegisterLocation(Kind::kNone, 0);
    163   }
    164 
    165   // Get the "surface" kind of the location, i.e., the one that doesn't
    166   // include any value with a "large" qualifier.
    167   Kind GetKind() const {
    168     return ConvertToSurfaceKind(kind_);
    169   }
    170 
    171   // Get the value of the location.
    172   int32_t GetValue() const { return value_; }
    173 
    174   // Get the actual kind of the location.
    175   Kind GetInternalKind() const { return kind_; }
    176 
    177   bool operator==(DexRegisterLocation other) const {
    178     return kind_ == other.kind_ && value_ == other.value_;
    179   }
    180 
    181   bool operator!=(DexRegisterLocation other) const {
    182     return !(*this == other);
    183   }
    184 
    185  private:
    186   Kind kind_;
    187   int32_t value_;
    188 
    189   friend class DexRegisterLocationHashFn;
    190 };
    191 
    192 std::ostream& operator<<(std::ostream& stream, const DexRegisterLocation::Kind& kind);
    193 
    194 /**
    195  * Store information on unique Dex register locations used in a method.
    196  * The information is of the form:
    197  *
    198  *   [DexRegisterLocation+].
    199  *
    200  * DexRegisterLocations are either 1- or 5-byte wide (see art::DexRegisterLocation::Kind).
    201  */
    202 class DexRegisterLocationCatalog {
    203  public:
    204   explicit DexRegisterLocationCatalog(MemoryRegion region) : region_(region) {}
    205 
    206   // Short (compressed) location, fitting on one byte.
    207   typedef uint8_t ShortLocation;
    208 
    209   void SetRegisterInfo(size_t offset, const DexRegisterLocation& dex_register_location) {
    210     DexRegisterLocation::Kind kind = ComputeCompressedKind(dex_register_location);
    211     int32_t value = dex_register_location.GetValue();
    212     if (DexRegisterLocation::IsShortLocationKind(kind)) {
    213       // Short location.  Compress the kind and the value as a single byte.
    214       if (kind == DexRegisterLocation::Kind::kInStack) {
    215         // Instead of storing stack offsets expressed in bytes for
    216         // short stack locations, store slot offsets.  A stack offset
    217         // is a multiple of 4 (kFrameSlotSize).  This means that by
    218         // dividing it by 4, we can fit values from the [0, 128)
    219         // interval in a short stack location, and not just values
    220         // from the [0, 32) interval.
    221         DCHECK_EQ(value % kFrameSlotSize, 0);
    222         value /= kFrameSlotSize;
    223       }
    224       DCHECK(IsShortValue(value)) << value;
    225       region_.StoreUnaligned<ShortLocation>(offset, MakeShortLocation(kind, value));
    226     } else {
    227       // Large location.  Write the location on one byte and the value
    228       // on 4 bytes.
    229       DCHECK(!IsShortValue(value)) << value;
    230       if (kind == DexRegisterLocation::Kind::kInStackLargeOffset) {
    231         // Also divide large stack offsets by 4 for the sake of consistency.
    232         DCHECK_EQ(value % kFrameSlotSize, 0);
    233         value /= kFrameSlotSize;
    234       }
    235       // Data can be unaligned as the written Dex register locations can
    236       // either be 1-byte or 5-byte wide.  Use
    237       // art::MemoryRegion::StoreUnaligned instead of
    238       // art::MemoryRegion::Store to prevent unligned word accesses on ARM.
    239       region_.StoreUnaligned<DexRegisterLocation::Kind>(offset, kind);
    240       region_.StoreUnaligned<int32_t>(offset + sizeof(DexRegisterLocation::Kind), value);
    241     }
    242   }
    243 
    244   // Find the offset of the location catalog entry number `location_catalog_entry_index`.
    245   size_t FindLocationOffset(size_t location_catalog_entry_index) const {
    246     size_t offset = kFixedSize;
    247     // Skip the first `location_catalog_entry_index - 1` entries.
    248     for (uint16_t i = 0; i < location_catalog_entry_index; ++i) {
    249       // Read the first next byte and inspect its first 3 bits to decide
    250       // whether it is a short or a large location.
    251       DexRegisterLocation::Kind kind = ExtractKindAtOffset(offset);
    252       if (DexRegisterLocation::IsShortLocationKind(kind)) {
    253         // Short location.  Skip the current byte.
    254         offset += SingleShortEntrySize();
    255       } else {
    256         // Large location.  Skip the 5 next bytes.
    257         offset += SingleLargeEntrySize();
    258       }
    259     }
    260     return offset;
    261   }
    262 
    263   // Get the internal kind of entry at `location_catalog_entry_index`.
    264   DexRegisterLocation::Kind GetLocationInternalKind(size_t location_catalog_entry_index) const {
    265     if (location_catalog_entry_index == kNoLocationEntryIndex) {
    266       return DexRegisterLocation::Kind::kNone;
    267     }
    268     return ExtractKindAtOffset(FindLocationOffset(location_catalog_entry_index));
    269   }
    270 
    271   // Get the (surface) kind and value of entry at `location_catalog_entry_index`.
    272   DexRegisterLocation GetDexRegisterLocation(size_t location_catalog_entry_index) const {
    273     if (location_catalog_entry_index == kNoLocationEntryIndex) {
    274       return DexRegisterLocation::None();
    275     }
    276     size_t offset = FindLocationOffset(location_catalog_entry_index);
    277     // Read the first byte and inspect its first 3 bits to get the location.
    278     ShortLocation first_byte = region_.LoadUnaligned<ShortLocation>(offset);
    279     DexRegisterLocation::Kind kind = ExtractKindFromShortLocation(first_byte);
    280     if (DexRegisterLocation::IsShortLocationKind(kind)) {
    281       // Short location.  Extract the value from the remaining 5 bits.
    282       int32_t value = ExtractValueFromShortLocation(first_byte);
    283       if (kind == DexRegisterLocation::Kind::kInStack) {
    284         // Convert the stack slot (short) offset to a byte offset value.
    285         value *= kFrameSlotSize;
    286       }
    287       return DexRegisterLocation(kind, value);
    288     } else {
    289       // Large location.  Read the four next bytes to get the value.
    290       int32_t value = region_.LoadUnaligned<int32_t>(offset + sizeof(DexRegisterLocation::Kind));
    291       if (kind == DexRegisterLocation::Kind::kInStackLargeOffset) {
    292         // Convert the stack slot (large) offset to a byte offset value.
    293         value *= kFrameSlotSize;
    294       }
    295       return DexRegisterLocation(kind, value);
    296     }
    297   }
    298 
    299   // Compute the compressed kind of `location`.
    300   static DexRegisterLocation::Kind ComputeCompressedKind(const DexRegisterLocation& location) {
    301     DexRegisterLocation::Kind kind = location.GetInternalKind();
    302     switch (kind) {
    303       case DexRegisterLocation::Kind::kInStack:
    304         return IsShortStackOffsetValue(location.GetValue())
    305             ? DexRegisterLocation::Kind::kInStack
    306             : DexRegisterLocation::Kind::kInStackLargeOffset;
    307 
    308       case DexRegisterLocation::Kind::kInRegister:
    309       case DexRegisterLocation::Kind::kInRegisterHigh:
    310         DCHECK_GE(location.GetValue(), 0);
    311         DCHECK_LT(location.GetValue(), 1 << kValueBits);
    312         return kind;
    313 
    314       case DexRegisterLocation::Kind::kInFpuRegister:
    315       case DexRegisterLocation::Kind::kInFpuRegisterHigh:
    316         DCHECK_GE(location.GetValue(), 0);
    317         DCHECK_LT(location.GetValue(), 1 << kValueBits);
    318         return kind;
    319 
    320       case DexRegisterLocation::Kind::kConstant:
    321         return IsShortConstantValue(location.GetValue())
    322             ? DexRegisterLocation::Kind::kConstant
    323             : DexRegisterLocation::Kind::kConstantLargeValue;
    324 
    325       case DexRegisterLocation::Kind::kConstantLargeValue:
    326       case DexRegisterLocation::Kind::kInStackLargeOffset:
    327       case DexRegisterLocation::Kind::kNone:
    328         LOG(FATAL) << "Unexpected location kind " << kind;
    329     }
    330     UNREACHABLE();
    331   }
    332 
    333   // Can `location` be turned into a short location?
    334   static bool CanBeEncodedAsShortLocation(const DexRegisterLocation& location) {
    335     DexRegisterLocation::Kind kind = location.GetInternalKind();
    336     switch (kind) {
    337       case DexRegisterLocation::Kind::kInStack:
    338         return IsShortStackOffsetValue(location.GetValue());
    339 
    340       case DexRegisterLocation::Kind::kInRegister:
    341       case DexRegisterLocation::Kind::kInRegisterHigh:
    342       case DexRegisterLocation::Kind::kInFpuRegister:
    343       case DexRegisterLocation::Kind::kInFpuRegisterHigh:
    344         return true;
    345 
    346       case DexRegisterLocation::Kind::kConstant:
    347         return IsShortConstantValue(location.GetValue());
    348 
    349       case DexRegisterLocation::Kind::kConstantLargeValue:
    350       case DexRegisterLocation::Kind::kInStackLargeOffset:
    351       case DexRegisterLocation::Kind::kNone:
    352         LOG(FATAL) << "Unexpected location kind " << kind;
    353     }
    354     UNREACHABLE();
    355   }
    356 
    357   static size_t EntrySize(const DexRegisterLocation& location) {
    358     return CanBeEncodedAsShortLocation(location) ? SingleShortEntrySize() : SingleLargeEntrySize();
    359   }
    360 
    361   static size_t SingleShortEntrySize() {
    362     return sizeof(ShortLocation);
    363   }
    364 
    365   static size_t SingleLargeEntrySize() {
    366     return sizeof(DexRegisterLocation::Kind) + sizeof(int32_t);
    367   }
    368 
    369   size_t Size() const {
    370     return region_.size();
    371   }
    372 
    373   void Dump(VariableIndentationOutputStream* vios,
    374             const CodeInfo& code_info);
    375 
    376   // Special (invalid) Dex register location catalog entry index meaning
    377   // that there is no location for a given Dex register (i.e., it is
    378   // mapped to a DexRegisterLocation::Kind::kNone location).
    379   static constexpr size_t kNoLocationEntryIndex = -1;
    380 
    381  private:
    382   static constexpr int kFixedSize = 0;
    383 
    384   // Width of the kind "field" in a short location, in bits.
    385   static constexpr size_t kKindBits = 3;
    386   // Width of the value "field" in a short location, in bits.
    387   static constexpr size_t kValueBits = 5;
    388 
    389   static constexpr uint8_t kKindMask = (1 << kKindBits) - 1;
    390   static constexpr int32_t kValueMask = (1 << kValueBits) - 1;
    391   static constexpr size_t kKindOffset = 0;
    392   static constexpr size_t kValueOffset = kKindBits;
    393 
    394   static bool IsShortStackOffsetValue(int32_t value) {
    395     DCHECK_EQ(value % kFrameSlotSize, 0);
    396     return IsShortValue(value / kFrameSlotSize);
    397   }
    398 
    399   static bool IsShortConstantValue(int32_t value) {
    400     return IsShortValue(value);
    401   }
    402 
    403   static bool IsShortValue(int32_t value) {
    404     return IsUint<kValueBits>(value);
    405   }
    406 
    407   static ShortLocation MakeShortLocation(DexRegisterLocation::Kind kind, int32_t value) {
    408     uint8_t kind_integer_value = static_cast<uint8_t>(kind);
    409     DCHECK(IsUint<kKindBits>(kind_integer_value)) << kind_integer_value;
    410     DCHECK(IsShortValue(value)) << value;
    411     return (kind_integer_value & kKindMask) << kKindOffset
    412         | (value & kValueMask) << kValueOffset;
    413   }
    414 
    415   static DexRegisterLocation::Kind ExtractKindFromShortLocation(ShortLocation location) {
    416     uint8_t kind = (location >> kKindOffset) & kKindMask;
    417     DCHECK_LE(kind, static_cast<uint8_t>(DexRegisterLocation::Kind::kLastLocationKind));
    418     // We do not encode kNone locations in the stack map.
    419     DCHECK_NE(kind, static_cast<uint8_t>(DexRegisterLocation::Kind::kNone));
    420     return static_cast<DexRegisterLocation::Kind>(kind);
    421   }
    422 
    423   static int32_t ExtractValueFromShortLocation(ShortLocation location) {
    424     return (location >> kValueOffset) & kValueMask;
    425   }
    426 
    427   // Extract a location kind from the byte at position `offset`.
    428   DexRegisterLocation::Kind ExtractKindAtOffset(size_t offset) const {
    429     ShortLocation first_byte = region_.LoadUnaligned<ShortLocation>(offset);
    430     return ExtractKindFromShortLocation(first_byte);
    431   }
    432 
    433   MemoryRegion region_;
    434 
    435   friend class CodeInfo;
    436   friend class StackMapStream;
    437 };
    438 
    439 /* Information on Dex register locations for a specific PC, mapping a
    440  * stack map's Dex register to a location entry in a DexRegisterLocationCatalog.
    441  * The information is of the form:
    442  *
    443  *   [live_bit_mask, entries*]
    444  *
    445  * where entries are concatenated unsigned integer values encoded on a number
    446  * of bits (fixed per DexRegisterMap instances of a CodeInfo object) depending
    447  * on the number of entries in the Dex register location catalog
    448  * (see DexRegisterMap::SingleEntrySizeInBits).  The map is 1-byte aligned.
    449  */
    450 class DexRegisterMap {
    451  public:
    452   explicit DexRegisterMap(MemoryRegion region) : region_(region) {}
    453   DexRegisterMap() {}
    454 
    455   bool IsValid() const { return region_.pointer() != nullptr; }
    456 
    457   // Get the surface kind of Dex register `dex_register_number`.
    458   DexRegisterLocation::Kind GetLocationKind(uint16_t dex_register_number,
    459                                             uint16_t number_of_dex_registers,
    460                                             const CodeInfo& code_info,
    461                                             const CodeInfoEncoding& enc) const {
    462     return DexRegisterLocation::ConvertToSurfaceKind(
    463         GetLocationInternalKind(dex_register_number, number_of_dex_registers, code_info, enc));
    464   }
    465 
    466   // Get the internal kind of Dex register `dex_register_number`.
    467   DexRegisterLocation::Kind GetLocationInternalKind(uint16_t dex_register_number,
    468                                                     uint16_t number_of_dex_registers,
    469                                                     const CodeInfo& code_info,
    470                                                     const CodeInfoEncoding& enc) const;
    471 
    472   // Get the Dex register location `dex_register_number`.
    473   DexRegisterLocation GetDexRegisterLocation(uint16_t dex_register_number,
    474                                              uint16_t number_of_dex_registers,
    475                                              const CodeInfo& code_info,
    476                                              const CodeInfoEncoding& enc) const;
    477 
    478   int32_t GetStackOffsetInBytes(uint16_t dex_register_number,
    479                                 uint16_t number_of_dex_registers,
    480                                 const CodeInfo& code_info,
    481                                 const CodeInfoEncoding& enc) const {
    482     DexRegisterLocation location =
    483         GetDexRegisterLocation(dex_register_number, number_of_dex_registers, code_info, enc);
    484     DCHECK(location.GetKind() == DexRegisterLocation::Kind::kInStack);
    485     // GetDexRegisterLocation returns the offset in bytes.
    486     return location.GetValue();
    487   }
    488 
    489   int32_t GetConstant(uint16_t dex_register_number,
    490                       uint16_t number_of_dex_registers,
    491                       const CodeInfo& code_info,
    492                       const CodeInfoEncoding& enc) const {
    493     DexRegisterLocation location =
    494         GetDexRegisterLocation(dex_register_number, number_of_dex_registers, code_info, enc);
    495     DCHECK_EQ(location.GetKind(), DexRegisterLocation::Kind::kConstant);
    496     return location.GetValue();
    497   }
    498 
    499   int32_t GetMachineRegister(uint16_t dex_register_number,
    500                              uint16_t number_of_dex_registers,
    501                              const CodeInfo& code_info,
    502                              const CodeInfoEncoding& enc) const {
    503     DexRegisterLocation location =
    504         GetDexRegisterLocation(dex_register_number, number_of_dex_registers, code_info, enc);
    505     DCHECK(location.GetInternalKind() == DexRegisterLocation::Kind::kInRegister ||
    506            location.GetInternalKind() == DexRegisterLocation::Kind::kInRegisterHigh ||
    507            location.GetInternalKind() == DexRegisterLocation::Kind::kInFpuRegister ||
    508            location.GetInternalKind() == DexRegisterLocation::Kind::kInFpuRegisterHigh)
    509         << location.GetInternalKind();
    510     return location.GetValue();
    511   }
    512 
    513   // Get the index of the entry in the Dex register location catalog
    514   // corresponding to `dex_register_number`.
    515   size_t GetLocationCatalogEntryIndex(uint16_t dex_register_number,
    516                                       uint16_t number_of_dex_registers,
    517                                       size_t number_of_location_catalog_entries) const {
    518     if (!IsDexRegisterLive(dex_register_number)) {
    519       return DexRegisterLocationCatalog::kNoLocationEntryIndex;
    520     }
    521 
    522     if (number_of_location_catalog_entries == 1) {
    523       // We do not allocate space for location maps in the case of a
    524       // single-entry location catalog, as it is useless.  The only valid
    525       // entry index is 0;
    526       return 0;
    527     }
    528 
    529     // The bit offset of the beginning of the map locations.
    530     size_t map_locations_offset_in_bits =
    531         GetLocationMappingDataOffset(number_of_dex_registers) * kBitsPerByte;
    532     size_t index_in_dex_register_map = GetIndexInDexRegisterMap(dex_register_number);
    533     DCHECK_LT(index_in_dex_register_map, GetNumberOfLiveDexRegisters(number_of_dex_registers));
    534     // The bit size of an entry.
    535     size_t map_entry_size_in_bits = SingleEntrySizeInBits(number_of_location_catalog_entries);
    536     // The bit offset where `index_in_dex_register_map` is located.
    537     size_t entry_offset_in_bits =
    538         map_locations_offset_in_bits + index_in_dex_register_map * map_entry_size_in_bits;
    539     size_t location_catalog_entry_index =
    540         region_.LoadBits(entry_offset_in_bits, map_entry_size_in_bits);
    541     DCHECK_LT(location_catalog_entry_index, number_of_location_catalog_entries);
    542     return location_catalog_entry_index;
    543   }
    544 
    545   // Map entry at `index_in_dex_register_map` to `location_catalog_entry_index`.
    546   void SetLocationCatalogEntryIndex(size_t index_in_dex_register_map,
    547                                     size_t location_catalog_entry_index,
    548                                     uint16_t number_of_dex_registers,
    549                                     size_t number_of_location_catalog_entries) {
    550     DCHECK_LT(index_in_dex_register_map, GetNumberOfLiveDexRegisters(number_of_dex_registers));
    551     DCHECK_LT(location_catalog_entry_index, number_of_location_catalog_entries);
    552 
    553     if (number_of_location_catalog_entries == 1) {
    554       // We do not allocate space for location maps in the case of a
    555       // single-entry location catalog, as it is useless.
    556       return;
    557     }
    558 
    559     // The bit offset of the beginning of the map locations.
    560     size_t map_locations_offset_in_bits =
    561         GetLocationMappingDataOffset(number_of_dex_registers) * kBitsPerByte;
    562     // The bit size of an entry.
    563     size_t map_entry_size_in_bits = SingleEntrySizeInBits(number_of_location_catalog_entries);
    564     // The bit offset where `index_in_dex_register_map` is located.
    565     size_t entry_offset_in_bits =
    566         map_locations_offset_in_bits + index_in_dex_register_map * map_entry_size_in_bits;
    567     region_.StoreBits(entry_offset_in_bits, location_catalog_entry_index, map_entry_size_in_bits);
    568   }
    569 
    570   void SetLiveBitMask(uint16_t number_of_dex_registers,
    571                       const BitVector& live_dex_registers_mask) {
    572     size_t live_bit_mask_offset_in_bits = GetLiveBitMaskOffset() * kBitsPerByte;
    573     for (uint16_t i = 0; i < number_of_dex_registers; ++i) {
    574       region_.StoreBit(live_bit_mask_offset_in_bits + i, live_dex_registers_mask.IsBitSet(i));
    575     }
    576   }
    577 
    578   ALWAYS_INLINE bool IsDexRegisterLive(uint16_t dex_register_number) const {
    579     size_t live_bit_mask_offset_in_bits = GetLiveBitMaskOffset() * kBitsPerByte;
    580     return region_.LoadBit(live_bit_mask_offset_in_bits + dex_register_number);
    581   }
    582 
    583   size_t GetNumberOfLiveDexRegisters(uint16_t number_of_dex_registers) const {
    584     size_t number_of_live_dex_registers = 0;
    585     for (size_t i = 0; i < number_of_dex_registers; ++i) {
    586       if (IsDexRegisterLive(i)) {
    587         ++number_of_live_dex_registers;
    588       }
    589     }
    590     return number_of_live_dex_registers;
    591   }
    592 
    593   static size_t GetLiveBitMaskOffset() {
    594     return kFixedSize;
    595   }
    596 
    597   // Compute the size of the live register bit mask (in bytes), for a
    598   // method having `number_of_dex_registers` Dex registers.
    599   static size_t GetLiveBitMaskSize(uint16_t number_of_dex_registers) {
    600     return RoundUp(number_of_dex_registers, kBitsPerByte) / kBitsPerByte;
    601   }
    602 
    603   static size_t GetLocationMappingDataOffset(uint16_t number_of_dex_registers) {
    604     return GetLiveBitMaskOffset() + GetLiveBitMaskSize(number_of_dex_registers);
    605   }
    606 
    607   size_t GetLocationMappingDataSize(uint16_t number_of_dex_registers,
    608                                     size_t number_of_location_catalog_entries) const {
    609     size_t location_mapping_data_size_in_bits =
    610         GetNumberOfLiveDexRegisters(number_of_dex_registers)
    611         * SingleEntrySizeInBits(number_of_location_catalog_entries);
    612     return RoundUp(location_mapping_data_size_in_bits, kBitsPerByte) / kBitsPerByte;
    613   }
    614 
    615   // Return the size of a map entry in bits.  Note that if
    616   // `number_of_location_catalog_entries` equals 1, this function returns 0,
    617   // which is fine, as there is no need to allocate a map for a
    618   // single-entry location catalog; the only valid location catalog entry index
    619   // for a live register in this case is 0 and there is no need to
    620   // store it.
    621   static size_t SingleEntrySizeInBits(size_t number_of_location_catalog_entries) {
    622     // Handle the case of 0, as we cannot pass 0 to art::WhichPowerOf2.
    623     return number_of_location_catalog_entries == 0
    624         ? 0u
    625         : WhichPowerOf2(RoundUpToPowerOfTwo(number_of_location_catalog_entries));
    626   }
    627 
    628   // Return the size of the DexRegisterMap object, in bytes.
    629   size_t Size() const {
    630     return region_.size();
    631   }
    632 
    633   void Dump(VariableIndentationOutputStream* vios,
    634             const CodeInfo& code_info, uint16_t number_of_dex_registers) const;
    635 
    636  private:
    637   // Return the index in the Dex register map corresponding to the Dex
    638   // register number `dex_register_number`.
    639   size_t GetIndexInDexRegisterMap(uint16_t dex_register_number) const {
    640     if (!IsDexRegisterLive(dex_register_number)) {
    641       return kInvalidIndexInDexRegisterMap;
    642     }
    643     return GetNumberOfLiveDexRegisters(dex_register_number);
    644   }
    645 
    646   // Special (invalid) Dex register map entry index meaning that there
    647   // is no index in the map for a given Dex register (i.e., it must
    648   // have been mapped to a DexRegisterLocation::Kind::kNone location).
    649   static constexpr size_t kInvalidIndexInDexRegisterMap = -1;
    650 
    651   static constexpr int kFixedSize = 0;
    652 
    653   MemoryRegion region_;
    654 
    655   friend class CodeInfo;
    656   friend class StackMapStream;
    657 };
    658 
    659 // Represents bit range of bit-packed integer field.
    660 // We reuse the idea from ULEB128p1 to support encoding of -1 (aka 0xFFFFFFFF).
    661 // If min_value is set to -1, we implicitly subtract one from any loaded value,
    662 // and add one to any stored value. This is generalized to any negative values.
    663 // In other words, min_value acts as a base and the stored value is added to it.
    664 struct FieldEncoding {
    665   FieldEncoding(size_t start_offset, size_t end_offset, int32_t min_value = 0)
    666       : start_offset_(start_offset), end_offset_(end_offset), min_value_(min_value) {
    667     DCHECK_LE(start_offset_, end_offset_);
    668     DCHECK_LE(BitSize(), 32u);
    669   }
    670 
    671   ALWAYS_INLINE size_t BitSize() const { return end_offset_ - start_offset_; }
    672 
    673   template <typename Region>
    674   ALWAYS_INLINE int32_t Load(const Region& region) const {
    675     DCHECK_LE(end_offset_, region.size_in_bits());
    676     return static_cast<int32_t>(region.LoadBits(start_offset_, BitSize())) + min_value_;
    677   }
    678 
    679   template <typename Region>
    680   ALWAYS_INLINE void Store(Region region, int32_t value) const {
    681     region.StoreBits(start_offset_, value - min_value_, BitSize());
    682     DCHECK_EQ(Load(region), value);
    683   }
    684 
    685  private:
    686   size_t start_offset_;
    687   size_t end_offset_;
    688   int32_t min_value_;
    689 };
    690 
    691 class StackMapEncoding {
    692  public:
    693   StackMapEncoding()
    694       : dex_pc_bit_offset_(0),
    695         dex_register_map_bit_offset_(0),
    696         inline_info_bit_offset_(0),
    697         register_mask_index_bit_offset_(0),
    698         stack_mask_index_bit_offset_(0),
    699         total_bit_size_(0) {}
    700 
    701   // Set stack map bit layout based on given sizes.
    702   // Returns the size of stack map in bits.
    703   size_t SetFromSizes(size_t native_pc_max,
    704                       size_t dex_pc_max,
    705                       size_t dex_register_map_size,
    706                       size_t number_of_inline_info,
    707                       size_t number_of_register_masks,
    708                       size_t number_of_stack_masks) {
    709     total_bit_size_ = 0;
    710     DCHECK_EQ(kNativePcBitOffset, total_bit_size_);
    711     total_bit_size_ += MinimumBitsToStore(native_pc_max);
    712 
    713     dex_pc_bit_offset_ = total_bit_size_;
    714     // Note: We're not encoding the dex pc if there is none. That's the case
    715     // for an intrinsified native method, such as String.charAt().
    716     if (dex_pc_max != dex::kDexNoIndex) {
    717       total_bit_size_ += MinimumBitsToStore(1 /* kNoDexPc */ + dex_pc_max);
    718     }
    719 
    720     // We also need +1 for kNoDexRegisterMap, but since the size is strictly
    721     // greater than any offset we might try to encode, we already implicitly have it.
    722     dex_register_map_bit_offset_ = total_bit_size_;
    723     total_bit_size_ += MinimumBitsToStore(dex_register_map_size);
    724 
    725     // We also need +1 for kNoInlineInfo, but since the inline_info_size is strictly
    726     // greater than the offset we might try to encode, we already implicitly have it.
    727     // If inline_info_size is zero, we can encode only kNoInlineInfo (in zero bits).
    728     inline_info_bit_offset_ = total_bit_size_;
    729     total_bit_size_ += MinimumBitsToStore(number_of_inline_info);
    730 
    731     register_mask_index_bit_offset_ = total_bit_size_;
    732     total_bit_size_ += MinimumBitsToStore(number_of_register_masks);
    733 
    734     stack_mask_index_bit_offset_ = total_bit_size_;
    735     total_bit_size_ += MinimumBitsToStore(number_of_stack_masks);
    736 
    737     return total_bit_size_;
    738   }
    739 
    740   ALWAYS_INLINE FieldEncoding GetNativePcEncoding() const {
    741     return FieldEncoding(kNativePcBitOffset, dex_pc_bit_offset_);
    742   }
    743   ALWAYS_INLINE FieldEncoding GetDexPcEncoding() const {
    744     return FieldEncoding(dex_pc_bit_offset_, dex_register_map_bit_offset_, -1 /* min_value */);
    745   }
    746   ALWAYS_INLINE FieldEncoding GetDexRegisterMapEncoding() const {
    747     return FieldEncoding(dex_register_map_bit_offset_, inline_info_bit_offset_, -1 /* min_value */);
    748   }
    749   ALWAYS_INLINE FieldEncoding GetInlineInfoEncoding() const {
    750     return FieldEncoding(inline_info_bit_offset_,
    751                          register_mask_index_bit_offset_,
    752                          -1 /* min_value */);
    753   }
    754   ALWAYS_INLINE FieldEncoding GetRegisterMaskIndexEncoding() const {
    755     return FieldEncoding(register_mask_index_bit_offset_, stack_mask_index_bit_offset_);
    756   }
    757   ALWAYS_INLINE FieldEncoding GetStackMaskIndexEncoding() const {
    758     return FieldEncoding(stack_mask_index_bit_offset_, total_bit_size_);
    759   }
    760   ALWAYS_INLINE size_t BitSize() const {
    761     return total_bit_size_;
    762   }
    763 
    764   // Encode the encoding into the vector.
    765   template<typename Vector>
    766   void Encode(Vector* dest) const {
    767     static_assert(alignof(StackMapEncoding) == 1, "Should not require alignment");
    768     const uint8_t* ptr = reinterpret_cast<const uint8_t*>(this);
    769     dest->insert(dest->end(), ptr, ptr + sizeof(*this));
    770   }
    771 
    772   // Decode the encoding from a pointer, updates the pointer.
    773   void Decode(const uint8_t** ptr) {
    774     *this = *reinterpret_cast<const StackMapEncoding*>(*ptr);
    775     *ptr += sizeof(*this);
    776   }
    777 
    778   void Dump(VariableIndentationOutputStream* vios) const;
    779 
    780  private:
    781   static constexpr size_t kNativePcBitOffset = 0;
    782   uint8_t dex_pc_bit_offset_;
    783   uint8_t dex_register_map_bit_offset_;
    784   uint8_t inline_info_bit_offset_;
    785   uint8_t register_mask_index_bit_offset_;
    786   uint8_t stack_mask_index_bit_offset_;
    787   uint8_t total_bit_size_;
    788 };
    789 
    790 /**
    791  * A Stack Map holds compilation information for a specific PC necessary for:
    792  * - Mapping it to a dex PC,
    793  * - Knowing which stack entries are objects,
    794  * - Knowing which registers hold objects,
    795  * - Knowing the inlining information,
    796  * - Knowing the values of dex registers.
    797  *
    798  * The information is of the form:
    799  *
    800  *   [native_pc_offset, dex_pc, dex_register_map_offset, inlining_info_index, register_mask_index,
    801  *   stack_mask_index].
    802  */
    803 class StackMap {
    804  public:
    805   StackMap() {}
    806   explicit StackMap(BitMemoryRegion region) : region_(region) {}
    807 
    808   ALWAYS_INLINE bool IsValid() const { return region_.pointer() != nullptr; }
    809 
    810   ALWAYS_INLINE uint32_t GetDexPc(const StackMapEncoding& encoding) const {
    811     return encoding.GetDexPcEncoding().Load(region_);
    812   }
    813 
    814   ALWAYS_INLINE void SetDexPc(const StackMapEncoding& encoding, uint32_t dex_pc) {
    815     encoding.GetDexPcEncoding().Store(region_, dex_pc);
    816   }
    817 
    818   ALWAYS_INLINE uint32_t GetNativePcOffset(const StackMapEncoding& encoding,
    819                                            InstructionSet instruction_set) const {
    820     CodeOffset offset(
    821         CodeOffset::FromCompressedOffset(encoding.GetNativePcEncoding().Load(region_)));
    822     return offset.Uint32Value(instruction_set);
    823   }
    824 
    825   ALWAYS_INLINE void SetNativePcCodeOffset(const StackMapEncoding& encoding,
    826                                            CodeOffset native_pc_offset) {
    827     encoding.GetNativePcEncoding().Store(region_, native_pc_offset.CompressedValue());
    828   }
    829 
    830   ALWAYS_INLINE uint32_t GetDexRegisterMapOffset(const StackMapEncoding& encoding) const {
    831     return encoding.GetDexRegisterMapEncoding().Load(region_);
    832   }
    833 
    834   ALWAYS_INLINE void SetDexRegisterMapOffset(const StackMapEncoding& encoding, uint32_t offset) {
    835     encoding.GetDexRegisterMapEncoding().Store(region_, offset);
    836   }
    837 
    838   ALWAYS_INLINE uint32_t GetInlineInfoIndex(const StackMapEncoding& encoding) const {
    839     return encoding.GetInlineInfoEncoding().Load(region_);
    840   }
    841 
    842   ALWAYS_INLINE void SetInlineInfoIndex(const StackMapEncoding& encoding, uint32_t index) {
    843     encoding.GetInlineInfoEncoding().Store(region_, index);
    844   }
    845 
    846   ALWAYS_INLINE uint32_t GetRegisterMaskIndex(const StackMapEncoding& encoding) const {
    847     return encoding.GetRegisterMaskIndexEncoding().Load(region_);
    848   }
    849 
    850   ALWAYS_INLINE void SetRegisterMaskIndex(const StackMapEncoding& encoding, uint32_t mask) {
    851     encoding.GetRegisterMaskIndexEncoding().Store(region_, mask);
    852   }
    853 
    854   ALWAYS_INLINE uint32_t GetStackMaskIndex(const StackMapEncoding& encoding) const {
    855     return encoding.GetStackMaskIndexEncoding().Load(region_);
    856   }
    857 
    858   ALWAYS_INLINE void SetStackMaskIndex(const StackMapEncoding& encoding, uint32_t mask) {
    859     encoding.GetStackMaskIndexEncoding().Store(region_, mask);
    860   }
    861 
    862   ALWAYS_INLINE bool HasDexRegisterMap(const StackMapEncoding& encoding) const {
    863     return GetDexRegisterMapOffset(encoding) != kNoDexRegisterMap;
    864   }
    865 
    866   ALWAYS_INLINE bool HasInlineInfo(const StackMapEncoding& encoding) const {
    867     return GetInlineInfoIndex(encoding) != kNoInlineInfo;
    868   }
    869 
    870   ALWAYS_INLINE bool Equals(const StackMap& other) const {
    871     return region_.pointer() == other.region_.pointer() &&
    872            region_.size() == other.region_.size() &&
    873            region_.BitOffset() == other.region_.BitOffset();
    874   }
    875 
    876   void Dump(VariableIndentationOutputStream* vios,
    877             const CodeInfo& code_info,
    878             const CodeInfoEncoding& encoding,
    879             const MethodInfo& method_info,
    880             uint32_t code_offset,
    881             uint16_t number_of_dex_registers,
    882             InstructionSet instruction_set,
    883             const std::string& header_suffix = "") const;
    884 
    885   // Special (invalid) offset for the DexRegisterMapOffset field meaning
    886   // that there is no Dex register map for this stack map.
    887   static constexpr uint32_t kNoDexRegisterMap = -1;
    888 
    889   // Special (invalid) offset for the InlineDescriptorOffset field meaning
    890   // that there is no inline info for this stack map.
    891   static constexpr uint32_t kNoInlineInfo = -1;
    892 
    893  private:
    894   static constexpr int kFixedSize = 0;
    895 
    896   BitMemoryRegion region_;
    897 
    898   friend class StackMapStream;
    899 };
    900 
    901 class InlineInfoEncoding {
    902  public:
    903   void SetFromSizes(size_t method_index_idx_max,
    904                     size_t dex_pc_max,
    905                     size_t extra_data_max,
    906                     size_t dex_register_map_size) {
    907     total_bit_size_ = kMethodIndexBitOffset;
    908     total_bit_size_ += MinimumBitsToStore(method_index_idx_max);
    909 
    910     dex_pc_bit_offset_ = dchecked_integral_cast<uint8_t>(total_bit_size_);
    911     // Note: We're not encoding the dex pc if there is none. That's the case
    912     // for an intrinsified native method, such as String.charAt().
    913     if (dex_pc_max != dex::kDexNoIndex) {
    914       total_bit_size_ += MinimumBitsToStore(1 /* kNoDexPc */ + dex_pc_max);
    915     }
    916 
    917     extra_data_bit_offset_ = dchecked_integral_cast<uint8_t>(total_bit_size_);
    918     total_bit_size_ += MinimumBitsToStore(extra_data_max);
    919 
    920     // We also need +1 for kNoDexRegisterMap, but since the size is strictly
    921     // greater than any offset we might try to encode, we already implicitly have it.
    922     dex_register_map_bit_offset_ = dchecked_integral_cast<uint8_t>(total_bit_size_);
    923     total_bit_size_ += MinimumBitsToStore(dex_register_map_size);
    924   }
    925 
    926   ALWAYS_INLINE FieldEncoding GetMethodIndexIdxEncoding() const {
    927     return FieldEncoding(kMethodIndexBitOffset, dex_pc_bit_offset_);
    928   }
    929   ALWAYS_INLINE FieldEncoding GetDexPcEncoding() const {
    930     return FieldEncoding(dex_pc_bit_offset_, extra_data_bit_offset_, -1 /* min_value */);
    931   }
    932   ALWAYS_INLINE FieldEncoding GetExtraDataEncoding() const {
    933     return FieldEncoding(extra_data_bit_offset_, dex_register_map_bit_offset_);
    934   }
    935   ALWAYS_INLINE FieldEncoding GetDexRegisterMapEncoding() const {
    936     return FieldEncoding(dex_register_map_bit_offset_, total_bit_size_, -1 /* min_value */);
    937   }
    938   ALWAYS_INLINE size_t BitSize() const {
    939     return total_bit_size_;
    940   }
    941 
    942   void Dump(VariableIndentationOutputStream* vios) const;
    943 
    944   // Encode the encoding into the vector.
    945   template<typename Vector>
    946   void Encode(Vector* dest) const {
    947     static_assert(alignof(InlineInfoEncoding) == 1, "Should not require alignment");
    948     const uint8_t* ptr = reinterpret_cast<const uint8_t*>(this);
    949     dest->insert(dest->end(), ptr, ptr + sizeof(*this));
    950   }
    951 
    952   // Decode the encoding from a pointer, updates the pointer.
    953   void Decode(const uint8_t** ptr) {
    954     *this = *reinterpret_cast<const InlineInfoEncoding*>(*ptr);
    955     *ptr += sizeof(*this);
    956   }
    957 
    958  private:
    959   static constexpr uint8_t kIsLastBitOffset = 0;
    960   static constexpr uint8_t kMethodIndexBitOffset = 1;
    961   uint8_t dex_pc_bit_offset_;
    962   uint8_t extra_data_bit_offset_;
    963   uint8_t dex_register_map_bit_offset_;
    964   uint8_t total_bit_size_;
    965 };
    966 
    967 /**
    968  * Inline information for a specific PC. The information is of the form:
    969  *
    970  *   [is_last,
    971  *    method_index (or ArtMethod high bits),
    972  *    dex_pc,
    973  *    extra_data (ArtMethod low bits or 1),
    974  *    dex_register_map_offset]+.
    975  */
    976 class InlineInfo {
    977  public:
    978   explicit InlineInfo(BitMemoryRegion region) : region_(region) {}
    979 
    980   ALWAYS_INLINE uint32_t GetDepth(const InlineInfoEncoding& encoding) const {
    981     size_t depth = 0;
    982     while (!GetRegionAtDepth(encoding, depth++).LoadBit(0)) { }  // Check is_last bit.
    983     return depth;
    984   }
    985 
    986   ALWAYS_INLINE void SetDepth(const InlineInfoEncoding& encoding, uint32_t depth) {
    987     DCHECK_GT(depth, 0u);
    988     for (size_t d = 0; d < depth; ++d) {
    989       GetRegionAtDepth(encoding, d).StoreBit(0, d == depth - 1);  // Set is_last bit.
    990     }
    991   }
    992 
    993   ALWAYS_INLINE uint32_t GetMethodIndexIdxAtDepth(const InlineInfoEncoding& encoding,
    994                                                   uint32_t depth) const {
    995     DCHECK(!EncodesArtMethodAtDepth(encoding, depth));
    996     return encoding.GetMethodIndexIdxEncoding().Load(GetRegionAtDepth(encoding, depth));
    997   }
    998 
    999   ALWAYS_INLINE void SetMethodIndexIdxAtDepth(const InlineInfoEncoding& encoding,
   1000                                               uint32_t depth,
   1001                                               uint32_t index) {
   1002     encoding.GetMethodIndexIdxEncoding().Store(GetRegionAtDepth(encoding, depth), index);
   1003   }
   1004 
   1005 
   1006   ALWAYS_INLINE uint32_t GetMethodIndexAtDepth(const InlineInfoEncoding& encoding,
   1007                                                const MethodInfo& method_info,
   1008                                                uint32_t depth) const {
   1009     return method_info.GetMethodIndex(GetMethodIndexIdxAtDepth(encoding, depth));
   1010   }
   1011 
   1012   ALWAYS_INLINE uint32_t GetDexPcAtDepth(const InlineInfoEncoding& encoding,
   1013                                          uint32_t depth) const {
   1014     return encoding.GetDexPcEncoding().Load(GetRegionAtDepth(encoding, depth));
   1015   }
   1016 
   1017   ALWAYS_INLINE void SetDexPcAtDepth(const InlineInfoEncoding& encoding,
   1018                                      uint32_t depth,
   1019                                      uint32_t dex_pc) {
   1020     encoding.GetDexPcEncoding().Store(GetRegionAtDepth(encoding, depth), dex_pc);
   1021   }
   1022 
   1023   ALWAYS_INLINE bool EncodesArtMethodAtDepth(const InlineInfoEncoding& encoding,
   1024                                              uint32_t depth) const {
   1025     return (encoding.GetExtraDataEncoding().Load(GetRegionAtDepth(encoding, depth)) & 1) == 0;
   1026   }
   1027 
   1028   ALWAYS_INLINE void SetExtraDataAtDepth(const InlineInfoEncoding& encoding,
   1029                                          uint32_t depth,
   1030                                          uint32_t extra_data) {
   1031     encoding.GetExtraDataEncoding().Store(GetRegionAtDepth(encoding, depth), extra_data);
   1032   }
   1033 
   1034   ALWAYS_INLINE ArtMethod* GetArtMethodAtDepth(const InlineInfoEncoding& encoding,
   1035                                                uint32_t depth) const {
   1036     uint32_t low_bits = encoding.GetExtraDataEncoding().Load(GetRegionAtDepth(encoding, depth));
   1037     uint32_t high_bits = encoding.GetMethodIndexIdxEncoding().Load(
   1038         GetRegionAtDepth(encoding, depth));
   1039     if (high_bits == 0) {
   1040       return reinterpret_cast<ArtMethod*>(low_bits);
   1041     } else {
   1042       uint64_t address = high_bits;
   1043       address = address << 32;
   1044       return reinterpret_cast<ArtMethod*>(address | low_bits);
   1045     }
   1046   }
   1047 
   1048   ALWAYS_INLINE uint32_t GetDexRegisterMapOffsetAtDepth(const InlineInfoEncoding& encoding,
   1049                                                         uint32_t depth) const {
   1050     return encoding.GetDexRegisterMapEncoding().Load(GetRegionAtDepth(encoding, depth));
   1051   }
   1052 
   1053   ALWAYS_INLINE void SetDexRegisterMapOffsetAtDepth(const InlineInfoEncoding& encoding,
   1054                                                     uint32_t depth,
   1055                                                     uint32_t offset) {
   1056     encoding.GetDexRegisterMapEncoding().Store(GetRegionAtDepth(encoding, depth), offset);
   1057   }
   1058 
   1059   ALWAYS_INLINE bool HasDexRegisterMapAtDepth(const InlineInfoEncoding& encoding,
   1060                                               uint32_t depth) const {
   1061     return GetDexRegisterMapOffsetAtDepth(encoding, depth) != StackMap::kNoDexRegisterMap;
   1062   }
   1063 
   1064   void Dump(VariableIndentationOutputStream* vios,
   1065             const CodeInfo& info,
   1066             const MethodInfo& method_info,
   1067             uint16_t* number_of_dex_registers) const;
   1068 
   1069  private:
   1070   ALWAYS_INLINE BitMemoryRegion GetRegionAtDepth(const InlineInfoEncoding& encoding,
   1071                                                  uint32_t depth) const {
   1072     size_t entry_size = encoding.BitSize();
   1073     DCHECK_GT(entry_size, 0u);
   1074     return region_.Subregion(depth * entry_size, entry_size);
   1075   }
   1076 
   1077   BitMemoryRegion region_;
   1078 };
   1079 
   1080 // Bit sized region encoding, may be more than 255 bits.
   1081 class BitRegionEncoding {
   1082  public:
   1083   uint32_t num_bits = 0;
   1084 
   1085   ALWAYS_INLINE size_t BitSize() const {
   1086     return num_bits;
   1087   }
   1088 
   1089   template<typename Vector>
   1090   void Encode(Vector* dest) const {
   1091     EncodeUnsignedLeb128(dest, num_bits);  // Use leb in case num_bits is greater than 255.
   1092   }
   1093 
   1094   void Decode(const uint8_t** ptr) {
   1095     num_bits = DecodeUnsignedLeb128(ptr);
   1096   }
   1097 };
   1098 
   1099 // A table of bit sized encodings.
   1100 template <typename Encoding>
   1101 struct BitEncodingTable {
   1102   static constexpr size_t kInvalidOffset = static_cast<size_t>(-1);
   1103   // How the encoding is laid out (serialized).
   1104   Encoding encoding;
   1105 
   1106   // Number of entries in the table (serialized).
   1107   size_t num_entries;
   1108 
   1109   // Bit offset for the base of the table (computed).
   1110   size_t bit_offset = kInvalidOffset;
   1111 
   1112   template<typename Vector>
   1113   void Encode(Vector* dest) const {
   1114     EncodeUnsignedLeb128(dest, num_entries);
   1115     encoding.Encode(dest);
   1116   }
   1117 
   1118   ALWAYS_INLINE void Decode(const uint8_t** ptr) {
   1119     num_entries = DecodeUnsignedLeb128(ptr);
   1120     encoding.Decode(ptr);
   1121   }
   1122 
   1123   // Set the bit offset in the table and adds the space used by the table to offset.
   1124   void UpdateBitOffset(size_t* offset) {
   1125     DCHECK(offset != nullptr);
   1126     bit_offset = *offset;
   1127     *offset += encoding.BitSize() * num_entries;
   1128   }
   1129 
   1130   // Return the bit region for the map at index i.
   1131   ALWAYS_INLINE BitMemoryRegion BitRegion(MemoryRegion region, size_t index) const {
   1132     DCHECK_NE(bit_offset, kInvalidOffset) << "Invalid table offset";
   1133     DCHECK_LT(index, num_entries);
   1134     const size_t map_size = encoding.BitSize();
   1135     return BitMemoryRegion(region, bit_offset + index * map_size, map_size);
   1136   }
   1137 };
   1138 
   1139 // A byte sized table of possible variable sized encodings.
   1140 struct ByteSizedTable {
   1141   static constexpr size_t kInvalidOffset = static_cast<size_t>(-1);
   1142 
   1143   // Number of entries in the table (serialized).
   1144   size_t num_entries = 0;
   1145 
   1146   // Number of bytes of the table (serialized).
   1147   size_t num_bytes;
   1148 
   1149   // Bit offset for the base of the table (computed).
   1150   size_t byte_offset = kInvalidOffset;
   1151 
   1152   template<typename Vector>
   1153   void Encode(Vector* dest) const {
   1154     EncodeUnsignedLeb128(dest, num_entries);
   1155     EncodeUnsignedLeb128(dest, num_bytes);
   1156   }
   1157 
   1158   ALWAYS_INLINE void Decode(const uint8_t** ptr) {
   1159     num_entries = DecodeUnsignedLeb128(ptr);
   1160     num_bytes = DecodeUnsignedLeb128(ptr);
   1161   }
   1162 
   1163   // Set the bit offset of the table. Adds the total bit size of the table to offset.
   1164   void UpdateBitOffset(size_t* offset) {
   1165     DCHECK(offset != nullptr);
   1166     DCHECK_ALIGNED(*offset, kBitsPerByte);
   1167     byte_offset = *offset / kBitsPerByte;
   1168     *offset += num_bytes * kBitsPerByte;
   1169   }
   1170 };
   1171 
   1172 // Format is [native pc, invoke type, method index].
   1173 class InvokeInfoEncoding {
   1174  public:
   1175   void SetFromSizes(size_t native_pc_max,
   1176                     size_t invoke_type_max,
   1177                     size_t method_index_max) {
   1178     total_bit_size_ = 0;
   1179     DCHECK_EQ(kNativePcBitOffset, total_bit_size_);
   1180     total_bit_size_ += MinimumBitsToStore(native_pc_max);
   1181     invoke_type_bit_offset_ = total_bit_size_;
   1182     total_bit_size_ += MinimumBitsToStore(invoke_type_max);
   1183     method_index_bit_offset_ = total_bit_size_;
   1184     total_bit_size_ += MinimumBitsToStore(method_index_max);
   1185   }
   1186 
   1187   ALWAYS_INLINE FieldEncoding GetNativePcEncoding() const {
   1188     return FieldEncoding(kNativePcBitOffset, invoke_type_bit_offset_);
   1189   }
   1190 
   1191   ALWAYS_INLINE FieldEncoding GetInvokeTypeEncoding() const {
   1192     return FieldEncoding(invoke_type_bit_offset_, method_index_bit_offset_);
   1193   }
   1194 
   1195   ALWAYS_INLINE FieldEncoding GetMethodIndexEncoding() const {
   1196     return FieldEncoding(method_index_bit_offset_, total_bit_size_);
   1197   }
   1198 
   1199   ALWAYS_INLINE size_t BitSize() const {
   1200     return total_bit_size_;
   1201   }
   1202 
   1203   template<typename Vector>
   1204   void Encode(Vector* dest) const {
   1205     static_assert(alignof(InvokeInfoEncoding) == 1, "Should not require alignment");
   1206     const uint8_t* ptr = reinterpret_cast<const uint8_t*>(this);
   1207     dest->insert(dest->end(), ptr, ptr + sizeof(*this));
   1208   }
   1209 
   1210   void Decode(const uint8_t** ptr) {
   1211     *this = *reinterpret_cast<const InvokeInfoEncoding*>(*ptr);
   1212     *ptr += sizeof(*this);
   1213   }
   1214 
   1215  private:
   1216   static constexpr uint8_t kNativePcBitOffset = 0;
   1217   uint8_t invoke_type_bit_offset_;
   1218   uint8_t method_index_bit_offset_;
   1219   uint8_t total_bit_size_;
   1220 };
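
         // For illustration, with hypothetical maxima SetFromSizes(/* native_pc_max */ 1023,
         // /* invoke_type_max */ 5, /* method_index_max */ 1000):
         //   - the native pc field gets MinimumBitsToStore(1023) = 10 bits at offset 0,
         //   - the invoke type field gets MinimumBitsToStore(5) = 3 bits at offset 10,
         //   - the method index field gets MinimumBitsToStore(1000) = 10 bits at offset 13,
         // for a total of 23 bits per InvokeInfo entry.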
   1221 
   1222 class InvokeInfo {
   1223  public:
   1224   explicit InvokeInfo(BitMemoryRegion region) : region_(region) {}
   1225 
   1226   ALWAYS_INLINE uint32_t GetNativePcOffset(const InvokeInfoEncoding& encoding,
   1227                                            InstructionSet instruction_set) const {
   1228     CodeOffset offset(
   1229         CodeOffset::FromCompressedOffset(encoding.GetNativePcEncoding().Load(region_)));
   1230     return offset.Uint32Value(instruction_set);
   1231   }
   1232 
   1233   ALWAYS_INLINE void SetNativePcCodeOffset(const InvokeInfoEncoding& encoding,
   1234                                            CodeOffset native_pc_offset) {
   1235     encoding.GetNativePcEncoding().Store(region_, native_pc_offset.CompressedValue());
   1236   }
   1237 
   1238   ALWAYS_INLINE uint32_t GetInvokeType(const InvokeInfoEncoding& encoding) const {
   1239     return encoding.GetInvokeTypeEncoding().Load(region_);
   1240   }
   1241 
   1242   ALWAYS_INLINE void SetInvokeType(const InvokeInfoEncoding& encoding, uint32_t invoke_type) {
   1243     encoding.GetInvokeTypeEncoding().Store(region_, invoke_type);
   1244   }
   1245 
   1246   ALWAYS_INLINE uint32_t GetMethodIndexIdx(const InvokeInfoEncoding& encoding) const {
   1247     return encoding.GetMethodIndexEncoding().Load(region_);
   1248   }
   1249 
   1250   ALWAYS_INLINE void SetMethodIndexIdx(const InvokeInfoEncoding& encoding,
   1251                                        uint32_t method_index_idx) {
   1252     encoding.GetMethodIndexEncoding().Store(region_, method_index_idx);
   1253   }
   1254 
   1255   ALWAYS_INLINE uint32_t GetMethodIndex(const InvokeInfoEncoding& encoding,
   1256                                         MethodInfo method_info) const {
   1257     return method_info.GetMethodIndex(GetMethodIndexIdx(encoding));
   1258   }
   1259 
   1260   bool IsValid() const { return region_.pointer() != nullptr; }
   1261 
   1262  private:
   1263   BitMemoryRegion region_;
   1264 };
   1265 
   1266 // Most of the fields are encoded as ULEB128 to save space.
   1267 struct CodeInfoEncoding {
   1268   using SizeType = uint32_t;
   1269 
   1270   static constexpr SizeType kInvalidSize = std::numeric_limits<SizeType>::max();
   1271 
    1272   // Byte-sized tables go first to avoid unnecessary alignment bits.
   1273   ByteSizedTable dex_register_map;
   1274   ByteSizedTable location_catalog;
   1275   BitEncodingTable<StackMapEncoding> stack_map;
   1276   BitEncodingTable<BitRegionEncoding> register_mask;
   1277   BitEncodingTable<BitRegionEncoding> stack_mask;
   1278   BitEncodingTable<InvokeInfoEncoding> invoke_info;
   1279   BitEncodingTable<InlineInfoEncoding> inline_info;
   1280 
   1281   CodeInfoEncoding() {}
   1282 
   1283   explicit CodeInfoEncoding(const void* data) {
   1284     const uint8_t* ptr = reinterpret_cast<const uint8_t*>(data);
   1285     dex_register_map.Decode(&ptr);
   1286     location_catalog.Decode(&ptr);
   1287     stack_map.Decode(&ptr);
   1288     register_mask.Decode(&ptr);
   1289     stack_mask.Decode(&ptr);
   1290     invoke_info.Decode(&ptr);
   1291     if (stack_map.encoding.GetInlineInfoEncoding().BitSize() > 0) {
   1292       inline_info.Decode(&ptr);
   1293     } else {
   1294       inline_info = BitEncodingTable<InlineInfoEncoding>();
   1295     }
   1296     cache_header_size =
   1297         dchecked_integral_cast<SizeType>(ptr - reinterpret_cast<const uint8_t*>(data));
   1298     ComputeTableOffsets();
   1299   }
   1300 
   1301   // Compress is not const since it calculates cache_header_size. This is used by PrepareForFillIn.
   1302   template<typename Vector>
   1303   void Compress(Vector* dest) {
   1304     dex_register_map.Encode(dest);
   1305     location_catalog.Encode(dest);
   1306     stack_map.Encode(dest);
   1307     register_mask.Encode(dest);
   1308     stack_mask.Encode(dest);
   1309     invoke_info.Encode(dest);
   1310     if (stack_map.encoding.GetInlineInfoEncoding().BitSize() > 0) {
   1311       inline_info.Encode(dest);
   1312     }
   1313     cache_header_size = dest->size();
   1314   }
   1315 
   1316   ALWAYS_INLINE void ComputeTableOffsets() {
   1317     // Skip the header.
   1318     size_t bit_offset = HeaderSize() * kBitsPerByte;
    1319     // The byte tables must be byte-aligned, so they go first.
   1320     dex_register_map.UpdateBitOffset(&bit_offset);
   1321     location_catalog.UpdateBitOffset(&bit_offset);
   1322     // Other tables don't require alignment.
   1323     stack_map.UpdateBitOffset(&bit_offset);
   1324     register_mask.UpdateBitOffset(&bit_offset);
   1325     stack_mask.UpdateBitOffset(&bit_offset);
   1326     invoke_info.UpdateBitOffset(&bit_offset);
   1327     inline_info.UpdateBitOffset(&bit_offset);
   1328     cache_non_header_size = RoundUp(bit_offset, kBitsPerByte) / kBitsPerByte - HeaderSize();
   1329   }
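
           // For illustration, with hypothetical sizes HeaderSize() = 16 bytes,
           // dex_register_map.num_bytes = 64 and location_catalog.num_bytes = 8:
           //   - bit_offset starts at 16 * 8 = 128,
           //   - dex_register_map.byte_offset = 16 and bit_offset grows to 128 + 64 * 8 = 640,
           //   - location_catalog.byte_offset = 80 and bit_offset grows to 640 + 8 * 8 = 704,
           // after which the bit-packed tables (stack_map, register_mask, stack_mask,
           // invoke_info, inline_info) follow back to back without byte alignment.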
   1330 
   1331   ALWAYS_INLINE size_t HeaderSize() const {
   1332     DCHECK_NE(cache_header_size, kInvalidSize) << "Uninitialized";
   1333     return cache_header_size;
   1334   }
   1335 
   1336   ALWAYS_INLINE size_t NonHeaderSize() const {
   1337     DCHECK_NE(cache_non_header_size, kInvalidSize) << "Uninitialized";
   1338     return cache_non_header_size;
   1339   }
   1340 
   1341  private:
   1342   // Computed fields (not serialized).
    1343   // Header size in bytes, cached to avoid re-decoding the encoding in HeaderSize.
   1344   SizeType cache_header_size = kInvalidSize;
    1345   // Non-header size in bytes, cached to avoid re-decoding the encoding in NonHeaderSize.
   1346   SizeType cache_non_header_size = kInvalidSize;
   1347 };
   1348 
   1349 /**
   1350  * Wrapper around all compiler information collected for a method.
   1351  * The information is of the form:
   1352  *
   1353  *   [CodeInfoEncoding, DexRegisterMap+, DexLocationCatalog+, StackMap+, RegisterMask+, StackMask+,
    1354  *    InvokeInfo*, InlineInfo*]
   1355  *
   1356  * where CodeInfoEncoding is of the form:
   1357  *
   1358  *   [ByteSizedTable(dex_register_map), ByteSizedTable(location_catalog),
   1359  *    BitEncodingTable<StackMapEncoding>, BitEncodingTable<BitRegionEncoding>,
    1360  *    BitEncodingTable<BitRegionEncoding>, BitEncodingTable<InvokeInfoEncoding>,
          *    BitEncodingTable<InlineInfoEncoding>]
   1361  */
   1362 class CodeInfo {
   1363  public:
   1364   explicit CodeInfo(MemoryRegion region) : region_(region) {
   1365   }
   1366 
   1367   explicit CodeInfo(const void* data) {
   1368     CodeInfoEncoding encoding = CodeInfoEncoding(data);
   1369     region_ = MemoryRegion(const_cast<void*>(data),
   1370                            encoding.HeaderSize() + encoding.NonHeaderSize());
   1371   }
   1372 
   1373   CodeInfoEncoding ExtractEncoding() const {
   1374     CodeInfoEncoding encoding(region_.begin());
   1375     AssertValidStackMap(encoding);
   1376     return encoding;
   1377   }
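
           // Typical read-side usage sketch (variable names are hypothetical): extract the
           // encoding once and pass it to every query, e.g.
           //
           //   CodeInfo code_info(code_info_data);
           //   CodeInfoEncoding encoding = code_info.ExtractEncoding();
           //   StackMap map = code_info.GetStackMapForNativePcOffset(native_pc_offset, encoding);
           //   if (map.IsValid()) {
           //     uint32_t dex_pc = map.GetDexPc(encoding.stack_map.encoding);
           //   }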
   1378 
   1379   bool HasInlineInfo(const CodeInfoEncoding& encoding) const {
   1380     return encoding.stack_map.encoding.GetInlineInfoEncoding().BitSize() > 0;
   1381   }
   1382 
   1383   DexRegisterLocationCatalog GetDexRegisterLocationCatalog(const CodeInfoEncoding& encoding) const {
   1384     return DexRegisterLocationCatalog(region_.Subregion(encoding.location_catalog.byte_offset,
   1385                                                         encoding.location_catalog.num_bytes));
   1386   }
   1387 
   1388   ALWAYS_INLINE size_t GetNumberOfStackMaskBits(const CodeInfoEncoding& encoding) const {
   1389     return encoding.stack_mask.encoding.BitSize();
   1390   }
   1391 
   1392   ALWAYS_INLINE StackMap GetStackMapAt(size_t index, const CodeInfoEncoding& encoding) const {
   1393     return StackMap(encoding.stack_map.BitRegion(region_, index));
   1394   }
   1395 
   1396   BitMemoryRegion GetStackMask(size_t index, const CodeInfoEncoding& encoding) const {
   1397     return encoding.stack_mask.BitRegion(region_, index);
   1398   }
   1399 
   1400   BitMemoryRegion GetStackMaskOf(const CodeInfoEncoding& encoding,
   1401                                  const StackMap& stack_map) const {
   1402     return GetStackMask(stack_map.GetStackMaskIndex(encoding.stack_map.encoding), encoding);
   1403   }
   1404 
   1405   BitMemoryRegion GetRegisterMask(size_t index, const CodeInfoEncoding& encoding) const {
   1406     return encoding.register_mask.BitRegion(region_, index);
   1407   }
   1408 
   1409   uint32_t GetRegisterMaskOf(const CodeInfoEncoding& encoding, const StackMap& stack_map) const {
   1410     size_t index = stack_map.GetRegisterMaskIndex(encoding.stack_map.encoding);
   1411     return GetRegisterMask(index, encoding).LoadBits(0u, encoding.register_mask.encoding.BitSize());
   1412   }
   1413 
   1414   uint32_t GetNumberOfLocationCatalogEntries(const CodeInfoEncoding& encoding) const {
   1415     return encoding.location_catalog.num_entries;
   1416   }
   1417 
   1418   uint32_t GetDexRegisterLocationCatalogSize(const CodeInfoEncoding& encoding) const {
   1419     return encoding.location_catalog.num_bytes;
   1420   }
   1421 
   1422   uint32_t GetNumberOfStackMaps(const CodeInfoEncoding& encoding) const {
   1423     return encoding.stack_map.num_entries;
   1424   }
   1425 
   1426   // Get the size of all the stack maps of this CodeInfo object, in bits. Not byte aligned.
   1427   ALWAYS_INLINE size_t GetStackMapsSizeInBits(const CodeInfoEncoding& encoding) const {
   1428     return encoding.stack_map.encoding.BitSize() * GetNumberOfStackMaps(encoding);
   1429   }
   1430 
   1431   InvokeInfo GetInvokeInfo(const CodeInfoEncoding& encoding, size_t index) const {
   1432     return InvokeInfo(encoding.invoke_info.BitRegion(region_, index));
   1433   }
   1434 
   1435   DexRegisterMap GetDexRegisterMapOf(StackMap stack_map,
   1436                                      const CodeInfoEncoding& encoding,
   1437                                      size_t number_of_dex_registers) const {
   1438     if (!stack_map.HasDexRegisterMap(encoding.stack_map.encoding)) {
   1439       return DexRegisterMap();
   1440     }
   1441     const uint32_t offset = encoding.dex_register_map.byte_offset +
   1442         stack_map.GetDexRegisterMapOffset(encoding.stack_map.encoding);
   1443     size_t size = ComputeDexRegisterMapSizeOf(encoding, offset, number_of_dex_registers);
   1444     return DexRegisterMap(region_.Subregion(offset, size));
   1445   }
   1446 
   1447   size_t GetDexRegisterMapsSize(const CodeInfoEncoding& encoding,
   1448                                 uint32_t number_of_dex_registers) const {
   1449     size_t total = 0;
   1450     for (size_t i = 0, e = GetNumberOfStackMaps(encoding); i < e; ++i) {
   1451       StackMap stack_map = GetStackMapAt(i, encoding);
   1452       DexRegisterMap map(GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers));
   1453       total += map.Size();
   1454     }
   1455     return total;
   1456   }
   1457 
    1458   // Return the `DexRegisterMap` pointed to by `inline_info` at depth `depth`.
   1459   DexRegisterMap GetDexRegisterMapAtDepth(uint8_t depth,
   1460                                           InlineInfo inline_info,
   1461                                           const CodeInfoEncoding& encoding,
   1462                                           uint32_t number_of_dex_registers) const {
   1463     if (!inline_info.HasDexRegisterMapAtDepth(encoding.inline_info.encoding, depth)) {
   1464       return DexRegisterMap();
   1465     } else {
   1466       uint32_t offset = encoding.dex_register_map.byte_offset +
   1467           inline_info.GetDexRegisterMapOffsetAtDepth(encoding.inline_info.encoding, depth);
   1468       size_t size = ComputeDexRegisterMapSizeOf(encoding, offset, number_of_dex_registers);
   1469       return DexRegisterMap(region_.Subregion(offset, size));
   1470     }
   1471   }
   1472 
   1473   InlineInfo GetInlineInfo(size_t index, const CodeInfoEncoding& encoding) const {
   1474     // Since we do not know the depth, we just return the whole remaining map. The caller may
   1475     // access the inline info for arbitrary depths. To return the precise inline info we would need
   1476     // to count the depth before returning.
   1477     // TODO: Clean this up.
   1478     const size_t bit_offset = encoding.inline_info.bit_offset +
   1479         index * encoding.inline_info.encoding.BitSize();
   1480     return InlineInfo(BitMemoryRegion(region_, bit_offset, region_.size_in_bits() - bit_offset));
   1481   }
   1482 
   1483   InlineInfo GetInlineInfoOf(StackMap stack_map, const CodeInfoEncoding& encoding) const {
   1484     DCHECK(stack_map.HasInlineInfo(encoding.stack_map.encoding));
   1485     uint32_t index = stack_map.GetInlineInfoIndex(encoding.stack_map.encoding);
   1486     return GetInlineInfo(index, encoding);
   1487   }
   1488 
   1489   StackMap GetStackMapForDexPc(uint32_t dex_pc, const CodeInfoEncoding& encoding) const {
   1490     for (size_t i = 0, e = GetNumberOfStackMaps(encoding); i < e; ++i) {
   1491       StackMap stack_map = GetStackMapAt(i, encoding);
   1492       if (stack_map.GetDexPc(encoding.stack_map.encoding) == dex_pc) {
   1493         return stack_map;
   1494       }
   1495     }
   1496     return StackMap();
   1497   }
   1498 
   1499   // Searches the stack map list backwards because catch stack maps are stored
   1500   // at the end.
   1501   StackMap GetCatchStackMapForDexPc(uint32_t dex_pc, const CodeInfoEncoding& encoding) const {
   1502     for (size_t i = GetNumberOfStackMaps(encoding); i > 0; --i) {
   1503       StackMap stack_map = GetStackMapAt(i - 1, encoding);
   1504       if (stack_map.GetDexPc(encoding.stack_map.encoding) == dex_pc) {
   1505         return stack_map;
   1506       }
   1507     }
   1508     return StackMap();
   1509   }
   1510 
   1511   StackMap GetOsrStackMapForDexPc(uint32_t dex_pc, const CodeInfoEncoding& encoding) const {
   1512     size_t e = GetNumberOfStackMaps(encoding);
   1513     if (e == 0) {
    1514       // There cannot be an OSR stack map if there are no stack maps.
   1515       return StackMap();
   1516     }
   1517     // Walk over all stack maps. If two consecutive stack maps are identical, then we
   1518     // have found a stack map suitable for OSR.
   1519     const StackMapEncoding& stack_map_encoding = encoding.stack_map.encoding;
   1520     for (size_t i = 0; i < e - 1; ++i) {
   1521       StackMap stack_map = GetStackMapAt(i, encoding);
   1522       if (stack_map.GetDexPc(stack_map_encoding) == dex_pc) {
   1523         StackMap other = GetStackMapAt(i + 1, encoding);
   1524         if (other.GetDexPc(stack_map_encoding) == dex_pc &&
   1525             other.GetNativePcOffset(stack_map_encoding, kRuntimeISA) ==
   1526                 stack_map.GetNativePcOffset(stack_map_encoding, kRuntimeISA)) {
   1527           DCHECK_EQ(other.GetDexRegisterMapOffset(stack_map_encoding),
   1528                     stack_map.GetDexRegisterMapOffset(stack_map_encoding));
   1529           DCHECK(!stack_map.HasInlineInfo(stack_map_encoding));
   1530           if (i < e - 2) {
   1531             // Make sure there are not three identical stack maps following each other.
   1532             DCHECK_NE(
   1533                 stack_map.GetNativePcOffset(stack_map_encoding, kRuntimeISA),
   1534                 GetStackMapAt(i + 2, encoding).GetNativePcOffset(stack_map_encoding, kRuntimeISA));
   1535           }
   1536           return stack_map;
   1537         }
   1538       }
   1539     }
   1540     return StackMap();
   1541   }
   1542 
   1543   StackMap GetStackMapForNativePcOffset(uint32_t native_pc_offset,
   1544                                         const CodeInfoEncoding& encoding) const {
   1545     // TODO: Safepoint stack maps are sorted by native_pc_offset but catch stack
   1546     //       maps are not. If we knew that the method does not have try/catch,
   1547     //       we could do binary search.
   1548     for (size_t i = 0, e = GetNumberOfStackMaps(encoding); i < e; ++i) {
   1549       StackMap stack_map = GetStackMapAt(i, encoding);
   1550       if (stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA) ==
   1551           native_pc_offset) {
   1552         return stack_map;
   1553       }
   1554     }
   1555     return StackMap();
   1556   }
   1557 
   1558   InvokeInfo GetInvokeInfoForNativePcOffset(uint32_t native_pc_offset,
   1559                                             const CodeInfoEncoding& encoding) {
   1560     for (size_t index = 0; index < encoding.invoke_info.num_entries; index++) {
   1561       InvokeInfo item = GetInvokeInfo(encoding, index);
   1562       if (item.GetNativePcOffset(encoding.invoke_info.encoding, kRuntimeISA) == native_pc_offset) {
   1563         return item;
   1564       }
   1565     }
   1566     return InvokeInfo(BitMemoryRegion());
   1567   }
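
           // Usage sketch (variable names are hypothetical): to recover the callee at a call
           // site, look the entry up by native pc offset and, if valid, decode its fields:
           //
           //   InvokeInfo invoke = code_info.GetInvokeInfoForNativePcOffset(return_pc_offset, encoding);
           //   if (invoke.IsValid()) {
           //     uint32_t invoke_type = invoke.GetInvokeType(encoding.invoke_info.encoding);
           //     uint32_t method_index = invoke.GetMethodIndex(encoding.invoke_info.encoding, method_info);
           //   }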
   1568 
   1569   // Dump this CodeInfo object on `os`.  `code_offset` is the (absolute)
   1570   // native PC of the compiled method and `number_of_dex_registers` the
   1571   // number of Dex virtual registers used in this method.  If
   1572   // `dump_stack_maps` is true, also dump the stack maps and the
   1573   // associated Dex register maps.
   1574   void Dump(VariableIndentationOutputStream* vios,
   1575             uint32_t code_offset,
   1576             uint16_t number_of_dex_registers,
   1577             bool dump_stack_maps,
   1578             InstructionSet instruction_set,
   1579             const MethodInfo& method_info) const;
   1580 
    1581   // Check that the code info has valid stack maps and abort if it does not.
   1582   void AssertValidStackMap(const CodeInfoEncoding& encoding) const {
   1583     if (region_.size() != 0 && region_.size_in_bits() < GetStackMapsSizeInBits(encoding)) {
   1584       LOG(FATAL) << region_.size() << "\n"
   1585                  << encoding.HeaderSize() << "\n"
   1586                  << encoding.NonHeaderSize() << "\n"
   1587                  << encoding.location_catalog.num_entries << "\n"
   1588                  << encoding.stack_map.num_entries << "\n"
   1589                  << encoding.stack_map.encoding.BitSize();
   1590     }
   1591   }
   1592 
   1593  private:
    1594   // Compute the size of the Dex register map associated with the stack map at
   1595   // `dex_register_map_offset_in_code_info`.
   1596   size_t ComputeDexRegisterMapSizeOf(const CodeInfoEncoding& encoding,
   1597                                      uint32_t dex_register_map_offset_in_code_info,
   1598                                      uint16_t number_of_dex_registers) const {
   1599     // Offset where the actual mapping data starts within art::DexRegisterMap.
   1600     size_t location_mapping_data_offset_in_dex_register_map =
   1601         DexRegisterMap::GetLocationMappingDataOffset(number_of_dex_registers);
    1602     // Create a temporary art::DexRegisterMap covering only the live-register bit mask,
    1603     // so that art::DexRegisterMap::GetNumberOfLiveDexRegisters can be called on it.
   1604     DexRegisterMap dex_register_map_without_locations(
   1605         MemoryRegion(region_.Subregion(dex_register_map_offset_in_code_info,
   1606                                        location_mapping_data_offset_in_dex_register_map)));
   1607     size_t number_of_live_dex_registers =
   1608         dex_register_map_without_locations.GetNumberOfLiveDexRegisters(number_of_dex_registers);
   1609     size_t location_mapping_data_size_in_bits =
   1610         DexRegisterMap::SingleEntrySizeInBits(GetNumberOfLocationCatalogEntries(encoding))
   1611         * number_of_live_dex_registers;
   1612     size_t location_mapping_data_size_in_bytes =
   1613         RoundUp(location_mapping_data_size_in_bits, kBitsPerByte) / kBitsPerByte;
   1614     size_t dex_register_map_size =
   1615         location_mapping_data_offset_in_dex_register_map + location_mapping_data_size_in_bytes;
   1616     return dex_register_map_size;
   1617   }
   1618 
   1619   // Compute the size of a Dex register location catalog starting at offset `origin`
   1620   // in `region_` and containing `number_of_dex_locations` entries.
   1621   size_t ComputeDexRegisterLocationCatalogSize(uint32_t origin,
   1622                                                uint32_t number_of_dex_locations) const {
   1623     // TODO: Ideally, we would like to use art::DexRegisterLocationCatalog::Size or
   1624     // art::DexRegisterLocationCatalog::FindLocationOffset, but the
   1625     // DexRegisterLocationCatalog is not yet built.  Try to factor common code.
   1626     size_t offset = origin + DexRegisterLocationCatalog::kFixedSize;
   1627 
    1628     // Skip over the `number_of_dex_locations` entries to find the end of the catalog.
   1629     for (uint16_t i = 0; i < number_of_dex_locations; ++i) {
    1630       // Read the next byte and inspect its first 3 bits to decide
   1631       // whether it is a short or a large location.
   1632       DexRegisterLocationCatalog::ShortLocation first_byte =
   1633           region_.LoadUnaligned<DexRegisterLocationCatalog::ShortLocation>(offset);
   1634       DexRegisterLocation::Kind kind =
   1635           DexRegisterLocationCatalog::ExtractKindFromShortLocation(first_byte);
   1636       if (DexRegisterLocation::IsShortLocationKind(kind)) {
   1637         // Short location.  Skip the current byte.
   1638         offset += DexRegisterLocationCatalog::SingleShortEntrySize();
   1639       } else {
    1640         // Large location.  Skip the next 5 bytes.
   1641         offset += DexRegisterLocationCatalog::SingleLargeEntrySize();
   1642       }
   1643     }
   1644     size_t size = offset - origin;
   1645     return size;
   1646   }
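
           // For illustration, a hypothetical catalog with two short entries and one large
           // entry occupies DexRegisterLocationCatalog::kFixedSize + 2 * SingleShortEntrySize()
           // + 1 * SingleLargeEntrySize() bytes, i.e. kFixedSize plus 2 * 1 + 1 * 5 = 7 bytes
           // given the one-byte short and five-byte large entry sizes noted above.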
   1647 
   1648   MemoryRegion region_;
   1649   friend class StackMapStream;
   1650 };
   1651 
   1652 #undef ELEMENT_BYTE_OFFSET_AFTER
   1653 #undef ELEMENT_BIT_OFFSET_AFTER
   1654 
   1655 }  // namespace art
   1656 
   1657 #endif  // ART_RUNTIME_STACK_MAP_H_
   1658