Home | History | Annotate | Download | only in runtime
      1 /*
      2  * Copyright (C) 2017 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #ifndef ART_RUNTIME_SUBTYPE_CHECK_H_
     18 #define ART_RUNTIME_SUBTYPE_CHECK_H_
     19 
     20 #include "subtype_check_bits_and_status.h"
     21 #include "subtype_check_info.h"
     22 
     23 #include "base/mutex.h"
     24 #include "mirror/class.h"
     25 #include "runtime.h"
     26 
// Build flag for the bitstring subtype check runtime hooks.
// NOTE(review): a constexpr bool (rather than a preprocessor define) keeps
// code guarded by this flag type-checked and compiled even while the feature
// is disabled; the disabled branches are then dead-code eliminated.
constexpr bool kBitstringSubtypeCheckEnabled = false;
     29 
     30 /**
     31  * Any node in a tree can have its path (from the root to the node) represented as a string by
     32  * concatenating the path of the parent to that of the current node.
     33  *
     34  * We can annotate each node with a `sibling-label` which is some value unique amongst all of the
     35  * node's siblings. As a special case, the root is empty.
     36  *
     37  *           (none)
     38  *        /    |     \
     39  *       A     B      C
     40  *     /   \
     41  *    A    B
     42  *          |
     43  *          A
     44  *          |
     45  *          A
     46  *          |
     47  *          A
     48  *
     49  * Given these sibling-labels, we can now encode the path from any node to the root by starting at
     50  * the node and going up to the root, marking each node with this `path-label`. The special
     51  * character $ means "end of path".
     52  *
     53  *             $
     54  *        /    |      \
     55  *       A$    B$     C$
     56  *     /    \
     57  *   AA$   BA$
     58  *           |
     59  *           ABA$
     60  *           |
 *           AABA$
 *           |
 *           AAABA$
     64  *
     65  * Given the above `path-label` we can express if any two nodes are an offspring of the other
     66  * through a O(1) expression:
     67  *
     68  *    x <: y :=
     69  *      suffix(x, y) == y
     70  *
     71  * In the above example suffix(x,y) means the suffix of x that is as long as y (right-padded with
     72  * $s if x is shorter than y) :
     73  *
     74  *    suffix(x,y) := x(x.length - y.length .. 0]
     75  *                     + repeat($, max(y.length - x.length, 0))
     76  *
     77  * A few generalities here to elaborate:
     78  *
     79  * - There can be at most D levels in the tree.
     80  * - Each level L has an alphabet A, and the maximum number of
     81  *   nodes is determined by |A|
 * - The alphabet A can be a subset, superset, equal, or unique with respect to the other alphabets
 *   without loss of generality. (In practice it would almost always be a subset of the previous
 *   level's alphabet, as we assume most classes have fewer children the deeper they are.)
 * - The `sibling-label` doesn't need to be stored as an explicit value. It can be a temporary when
     86  *   visiting every immediate child of a node. Only the `path-label` needs to be actually stored for
     87  *   every node.
     88  *
     89  * The path can also be reversed, and use a prefix instead of a suffix to define the subchild
     90  * relation.
     91  *
     92  *             $
     93  *        /    |      \    \
     94  *       A$    B$     C$    D$
     95  *     /    \
     96  *   AA$   AB$
     97  *            |
     98  *            ABA$
     99  *            |
    100  *            ABAA$
    101  *            |
    102  *            ABAAA$
    103  *
    104  *    x <: y :=
    105  *      prefix(x, y) == y
    106  *
    107  *    prefix(x,y) := x[0 .. y.length)
    108  *                     + repeat($, max(y.length - x.length, 0))
    109  *
    110  * In a dynamic tree, new nodes can be inserted at any time. This means if a minimal alphabet is
    111  * selected to contain the initial tree hierarchy, later node insertions will be illegal because
    112  * there is no more room to encode the path.
    113  *
    114  * In this simple example with an alphabet A,B,C and max level 1:
    115  *
    116  *     Level
    117  *     0:               $
    118  *              /     |     \     \
    119  *     1:      A$     B$     C$    D$   (illegal)
    120  *              |
    121  *     2:      AA$  (illegal)
    122  *
    123  * Attempting to insert the sibling D at Level 1 would be illegal because the Alphabet(1) is
    124  * {A,B,C} and inserting an extra node would mean the `sibling-label` is no longer unique.
    125  * Attempting to insert AA$ is illegal because the level 2 is more than the max level 1.
    126  *
 * One solution to this would be to revisit the entire graph, select a larger alphabet so that
    128  * every `sibling-label` is unique, pick a larger max level count, and then store the updated
    129  * `path-label` accordingly.
    130  *
    131  * The more common approach would instead be to select a set of alphabets and max levels statically,
    132  * with large enough sizes, for example:
    133  *
    134  *     Alphabets = {{A,B,C,D}, {A,B,C}, {A,B}, {A}}
    135  *     Max Levels = |Alphabets|
    136  *
    137  * Which would allow up to 4 levels with each successive level having 1 less max siblings.
    138  *
 * Attempting to insert a new node into the graph which does not fit into that level's alphabet
    140  * would be represented by re-using the `path-label` of the parent. Such a `path_label` would be
    141  * considered truncated (because it would only have a prefix of the full path from the root to the
    142  * node).
    143  *
    144  *    Level
    145  *    0:             $
    146  *             /     |     \     \
    147  *    1:      A$     B$     C$    $   (same as parent)
    148  *             |
    149  *    2:      A$ (same as parent)
    150  *
    151  * The updated relation for offspring is then:
    152  *
    153  *    x <: y :=
    154  *      if !truncated_path(y):
    155  *        return prefix(x, y) == y               // O(1)
    156  *      else:
    157  *        return slow_check_is_offspring(x, y)   // worse than O(1)
    158  *
    159  * (Example definition of truncated_path -- any semantically equivalent way to check that the
    160  *  sibling's `sibling-label` is not unique will do)
    161  *
    162  *    truncated_path(y) :=
    163  *      return y == parent(y)
    164  *
    165  * (Example definition. Any slower-than-O(1) definition will do here. This is the traversing
    166  * superclass hierarchy solution)
    167  *
    168  *    slow_check_is_offspring(x, y) :=
    169  *      if not x: return false
    170  *      else: return x == y || recursive_is_offspring(parent(x), y)
    171  *
 * In which case slow_check_is_offspring is some non-O(1) way to check if x is an offspring of y.
    173  *
 * In addition, note that it doesn't matter if the "x" from above is a unique sibling or not; the
    175  * relation will still be correct.
    176  *
    177  * ------------------------------------------------------------------------------------------------
    178  *
    179  * Leveraging truncated paths to minimize path lengths.
    180  *
    181  * As observed above, for any x <: y, it is sufficient to have a full path only for y,
    182  * and x can be truncated (to its nearest ancestor's full path).
    183  *
    184  * We call a node that stores a full path "Assigned", and a node that stores a truncated path
    185  * either "Initialized" or "Overflowed."
    186  *
    187  * "Initialized" means it is still possible to assign a full path to the node, and "Overflowed"
 * means there are not enough characters left in the alphabet.
    189  *
    190  * In this example, assume that we attempt to "Assign" all non-leafs if possible. Leafs
    191  * always get truncated (as either Initialized or Overflowed).
    192  *
    193  *     Alphabets = {{A,B,C,D}, {A,B}}
    194  *     Max Levels = |Alphabets|
    195  *
    196  *    Level
    197  *    0:             $
    198  *             /     |     \     \     \
    199  *    1:      A$     B$     C$    D$    $ (Overflowed: Too wide)
    200  *            |             |
    201  *    2:     AA$            C$ (Initialized)
    202  *            |
    203  *    3:     AA$ (Overflowed: Too deep)
    204  *
    205  * (All un-annotated nodes are "Assigned").
    206  * Above, the node at level 3 becomes overflowed because it exceeds the max levels. The
 * right-most node at level 1 becomes overflowed because there are no characters
 * left in that level's alphabet.
    209  *
    210  * The "C$" node is Initialized at level 2, but it can still be promoted to "Assigned" later on
    211  * if we wanted to.
    212  *
    213  * In particular, this is the strategy we use in our implementation
    214  * (SubtypeCheck::EnsureInitialized, SubtypeCheck::EnsureAssigned).
    215  *
    216  * Since the # of characters in our alphabet (BitString) is very limited, we want to avoid
 * allocating a character to a node until it's absolutely necessary.
    218  *
    219  * All node targets (in `src <: target`) get Assigned, and any parent of an Initialized
    220  * node also gets Assigned.
    221  */
    222 namespace art {
    223 
// Forward-declared here so it can be befriended by SubtypeCheck below,
// giving the test double access to the private implementation.
struct MockSubtypeCheck;  // Forward declaration for testing.
    225 
    226 // This class is using a template parameter to enable testability without losing performance.
    227 // ClassPtr is almost always `mirror::Class*` or `ObjPtr<mirror::Class>`.
    228 template <typename ClassPtr /* Pointer-like type to Class */>
    229 struct SubtypeCheck {
    230   // Force this class's SubtypeCheckInfo state into at least Initialized.
    231   // As a side-effect, all parent classes also become Assigned|Overflowed.
    232   //
    233   // Cost: O(Depth(Class))
    234   //
    235   // Post-condition: State is >= Initialized.
    236   // Returns: The precise SubtypeCheckInfo::State.
    237   static SubtypeCheckInfo::State EnsureInitialized(ClassPtr klass)
    238       REQUIRES(Locks::subtype_check_lock_)
    239       REQUIRES_SHARED(Locks::mutator_lock_) {
    240     return InitializeOrAssign(klass, /*assign*/false).GetState();
    241   }
    242 
    243   // Force this class's SubtypeCheckInfo state into Assigned|Overflowed.
    244   // As a side-effect, all parent classes also become Assigned|Overflowed.
    245   //
    246   // Cost: O(Depth(Class))
    247   //
    248   // Post-condition: State is Assigned|Overflowed.
    249   // Returns: The precise SubtypeCheckInfo::State.
    250   static SubtypeCheckInfo::State EnsureAssigned(ClassPtr klass)
    251       REQUIRES(Locks::subtype_check_lock_)
    252       REQUIRES_SHARED(Locks::mutator_lock_) {
    253     return InitializeOrAssign(klass, /*assign*/true).GetState();
    254   }
    255 
    256   // Resets the SubtypeCheckInfo into the Uninitialized state.
    257   //
    258   // Intended only for the AOT image writer.
    259   // This is a static function to avoid calling klass.Depth(), which is unsupported
    260   // in some portions of the image writer.
    261   //
    262   // Cost: O(1).
    263   //
    264   // Returns: A state that is always Uninitialized.
    265   static SubtypeCheckInfo::State ForceUninitialize(ClassPtr klass)
    266     REQUIRES(Locks::subtype_check_lock_)
    267     REQUIRES_SHARED(Locks::mutator_lock_) {
    268     // Trying to do this in a real runtime will break thread safety invariants
    269     // of existing live objects in the class hierarchy.
    270     // This is only safe as the last step when the classes are about to be
    271     // written out as an image and IsSubClass is never used again.
    272     DCHECK(Runtime::Current() == nullptr || Runtime::Current()->IsAotCompiler())
    273       << "This only makes sense when compiling an app image.";
    274 
    275     // Directly read/write the class field here.
    276     // As this method is used by image_writer on a copy,
    277     // the Class* there is not a real class and using it for anything
    278     // more complicated (e.g. ObjPtr or Depth call) will fail dchecks.
    279 
    280     // OK. zero-initializing subtype_check_info_ puts us into the kUninitialized state.
    281     SubtypeCheckBits scb_uninitialized = SubtypeCheckBits{};
    282     WriteSubtypeCheckBits(klass, scb_uninitialized);
    283 
    284     // Do not use "SubtypeCheckInfo" API here since that requires Depth()
    285     // which would cause a dcheck failure.
    286     return SubtypeCheckInfo::kUninitialized;
    287   }
    288 
    289   // Retrieve the path to root bitstring as a plain uintN_t value that is amenable to
    290   // be used by a fast check "encoded_src & mask_target == encoded_target".
    291   //
    292   // Cost: O(Depth(Class)).
    293   //
    294   // Returns the encoded_src value. Must be >= Initialized (EnsureInitialized).
    295   static BitString::StorageType GetEncodedPathToRootForSource(ClassPtr klass)
    296       REQUIRES(Locks::subtype_check_lock_)
    297       REQUIRES_SHARED(Locks::mutator_lock_) {
    298     DCHECK_NE(SubtypeCheckInfo::kUninitialized, GetSubtypeCheckInfo(klass).GetState());
    299     return GetSubtypeCheckInfo(klass).GetEncodedPathToRoot();
    300   }
    301 
    302   // Retrieve the path to root bitstring as a plain uintN_t value that is amenable to
    303   // be used by a fast check "encoded_src & mask_target == encoded_target".
    304   //
    305   // Cost: O(Depth(Class)).
    306   //
    307   // Returns the encoded_target value. Must be Assigned (EnsureAssigned).
    308   static BitString::StorageType GetEncodedPathToRootForTarget(ClassPtr klass)
    309       REQUIRES(Locks::subtype_check_lock_)
    310       REQUIRES_SHARED(Locks::mutator_lock_) {
    311     DCHECK_EQ(SubtypeCheckInfo::kAssigned, GetSubtypeCheckInfo(klass).GetState());
    312     return GetSubtypeCheckInfo(klass).GetEncodedPathToRoot();
    313   }
    314 
    315   // Retrieve the path to root bitstring mask as a plain uintN_t value that is amenable to
    316   // be used by a fast check "encoded_src & mask_target == encoded_target".
    317   //
    318   // Cost: O(Depth(Class)).
    319   //
    320   // Returns the mask_target value. Must be Assigned (EnsureAssigned).
    321   static BitString::StorageType GetEncodedPathToRootMask(ClassPtr klass)
    322       REQUIRES(Locks::subtype_check_lock_)
    323       REQUIRES_SHARED(Locks::mutator_lock_) {
    324     DCHECK_EQ(SubtypeCheckInfo::kAssigned, GetSubtypeCheckInfo(klass).GetState());
    325     return GetSubtypeCheckInfo(klass).GetEncodedPathToRootMask();
    326   }
    327 
    328   // Is the source class a subclass of the target?
    329   //
    330   // The source state must be at least Initialized, and the target state
    331   // must be Assigned, otherwise the result will return kUnknownSubtypeOf.
    332   //
    333   // See EnsureInitialized and EnsureAssigned. Ideally,
    334   // EnsureInitialized will be called previously on all possible sources,
    335   // and EnsureAssigned will be called previously on all possible targets.
    336   //
    337   // Runtime cost: O(Depth(Class)), but would be O(1) if depth was known.
    338   //
    339   // If the result is known, return kSubtypeOf or kNotSubtypeOf.
    340   static SubtypeCheckInfo::Result IsSubtypeOf(ClassPtr source, ClassPtr target)
    341       REQUIRES_SHARED(Locks::mutator_lock_) {
    342     SubtypeCheckInfo sci = GetSubtypeCheckInfo(source);
    343     SubtypeCheckInfo target_sci = GetSubtypeCheckInfo(target);
    344 
    345     return sci.IsSubtypeOf(target_sci);
    346   }
    347 
    348   // Print SubtypeCheck bitstring and overflow to a stream (e.g. for oatdump).
    349   static std::ostream& Dump(ClassPtr klass, std::ostream& os)
    350       REQUIRES_SHARED(Locks::mutator_lock_) {
    351     return os << GetSubtypeCheckInfo(klass);
    352   }
    353 
    354   static void WriteStatus(ClassPtr klass, ClassStatus status)
    355       REQUIRES_SHARED(Locks::mutator_lock_) {
    356     WriteStatusImpl(klass, status);
    357   }
    358 
    359  private:
    360   static ClassPtr GetParentClass(ClassPtr klass)
    361       REQUIRES_SHARED(Locks::mutator_lock_) {
    362     DCHECK(klass->HasSuperClass());
    363     return ClassPtr(klass->GetSuperClass());
    364   }
    365 
    366   static SubtypeCheckInfo InitializeOrAssign(ClassPtr klass, bool assign)
    367       REQUIRES(Locks::subtype_check_lock_)
    368       REQUIRES_SHARED(Locks::mutator_lock_) {
    369     if (UNLIKELY(!klass->HasSuperClass())) {
    370       // Object root always goes directly from Uninitialized -> Assigned.
    371 
    372       const SubtypeCheckInfo root_sci = GetSubtypeCheckInfo(klass);
    373       if (root_sci.GetState() != SubtypeCheckInfo::kUninitialized) {
    374         return root_sci;  // No change needed.
    375       }
    376 
    377       const SubtypeCheckInfo new_root_sci = root_sci.CreateRoot();
    378       SetSubtypeCheckInfo(klass, new_root_sci);
    379 
    380       // The object root is always in the Uninitialized|Assigned state.
    381       DCHECK_EQ(SubtypeCheckInfo::kAssigned, GetSubtypeCheckInfo(klass).GetState())
    382           << "Invalid object root state, must be Assigned";
    383       return new_root_sci;
    384     }
    385 
    386     // Force all ancestors to Assigned | Overflowed.
    387     ClassPtr parent_klass = GetParentClass(klass);
    388     size_t parent_depth = InitializeOrAssign(parent_klass, /*assign*/true).GetDepth();
    389     if (kIsDebugBuild) {
    390       SubtypeCheckInfo::State parent_state = GetSubtypeCheckInfo(parent_klass).GetState();
    391       DCHECK(parent_state == SubtypeCheckInfo::kAssigned ||
    392           parent_state == SubtypeCheckInfo::kOverflowed)
    393           << "Expected parent Assigned|Overflowed, but was: " << parent_state;
    394     }
    395 
    396     // Read.
    397     SubtypeCheckInfo sci = GetSubtypeCheckInfo(klass, parent_depth + 1u);
    398     SubtypeCheckInfo parent_sci = GetSubtypeCheckInfo(parent_klass, parent_depth);
    399 
    400     // Modify.
    401     const SubtypeCheckInfo::State sci_state = sci.GetState();
    402     // Skip doing any work if the state is already up-to-date.
    403     //   - assign == false -> Initialized or higher.
    404     //   - assign == true  -> Assigned or higher.
    405     if (sci_state == SubtypeCheckInfo::kUninitialized ||
    406         (sci_state == SubtypeCheckInfo::kInitialized && assign)) {
    407       // Copy parent path into the child.
    408       //
    409       // If assign==true, this also appends Parent.Next value to the end.
    410       // Then the Parent.Next value is incremented to avoid allocating
    411       // the same value again to another node.
    412       sci = parent_sci.CreateChild(assign);  // Note: Parent could be mutated.
    413     } else {
    414       // Nothing to do, already >= Initialized.
    415       return sci;
    416     }
    417 
    418     // Post-condition: EnsureAssigned -> Assigned|Overflowed.
    419     // Post-condition: EnsureInitialized -> Not Uninitialized.
    420     DCHECK_NE(sci.GetState(), SubtypeCheckInfo::kUninitialized);
    421 
    422     if (assign) {
    423       DCHECK_NE(sci.GetState(), SubtypeCheckInfo::kInitialized);
    424     }
    425 
    426     // Write.
    427     SetSubtypeCheckInfo(klass, sci);                     // self
    428     SetSubtypeCheckInfo(parent_klass, parent_sci);       // parent
    429 
    430     return sci;
    431   }
    432 
    433   static SubtypeCheckBitsAndStatus ReadField(ClassPtr klass)
    434       REQUIRES_SHARED(Locks::mutator_lock_) {
    435     SubtypeCheckBitsAndStatus current_bits_and_status;
    436 
    437     int32_t int32_data = klass->GetField32Volatile(klass->StatusOffset());
    438     current_bits_and_status.int32_alias_ = int32_data;
    439 
    440     if (kIsDebugBuild) {
    441       SubtypeCheckBitsAndStatus tmp;
    442       memcpy(&tmp, &int32_data, sizeof(tmp));
    443       DCHECK_EQ(0, memcmp(&tmp, &current_bits_and_status, sizeof(tmp))) << int32_data;
    444     }
    445     return current_bits_and_status;
    446   }
    447 
    448   static void WriteSubtypeCheckBits(ClassPtr klass, const SubtypeCheckBits& new_bits)
    449       REQUIRES(Locks::subtype_check_lock_)
    450       REQUIRES_SHARED(Locks::mutator_lock_) {
    451     // Use a "CAS" to write the SubtypeCheckBits in the class.
    452     // Although we have exclusive access to the bitstrings, because
    453     // ClassStatus and SubtypeCheckBits share the same word, another thread could
    454     // potentially overwrite that word still.
    455 
    456     SubtypeCheckBitsAndStatus new_value;
    457     ClassStatus old_status;
    458     SubtypeCheckBitsAndStatus full_old;
    459     while (true) {
    460       // TODO: Atomic compare-and-swap does not update the 'expected' parameter,
    461       // so we have to read it as a separate step instead.
    462       SubtypeCheckBitsAndStatus old_value = ReadField(klass);
    463 
    464       {
    465         SubtypeCheckBits old_bits = old_value.subtype_check_info_;
    466         if (memcmp(&old_bits, &new_bits, sizeof(old_bits)) == 0) {
    467           // Avoid dirtying memory when the data hasn't changed.
    468           return;
    469         }
    470       }
    471 
    472       full_old = old_value;
    473       old_status = old_value.status_;
    474 
    475       new_value = old_value;
    476       new_value.subtype_check_info_ = new_bits;
    477 
    478       if (kIsDebugBuild) {
    479         int32_t int32_data = 0;
    480         memcpy(&int32_data, &new_value, sizeof(int32_t));
    481         DCHECK_EQ(int32_data, new_value.int32_alias_) << int32_data;
    482 
    483         DCHECK_EQ(old_status, new_value.status_)
    484           << "full new: " << bit_cast<uint32_t>(new_value)
    485           << ", full old: " << bit_cast<uint32_t>(full_old);
    486       }
    487 
    488       if (CasFieldWeakSequentiallyConsistent32(klass,
    489                                                klass->StatusOffset(),
    490                                                old_value.int32_alias_,
    491                                                new_value.int32_alias_)) {
    492         break;
    493       }
    494     }
    495   }
    496 
    497   static void WriteStatusImpl(ClassPtr klass, ClassStatus status)
    498       REQUIRES_SHARED(Locks::mutator_lock_) {
    499     // Despite not having a lock annotation, this is done with mutual exclusion.
    500     // See Class::SetStatus for more details.
    501     SubtypeCheckBitsAndStatus new_value;
    502     ClassStatus old_status;
    503     while (true) {
    504       // TODO: Atomic compare-and-swap does not update the 'expected' parameter,
    505       // so we have to read it as a separate step instead.
    506       SubtypeCheckBitsAndStatus old_value = ReadField(klass);
    507       old_status = old_value.status_;
    508 
    509       if (memcmp(&old_status, &status, sizeof(status)) == 0) {
    510         // Avoid dirtying memory when the data hasn't changed.
    511         return;
    512       }
    513 
    514       new_value = old_value;
    515       new_value.status_ = status;
    516 
    517       if (CasFieldWeakSequentiallyConsistent32(klass,
    518                                                klass->StatusOffset(),
    519                                                old_value.int32_alias_,
    520                                                new_value.int32_alias_)) {
    521         break;
    522       }
    523     }
    524   }
    525 
    526   static bool CasFieldWeakSequentiallyConsistent32(ClassPtr klass,
    527                                                    MemberOffset offset,
    528                                                    int32_t old_value,
    529                                                    int32_t new_value)
    530       REQUIRES_SHARED(Locks::mutator_lock_) {
    531     if (Runtime::Current() != nullptr && Runtime::Current()->IsActiveTransaction()) {
    532       return klass->template
    533           CasFieldWeakSequentiallyConsistent32</*kTransactionActive*/true>(offset,
    534                                                                            old_value,
    535                                                                            new_value);
    536     } else {
    537       return klass->template
    538           CasFieldWeakSequentiallyConsistent32</*kTransactionActive*/false>(offset,
    539                                                                             old_value,
    540                                                                             new_value);
    541     }
    542   }
    543 
    544   // Get the SubtypeCheckInfo for a klass. O(Depth(Class)) since
    545   // it also requires calling klass->Depth.
    546   //
    547   // Anything calling this function will also be O(Depth(Class)).
    548   static SubtypeCheckInfo GetSubtypeCheckInfo(ClassPtr klass)
    549         REQUIRES_SHARED(Locks::mutator_lock_) {
    550     return GetSubtypeCheckInfo(klass, klass->Depth());
    551   }
    552 
    553   // Get the SubtypeCheckInfo for a klass with known depth.
    554   static SubtypeCheckInfo GetSubtypeCheckInfo(ClassPtr klass, size_t depth)
    555         REQUIRES_SHARED(Locks::mutator_lock_) {
    556     DCHECK_EQ(depth, klass->Depth());
    557     SubtypeCheckBitsAndStatus current_bits_and_status = ReadField(klass);
    558 
    559     const SubtypeCheckInfo current =
    560         SubtypeCheckInfo::Create(current_bits_and_status.subtype_check_info_, depth);
    561     return current;
    562   }
    563 
    564   static void SetSubtypeCheckInfo(ClassPtr klass, const SubtypeCheckInfo& new_sci)
    565         REQUIRES(Locks::subtype_check_lock_)
    566         REQUIRES_SHARED(Locks::mutator_lock_) {
    567     SubtypeCheckBits new_bits = new_sci.GetSubtypeCheckBits();
    568     WriteSubtypeCheckBits(klass, new_bits);
    569   }
    570 
    571   // Tests can inherit this class. Normal code should use static methods.
    572   SubtypeCheck() = default;
    573   SubtypeCheck(const SubtypeCheck& other) = default;
    574   SubtypeCheck(SubtypeCheck&& other) = default;
    575   ~SubtypeCheck() = default;
    576 
    577   friend struct MockSubtypeCheck;
    578 };
    579 
    580 }  // namespace art
    581 
    582 #endif  // ART_RUNTIME_SUBTYPE_CHECK_H_
    583