/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_INSTRUMENTATION_H_
#define ART_RUNTIME_INSTRUMENTATION_H_

#include <stdint.h>
#include <list>
#include <map>

#include "atomic.h"
#include "instruction_set.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "gc_root.h"
#include "object_callbacks.h"

namespace art {
namespace mirror {
  class ArtField;
  class ArtMethod;
  class Class;
  class Object;
  class Throwable;
}  // namespace mirror
union JValue;
class Thread;
class ThrowLocation;

namespace instrumentation {

// Interpreter handler tables.
enum InterpreterHandlerTable {
  kMainHandlerTable = 0,          // Main handler table: no suspend check, no instrumentation.
  kAlternativeHandlerTable = 1,   // Alternative handler table: suspend check and/or instrumentation
                                  // enabled.
  kNumHandlerTables
};

// Instrumentation event listener API. Registered listeners get a callback for each event they are
// listening for. The callbacks supply the thread, method and dex pc at which the event occurred.
// The thread may or may not be Thread::Current().
struct InstrumentationListener {
  InstrumentationListener() {}
  virtual ~InstrumentationListener() {}

  // Call-back for when a method is entered.
  virtual void MethodEntered(Thread* thread, mirror::Object* this_object,
                             mirror::ArtMethod* method,
                             uint32_t dex_pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0;

  // Call-back for when a method is exited.
  // TODO: it's likely passing the return value would be useful, however, we may need to get and
  //       parse the shorty to determine what kind of register holds the result.
  virtual void MethodExited(Thread* thread, mirror::Object* this_object,
                            mirror::ArtMethod* method, uint32_t dex_pc,
                            const JValue& return_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0;

  // Call-back for when a method is popped due to an exception throw. A method will either cause a
  // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
  virtual void MethodUnwind(Thread* thread, mirror::Object* this_object,
                            mirror::ArtMethod* method, uint32_t dex_pc)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0;

  // Call-back for when the dex pc moves in a method.
  virtual void DexPcMoved(Thread* thread, mirror::Object* this_object,
                          mirror::ArtMethod* method, uint32_t new_dex_pc)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0;

  // Call-back for when we read from a field.
  virtual void FieldRead(Thread* thread, mirror::Object* this_object, mirror::ArtMethod* method,
                         uint32_t dex_pc, mirror::ArtField* field) = 0;

  // Call-back for when we write into a field.
  virtual void FieldWritten(Thread* thread, mirror::Object* this_object, mirror::ArtMethod* method,
                            uint32_t dex_pc, mirror::ArtField* field, const JValue& field_value) = 0;

  // Call-back when an exception is caught.
  virtual void ExceptionCaught(Thread* thread, const ThrowLocation& throw_location,
                               mirror::ArtMethod* catch_method, uint32_t catch_dex_pc,
                               mirror::Throwable* exception_object)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0;
};
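
// A minimal listener sketch (illustrative only, not part of the runtime): "LoggingListener" and
// its logging body are hypothetical, but a real client (e.g. the debugger or the method tracer)
// registers something of this shape via Instrumentation::AddListener below.
//
//   class LoggingListener FINAL : public InstrumentationListener {
//    public:
//     void MethodEntered(Thread* thread, mirror::Object* this_object,
//                        mirror::ArtMethod* method, uint32_t dex_pc)
//         SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE {
//       LOG(INFO) << "Entered method at dex pc " << dex_pc;
//     }
//     void MethodExited(Thread* thread, mirror::Object* this_object,
//                       mirror::ArtMethod* method, uint32_t dex_pc,
//                       const JValue& return_value)
//         SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE {}
//     void MethodUnwind(Thread* thread, mirror::Object* this_object,
//                       mirror::ArtMethod* method, uint32_t dex_pc)
//         SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE {}
//     void DexPcMoved(Thread* thread, mirror::Object* this_object,
//                     mirror::ArtMethod* method, uint32_t new_dex_pc)
//         SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE {}
//     void FieldRead(Thread* thread, mirror::Object* this_object, mirror::ArtMethod* method,
//                    uint32_t dex_pc, mirror::ArtField* field) OVERRIDE {}
//     void FieldWritten(Thread* thread, mirror::Object* this_object, mirror::ArtMethod* method,
//                       uint32_t dex_pc, mirror::ArtField* field,
//                       const JValue& field_value) OVERRIDE {}
//     void ExceptionCaught(Thread* thread, const ThrowLocation& throw_location,
//                          mirror::ArtMethod* catch_method, uint32_t catch_dex_pc,
//                          mirror::Throwable* exception_object)
//         SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE {}
//   };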

// Instrumentation is a catch-all for when extra information is required from the runtime. The
// typical use for instrumentation is for profiling and debugging. Instrumentation may add stubs
// to method entry and exit; it may also force execution to be switched to the interpreter and
// trigger deoptimization.
class Instrumentation {
 public:
  enum InstrumentationEvent {
    kMethodEntered =   1 << 0,
    kMethodExited =    1 << 1,
    kMethodUnwind =    1 << 2,
    kDexPcMoved =      1 << 3,
    kFieldRead =       1 << 4,
    kFieldWritten =    1 << 5,
    kExceptionCaught = 1 << 6,
  };

  Instrumentation();

  // Add a listener to be notified of the masked-together set of instrumentation events. This
  // suspends the runtime to install stubs. You are expected to hold the mutator lock as a proxy
  // for having suspended all threads (installing stubs while threads are running will break).
  void AddListener(InstrumentationListener* listener, uint32_t events)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_);
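
  // For example (illustrative only), a tracer interested in entry and exit events would register
  // itself with a mask of the event bits above:
  //
  //   Runtime::Current()->GetInstrumentation()->AddListener(
  //       listener, Instrumentation::kMethodEntered | Instrumentation::kMethodExited);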

  // Removes a listener, possibly removing instrumentation stubs.
  void RemoveListener(InstrumentationListener* listener, uint32_t events)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_);

  // Deoptimization.
  void EnableDeoptimization()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(deoptimized_methods_lock_);
  void DisableDeoptimization()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(deoptimized_methods_lock_);
  bool AreAllMethodsDeoptimized() const {
    return interpreter_stubs_installed_;
  }
  bool ShouldNotifyMethodEnterExitEvents() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Executes everything with interpreter.
  void DeoptimizeEverything()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_);

  // Executes everything with compiled code (or interpreter if there is no code).
  void UndeoptimizeEverything()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_);

  // Deoptimize a method by forcing its execution with the interpreter. However, a static
  // method (except a class initializer) set to the resolution trampoline will be deoptimized only
  // once its declaring class is initialized.
  void Deoptimize(mirror::ArtMethod* method)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, deoptimized_methods_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Undeoptimize the method by restoring its entrypoints. However, a static method
  // (except a class initializer) set to the resolution trampoline will be updated only once its
  // declaring class is initialized.
  void Undeoptimize(mirror::ArtMethod* method)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, deoptimized_methods_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsDeoptimized(mirror::ArtMethod* method)
      LOCKS_EXCLUDED(deoptimized_methods_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
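
  // Illustrative usage sketch only (mirrors how a debugger-style client is expected to drive the
  // calls above; the caller must hold the mutator lock exclusively, as annotated):
  //
  //   instr->EnableDeoptimization();
  //   instr->Deoptimize(method);    // "method" now always runs under the interpreter.
  //   ...                           // later, when the breakpoint (or similar) is removed:
  //   instr->Undeoptimize(method);  // restore the method's original entrypoints.
  //   instr->DisableDeoptimization();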

  // Enable method tracing by installing instrumentation entry/exit stubs.
  void EnableMethodTracing()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_);

  // Disable method tracing by uninstalling instrumentation entry/exit stubs.
  void DisableMethodTracing()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_);

  InterpreterHandlerTable GetInterpreterHandlerTable() const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return interpreter_handler_table_;
  }

  void InstrumentQuickAllocEntryPoints() LOCKS_EXCLUDED(Locks::instrument_entrypoints_lock_);
  void UninstrumentQuickAllocEntryPoints() LOCKS_EXCLUDED(Locks::instrument_entrypoints_lock_);
  void InstrumentQuickAllocEntryPointsLocked()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::instrument_entrypoints_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::runtime_shutdown_lock_);
  void UninstrumentQuickAllocEntryPointsLocked()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::instrument_entrypoints_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::runtime_shutdown_lock_);
  void ResetQuickAllocEntryPoints() EXCLUSIVE_LOCKS_REQUIRED(Locks::runtime_shutdown_lock_);

  // Update the code of a method respecting any installed stubs.
  void UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
                         const void* portable_code, bool have_portable_code)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Get the quick code for the given method. More efficient than asking the class linker as it
  // will short-cut to GetCode if instrumentation and static method resolution stubs aren't
  // installed.
  const void* GetQuickCodeFor(mirror::ArtMethod* method) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ForceInterpretOnly() {
    interpret_only_ = true;
    forced_interpret_only_ = true;
  }

  // Called by ArtMethod::Invoke to determine dispatch mechanism.
  bool InterpretOnly() const {
    return interpret_only_;
  }

  bool IsForcedInterpretOnly() const {
    return forced_interpret_only_;
  }

  bool ShouldPortableCodeDeoptimize() const {
    return instrumentation_stubs_installed_;
  }

  bool AreExitStubsInstalled() const {
    return instrumentation_stubs_installed_;
  }

  bool HasMethodEntryListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return have_method_entry_listeners_;
  }

  bool HasMethodExitListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return have_method_exit_listeners_;
  }

  bool HasDexPcListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return have_dex_pc_listeners_;
  }

  bool HasFieldReadListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return have_field_read_listeners_;
  }

  bool HasFieldWriteListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return have_field_write_listeners_;
  }

  bool HasExceptionCaughtListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return have_exception_caught_listeners_;
  }

  bool IsActive() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return have_dex_pc_listeners_ || have_method_entry_listeners_ || have_method_exit_listeners_ ||
        have_field_read_listeners_ || have_field_write_listeners_ ||
        have_exception_caught_listeners_ || have_method_unwind_listeners_;
  }

  // Inform listeners that a method has been entered. A dex PC is provided as we may install
  // listeners into executing code and get method enter events for methods already on the stack.
  void MethodEnterEvent(Thread* thread, mirror::Object* this_object,
                        mirror::ArtMethod* method, uint32_t dex_pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodEntryListeners())) {
      MethodEnterEventImpl(thread, this_object, method, dex_pc);
    }
  }

  // Inform listeners that a method has been exited.
  void MethodExitEvent(Thread* thread, mirror::Object* this_object,
                       mirror::ArtMethod* method, uint32_t dex_pc,
                       const JValue& return_value) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodExitListeners())) {
      MethodExitEventImpl(thread, this_object, method, dex_pc, return_value);
    }
  }

  // Inform listeners that a method has been exited due to an exception.
  void MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                         mirror::ArtMethod* method, uint32_t dex_pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Inform listeners that the dex pc has moved (only supported by the interpreter).
  void DexPcMovedEvent(Thread* thread, mirror::Object* this_object,
                       mirror::ArtMethod* method, uint32_t dex_pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    if (UNLIKELY(HasDexPcListeners())) {
      DexPcMovedEventImpl(thread, this_object, method, dex_pc);
    }
  }

  // Inform listeners that we read a field (only supported by the interpreter).
  void FieldReadEvent(Thread* thread, mirror::Object* this_object,
                      mirror::ArtMethod* method, uint32_t dex_pc,
                      mirror::ArtField* field) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldReadListeners())) {
      FieldReadEventImpl(thread, this_object, method, dex_pc, field);
    }
  }

  // Inform listeners that we write a field (only supported by the interpreter).
  void FieldWriteEvent(Thread* thread, mirror::Object* this_object,
                       mirror::ArtMethod* method, uint32_t dex_pc,
                       mirror::ArtField* field, const JValue& field_value) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldWriteListeners())) {
      FieldWriteEventImpl(thread, this_object, method, dex_pc, field, field_value);
    }
  }

  // Inform listeners that an exception was caught.
  void ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                            mirror::ArtMethod* catch_method, uint32_t catch_dex_pc,
                            mirror::Throwable* exception_object) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Called when an instrumented method is entered. The intended link register (lr) is saved so
  // that returning causes a branch to the method exit stub. Generates method enter events.
  void PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                     mirror::ArtMethod* method, uintptr_t lr,
                                     bool interpreter_entry)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Called when an instrumented method is exited. Removes the pushed instrumentation frame,
  // returning the intended link register. Generates method exit events.
  TwoWordReturn PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                             uint64_t gpr_result, uint64_t fpr_result)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Pops an instrumentation frame from the current thread and generates an unwind event.
  void PopMethodForUnwind(Thread* self, bool is_deoptimization) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Callback used while configuring stubs: installs the correct stubs for the given class.
  bool InstallStubsForClass(mirror::Class* klass) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void InstallStubsForMethod(mirror::ArtMethod* method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void VisitRoots(RootCallback* callback, void* arg) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(deoptimized_methods_lock_);

 private:
  // Does the job of installing or removing instrumentation code within methods.
  void ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_,
                     deoptimized_methods_lock_);

  void UpdateInterpreterHandlerTable() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) {
    interpreter_handler_table_ = IsActive() ? kAlternativeHandlerTable : kMainHandlerTable;
  }

  // No thread safety analysis, to get around SetQuickAllocEntryPointsInstrumented requiring
  // exclusive access to the mutator lock, which you can't get if the runtime isn't started.
  void SetEntrypointsInstrumented(bool instrumented) NO_THREAD_SAFETY_ANALYSIS;

  void MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                            mirror::ArtMethod* method, uint32_t dex_pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                           mirror::ArtMethod* method,
                           uint32_t dex_pc, const JValue& return_value) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
                           mirror::ArtMethod* method, uint32_t dex_pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
                          mirror::ArtMethod* method, uint32_t dex_pc,
                          mirror::ArtField* field) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
                           mirror::ArtMethod* method, uint32_t dex_pc,
                           mirror::ArtField* field, const JValue& field_value) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Read barrier-aware utility functions for accessing deoptimized_methods_.
  bool AddDeoptimizedMethod(mirror::ArtMethod* method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(deoptimized_methods_lock_);
  bool FindDeoptimizedMethod(mirror::ArtMethod* method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(deoptimized_methods_lock_);
  bool RemoveDeoptimizedMethod(mirror::ArtMethod* method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(deoptimized_methods_lock_);
  mirror::ArtMethod* BeginDeoptimizedMethod()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(deoptimized_methods_lock_);
  bool IsDeoptimizedMethodsEmpty() const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(deoptimized_methods_lock_);

  // Have we hijacked ArtMethod::code_ so that it calls instrumentation/interpreter code?
  bool instrumentation_stubs_installed_;

  // Have we hijacked ArtMethod::code_ to reference the enter/exit stubs?
  bool entry_exit_stubs_installed_;

  // Have we hijacked ArtMethod::code_ to reference the enter interpreter stub?
  bool interpreter_stubs_installed_;

  // Do we need the fidelity of events that we only get from running within the interpreter?
  bool interpret_only_;

  // Did the runtime request we only run in the interpreter? i.e. -Xint mode.
  bool forced_interpret_only_;

  // Do we have any listeners for method entry events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method exit events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method unwind events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for dex pc move events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field read events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field write events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any exception caught listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_exception_caught_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // The event listeners, written to with the mutator_lock_ exclusively held.
  std::list<InstrumentationListener*> method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::shared_ptr<std::list<InstrumentationListener*>> dex_pc_listeners_
      GUARDED_BY(Locks::mutator_lock_);
  std::shared_ptr<std::list<InstrumentationListener*>> field_read_listeners_
      GUARDED_BY(Locks::mutator_lock_);
  std::shared_ptr<std::list<InstrumentationListener*>> field_write_listeners_
      GUARDED_BY(Locks::mutator_lock_);
  std::shared_ptr<std::list<InstrumentationListener*>> exception_caught_listeners_
      GUARDED_BY(Locks::mutator_lock_);

  // The set of methods being deoptimized (by the debugger) which must be executed with the
  // interpreter only.
  mutable ReaderWriterMutex deoptimized_methods_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::multimap<int32_t, GcRoot<mirror::ArtMethod>> deoptimized_methods_
      GUARDED_BY(deoptimized_methods_lock_);
  bool deoptimization_enabled_;

  // Current interpreter handler table. This is updated each time the thread state flags are
  // modified.
  InterpreterHandlerTable interpreter_handler_table_ GUARDED_BY(Locks::mutator_lock_);

  // Greater than 0 if quick alloc entry points are instrumented.
  size_t quick_alloc_entry_points_instrumentation_counter_
      GUARDED_BY(Locks::instrument_entrypoints_lock_);

  DISALLOW_COPY_AND_ASSIGN(Instrumentation);
};

// An element in the instrumentation side stack maintained in art::Thread.
struct InstrumentationStackFrame {
  InstrumentationStackFrame(mirror::Object* this_object, mirror::ArtMethod* method,
                            uintptr_t return_pc, size_t frame_id, bool interpreter_entry)
      : this_object_(this_object), method_(method), return_pc_(return_pc), frame_id_(frame_id),
        interpreter_entry_(interpreter_entry) {
  }

  std::string Dump() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  mirror::Object* this_object_;
  mirror::ArtMethod* method_;
  uintptr_t return_pc_;
  size_t frame_id_;
  bool interpreter_entry_;
};
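
// Illustrative sketch only of how such a frame is used (the push itself lives in
// Instrumentation::PushInstrumentationStackFrame in instrumentation.cc; the frame_id computation
// and the Thread::GetInstrumentationStack() accessor are assumptions here):
//
//   size_t frame_id = StackVisitor::ComputeNumFrames(self);
//   instrumentation::InstrumentationStackFrame frame(this_object, method, lr, frame_id,
//                                                    interpreter_entry);
//   self->GetInstrumentationStack()->push_front(frame);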

}  // namespace instrumentation
}  // namespace art

#endif  // ART_RUNTIME_INSTRUMENTATION_H_