Home | History | Annotate | Download | only in runtime
      1 /*
      2  * Copyright (C) 2011 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #ifndef ART_RUNTIME_INSTRUMENTATION_H_
     18 #define ART_RUNTIME_INSTRUMENTATION_H_
     19 
     20 #include <stdint.h>
     21 #include <list>
     22 #include <unordered_set>
     23 
     24 #include "arch/instruction_set.h"
     25 #include "base/enums.h"
     26 #include "base/macros.h"
     27 #include "base/mutex.h"
     28 #include "gc_root.h"
     29 #include "safe_map.h"
     30 
     31 namespace art {
     32 namespace mirror {
     33   class Class;
     34   class Object;
     35   class Throwable;
     36 }  // namespace mirror
     37 class ArtField;
     38 class ArtMethod;
     39 template <typename T> class Handle;
     40 union JValue;
     41 class Thread;
     42 
     43 namespace instrumentation {
     44 
     45 // Interpreter handler tables.
     46 enum InterpreterHandlerTable {
     47   kMainHandlerTable = 0,          // Main handler table: no suspend check, no instrumentation.
     48   kAlternativeHandlerTable = 1,   // Alternative handler table: suspend check and/or instrumentation
     49                                   // enabled.
     50   kNumHandlerTables
     51 };
     52 
     53 // Do we want to deoptimize for method entry and exit listeners or just try to intercept
     54 // invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
     55 // application's performance.
     56 static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;
     57 
     58 // Instrumentation event listener API. Registered listeners will get the appropriate call back for
     59 // the events they are listening for. The call backs supply the thread, method and dex_pc the event
     60 // occurred upon. The thread may or may not be Thread::Current().
     61 struct InstrumentationListener {
     62   InstrumentationListener() {}
     63   virtual ~InstrumentationListener() {}
     64 
     65   // Call-back for when a method is entered.
     66   virtual void MethodEntered(Thread* thread,
     67                              Handle<mirror::Object> this_object,
     68                              ArtMethod* method,
     69                              uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_) = 0;
     70 
     71   virtual void MethodExited(Thread* thread,
     72                             Handle<mirror::Object> this_object,
     73                             ArtMethod* method,
     74                             uint32_t dex_pc,
     75                             Handle<mirror::Object> return_value)
     76       REQUIRES_SHARED(Locks::mutator_lock_);
     77 
     78   // Call-back for when a method is exited. The implementor should either handler-ize the return
     79   // value (if appropriate) or use the alternate MethodExited callback instead if they need to
     80   // go through a suspend point.
     81   virtual void MethodExited(Thread* thread,
     82                             Handle<mirror::Object> this_object,
     83                             ArtMethod* method,
     84                             uint32_t dex_pc,
     85                             const JValue& return_value)
     86       REQUIRES_SHARED(Locks::mutator_lock_) = 0;
     87 
     88   // Call-back for when a method is popped due to an exception throw. A method will either cause a
     89   // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
     90   virtual void MethodUnwind(Thread* thread,
     91                             Handle<mirror::Object> this_object,
     92                             ArtMethod* method,
     93                             uint32_t dex_pc)
     94       REQUIRES_SHARED(Locks::mutator_lock_) = 0;
     95 
     96   // Call-back for when the dex pc moves in a method.
     97   virtual void DexPcMoved(Thread* thread,
     98                           Handle<mirror::Object> this_object,
     99                           ArtMethod* method,
    100                           uint32_t new_dex_pc)
    101       REQUIRES_SHARED(Locks::mutator_lock_) = 0;
    102 
    103   // Call-back for when we read from a field.
    104   virtual void FieldRead(Thread* thread,
    105                          Handle<mirror::Object> this_object,
    106                          ArtMethod* method,
    107                          uint32_t dex_pc,
    108                          ArtField* field) = 0;
    109 
    110   virtual void FieldWritten(Thread* thread,
    111                             Handle<mirror::Object> this_object,
    112                             ArtMethod* method,
    113                             uint32_t dex_pc,
    114                             ArtField* field,
    115                             Handle<mirror::Object> field_value)
    116       REQUIRES_SHARED(Locks::mutator_lock_);
    117 
    118   // Call-back for when we write into a field.
    119   virtual void FieldWritten(Thread* thread,
    120                             Handle<mirror::Object> this_object,
    121                             ArtMethod* method,
    122                             uint32_t dex_pc,
    123                             ArtField* field,
    124                             const JValue& field_value)
    125       REQUIRES_SHARED(Locks::mutator_lock_) = 0;
    126 
    127   // Call-back when an exception is caught.
    128   virtual void ExceptionCaught(Thread* thread,
    129                                Handle<mirror::Throwable> exception_object)
    130       REQUIRES_SHARED(Locks::mutator_lock_) = 0;
    131 
    132   // Call-back for when we execute a branch.
    133   virtual void Branch(Thread* thread,
    134                       ArtMethod* method,
    135                       uint32_t dex_pc,
    136                       int32_t dex_pc_offset)
    137       REQUIRES_SHARED(Locks::mutator_lock_) = 0;
    138 
    139   // Call-back for when we get an invokevirtual or an invokeinterface.
    140   virtual void InvokeVirtualOrInterface(Thread* thread,
    141                                         Handle<mirror::Object> this_object,
    142                                         ArtMethod* caller,
    143                                         uint32_t dex_pc,
    144                                         ArtMethod* callee)
    145       REQUIRES_SHARED(Locks::mutator_lock_) = 0;
    146 };
    147 
    148 // Instrumentation is a catch-all for when extra information is required from the runtime. The
    149 // typical use for instrumentation is for profiling and debugging. Instrumentation may add stubs
    150 // to method entry and exit, it may also force execution to be switched to the interpreter and
    151 // trigger deoptimization.
    152 class Instrumentation {
    153  public:
    154   enum InstrumentationEvent {
    155     kMethodEntered = 0x1,
    156     kMethodExited = 0x2,
    157     kMethodUnwind = 0x4,
    158     kDexPcMoved = 0x8,
    159     kFieldRead = 0x10,
    160     kFieldWritten = 0x20,
    161     kExceptionCaught = 0x40,
    162     kBranch = 0x80,
    163     kInvokeVirtualOrInterface = 0x100,
    164   };
    165 
    166   enum class InstrumentationLevel {
    167     kInstrumentNothing,                   // execute without instrumentation
    168     kInstrumentWithInstrumentationStubs,  // execute with instrumentation entry/exit stubs
    169     kInstrumentWithInterpreter            // execute with interpreter
    170   };
    171 
    172   Instrumentation();
    173 
     174   // Add a listener to be notified of the masked-together set of instrumentation events. This
     175   // may suspend the runtime to install stubs. You are expected to hold the mutator lock as a
     176   // proxy for saying you should have suspended all threads (installing stubs while threads are
     177   // running will break).
    178   void AddListener(InstrumentationListener* listener, uint32_t events)
    179       REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);
    180 
    181   // Removes a listener possibly removing instrumentation stubs.
    182   void RemoveListener(InstrumentationListener* listener, uint32_t events)
    183       REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);
    184 
    185   // Deoptimization.
    186   void EnableDeoptimization()
    187       REQUIRES(Locks::mutator_lock_)
    188       REQUIRES(!deoptimized_methods_lock_);
    189   // Calls UndeoptimizeEverything which may visit class linker classes through ConfigureStubs.
    190   void DisableDeoptimization(const char* key)
    191       REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
    192       REQUIRES(!deoptimized_methods_lock_);
    193 
  // True when full deoptimization is in effect, i.e. interpreter stubs have been installed so
  // every method executes in the interpreter.
  bool AreAllMethodsDeoptimized() const {
    return interpreter_stubs_installed_;
  }
    197   bool ShouldNotifyMethodEnterExitEvents() const REQUIRES_SHARED(Locks::mutator_lock_);
    198 
    199   // Executes everything with interpreter.
    200   void DeoptimizeEverything(const char* key)
    201       REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
    202       REQUIRES(!Locks::thread_list_lock_,
    203                !Locks::classlinker_classes_lock_,
    204                !deoptimized_methods_lock_);
    205 
    206   // Executes everything with compiled code (or interpreter if there is no code). May visit class
    207   // linker classes through ConfigureStubs.
    208   void UndeoptimizeEverything(const char* key)
    209       REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
    210       REQUIRES(!Locks::thread_list_lock_,
    211                !Locks::classlinker_classes_lock_,
    212                !deoptimized_methods_lock_);
    213 
    214   // Deoptimize a method by forcing its execution with the interpreter. Nevertheless, a static
    215   // method (except a class initializer) set to the resolution trampoline will be deoptimized only
    216   // once its declaring class is initialized.
    217   void Deoptimize(ArtMethod* method)
    218       REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !deoptimized_methods_lock_);
    219 
     220   // Undeoptimize the method by restoring its entrypoints. Nevertheless, a static method
    221   // (except a class initializer) set to the resolution trampoline will be updated only once its
    222   // declaring class is initialized.
    223   void Undeoptimize(ArtMethod* method)
    224       REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !deoptimized_methods_lock_);
    225 
    226   // Indicates whether the method has been deoptimized so it is executed with the interpreter.
    227   bool IsDeoptimized(ArtMethod* method)
    228       REQUIRES(!deoptimized_methods_lock_) REQUIRES_SHARED(Locks::mutator_lock_);
    229 
    230   // Enable method tracing by installing instrumentation entry/exit stubs or interpreter.
    231   void EnableMethodTracing(const char* key,
    232                            bool needs_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners)
    233       REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
    234       REQUIRES(!Locks::thread_list_lock_,
    235                !Locks::classlinker_classes_lock_,
    236                !deoptimized_methods_lock_);
    237 
    238   // Disable method tracing by uninstalling instrumentation entry/exit stubs or interpreter.
    239   void DisableMethodTracing(const char* key)
    240       REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
    241       REQUIRES(!Locks::thread_list_lock_,
    242                !Locks::classlinker_classes_lock_,
    243                !deoptimized_methods_lock_);
    244 
  // Returns the handler table the interpreter should currently dispatch through; see
  // UpdateInterpreterHandlerTable for how it is chosen.
  InterpreterHandlerTable GetInterpreterHandlerTable() const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return interpreter_handler_table_;
  }
    249 
    250   void InstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
    251   void UninstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
    252   void InstrumentQuickAllocEntryPointsLocked()
    253       REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
    254                !Locks::runtime_shutdown_lock_);
    255   void UninstrumentQuickAllocEntryPointsLocked()
    256       REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
    257                !Locks::runtime_shutdown_lock_);
    258   void ResetQuickAllocEntryPoints() REQUIRES(Locks::runtime_shutdown_lock_);
    259 
    260   // Update the code of a method respecting any installed stubs.
    261   void UpdateMethodsCode(ArtMethod* method, const void* quick_code)
    262       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);
    263 
    264   // Update the code of a method respecting any installed stubs from debugger.
    265   void UpdateMethodsCodeForJavaDebuggable(ArtMethod* method, const void* quick_code)
    266       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);
    267 
    268   // Get the quick code for the given method. More efficient than asking the class linker as it
    269   // will short-cut to GetCode if instrumentation and static method resolution stubs aren't
    270   // installed.
    271   const void* GetQuickCodeFor(ArtMethod* method, PointerSize pointer_size) const
    272       REQUIRES_SHARED(Locks::mutator_lock_);
    273 
    274   void ForceInterpretOnly() {
    275     interpret_only_ = true;
    276     forced_interpret_only_ = true;
    277   }
    278 
  // Called by ArtMethod::Invoke to determine dispatch mechanism. True if execution must go
  // through the interpreter (either forced via -Xint or required by instrumentation).
  bool InterpretOnly() const {
    return interpret_only_;
  }
    283 
  // True only when interpret-only mode was explicitly requested (ie -Xint), as opposed to being
  // required by active instrumentation.
  bool IsForcedInterpretOnly() const {
    return forced_interpret_only_;
  }
    287 
    288   // Code is in boot image oat file which isn't compiled as debuggable.
    289   // Need debug version (interpreter or jitted) if that's the case.
    290   bool NeedDebugVersionFor(ArtMethod* method) const
    291       REQUIRES_SHARED(Locks::mutator_lock_);
    292 
  // True when instrumentation entry/exit stubs have been installed (ArtMethod::code_ hijacked),
  // so returning methods will branch through the exit stub.
  bool AreExitStubsInstalled() const {
    return instrumentation_stubs_installed_;
  }
    296 
  // Short-cut: any listeners registered for method entry events?
  bool HasMethodEntryListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_method_entry_listeners_;
  }
    300 
  // Short-cut: any listeners registered for method exit events?
  bool HasMethodExitListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_method_exit_listeners_;
  }
    304 
  // Short-cut: any listeners registered for method unwind (exception pop) events?
  bool HasMethodUnwindListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_method_unwind_listeners_;
  }
    308 
  // Short-cut: any listeners registered for dex pc moved events?
  bool HasDexPcListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_dex_pc_listeners_;
  }
    312 
  // Short-cut: any listeners registered for field read events?
  bool HasFieldReadListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_field_read_listeners_;
  }
    316 
  // Short-cut: any listeners registered for field write events?
  bool HasFieldWriteListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_field_write_listeners_;
  }
    320 
  // Short-cut: any listeners registered for exception caught events?
  bool HasExceptionCaughtListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_exception_caught_listeners_;
  }
    324 
  // Short-cut: any listeners registered for branch events?
  bool HasBranchListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_branch_listeners_;
  }
    328 
  // Short-cut: any listeners registered for invokevirtual/invokeinterface events?
  bool HasInvokeVirtualOrInterfaceListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_invoke_virtual_or_interface_listeners_;
  }
    332 
  // True if any kind of instrumentation listener is registered; used e.g. to pick the
  // interpreter handler table (see UpdateInterpreterHandlerTable).
  bool IsActive() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_dex_pc_listeners_ || have_method_entry_listeners_ || have_method_exit_listeners_ ||
        have_field_read_listeners_ || have_field_write_listeners_ ||
        have_exception_caught_listeners_ || have_method_unwind_listeners_ ||
        have_branch_listeners_ || have_invoke_virtual_or_interface_listeners_;
  }
    339 
  // Any instrumentation *other* than what is needed for Jit profiling active?
  // Note: deliberately omits method-entry and invoke-virtual/interface listeners — presumably
  // those are the events JIT profiling itself relies on; confirm against the JIT code.
  bool NonJitProfilingActive() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_dex_pc_listeners_ || have_method_exit_listeners_ ||
        have_field_read_listeners_ || have_field_write_listeners_ ||
        have_exception_caught_listeners_ || have_method_unwind_listeners_ ||
        have_branch_listeners_;
  }
    347 
  // Inform listeners that a method has been entered. A dex PC is provided as we may install
  // listeners into executing code and get method enter events for methods already on the stack.
  // Fast path: a single flag test; the out-of-line Impl is only called when listeners exist.
  void MethodEnterEvent(Thread* thread, mirror::Object* this_object,
                        ArtMethod* method, uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodEntryListeners())) {
      MethodEnterEventImpl(thread, this_object, method, dex_pc);
    }
  }
    357 
  // Inform listeners that a method has been exited. return_value holds the raw result; it is
  // only meaningful when interpreted as the method's declared return type.
  void MethodExitEvent(Thread* thread,
                       mirror::Object* this_object,
                       ArtMethod* method,
                       uint32_t dex_pc,
                       const JValue& return_value) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodExitListeners())) {
      MethodExitEventImpl(thread, this_object, method, dex_pc, return_value);
    }
  }
    369 
    370   // Inform listeners that a method has been exited due to an exception.
    371   void MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
    372                          ArtMethod* method, uint32_t dex_pc) const
    373       REQUIRES_SHARED(Locks::mutator_lock_);
    374 
  // Inform listeners that the dex pc has moved (only supported by the interpreter).
  void DexPcMovedEvent(Thread* thread, mirror::Object* this_object,
                       ArtMethod* method, uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasDexPcListeners())) {
      DexPcMovedEventImpl(thread, this_object, method, dex_pc);
    }
  }
    383 
  // Inform listeners that a branch has been taken (only supported by the interpreter).
  // offset is the signed dex-pc delta of the branch target relative to dex_pc.
  void Branch(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasBranchListeners())) {
      BranchImpl(thread, method, dex_pc, offset);
    }
  }
    391 
  // Inform listeners that we read a field (only supported by the interpreter).
  void FieldReadEvent(Thread* thread, mirror::Object* this_object,
                      ArtMethod* method, uint32_t dex_pc,
                      ArtField* field) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldReadListeners())) {
      FieldReadEventImpl(thread, this_object, method, dex_pc, field);
    }
  }
    401 
  // Inform listeners that we write a field (only supported by the interpreter). field_value is
  // the value being stored, interpreted per the field's type.
  void FieldWriteEvent(Thread* thread, mirror::Object* this_object,
                       ArtMethod* method, uint32_t dex_pc,
                       ArtField* field, const JValue& field_value) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldWriteListeners())) {
      FieldWriteEventImpl(thread, this_object, method, dex_pc, field, field_value);
    }
  }
    411 
  // Inform listeners that an invokevirtual or invokeinterface from `caller` at `dex_pc` resolved
  // to `callee`.
  void InvokeVirtualOrInterface(Thread* thread,
                                mirror::Object* this_object,
                                ArtMethod* caller,
                                uint32_t dex_pc,
                                ArtMethod* callee) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasInvokeVirtualOrInterfaceListeners())) {
      InvokeVirtualOrInterfaceImpl(thread, this_object, caller, dex_pc, callee);
    }
  }
    422 
    423   // Inform listeners that an exception was caught.
    424   void ExceptionCaughtEvent(Thread* thread, mirror::Throwable* exception_object) const
    425       REQUIRES_SHARED(Locks::mutator_lock_);
    426 
    427   // Called when an instrumented method is entered. The intended link register (lr) is saved so
    428   // that returning causes a branch to the method exit stub. Generates method enter events.
    429   void PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
    430                                      ArtMethod* method, uintptr_t lr,
    431                                      bool interpreter_entry)
    432       REQUIRES_SHARED(Locks::mutator_lock_);
    433 
    434   // Called when an instrumented method is exited. Removes the pushed instrumentation frame
    435   // returning the intended link register. Generates method exit events. The gpr_result and
    436   // fpr_result pointers are pointers to the locations where the integer/pointer and floating point
    437   // result values of the function are stored. Both pointers must always be valid but the values
    438   // held there will only be meaningful if interpreted as the appropriate type given the function
    439   // being returned from.
    440   TwoWordReturn PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
    441                                              uint64_t* gpr_result, uint64_t* fpr_result)
    442       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);
    443 
    444   // Pops an instrumentation frame from the current thread and generate an unwind event.
    445   // Returns the return pc for the instrumentation frame that's popped.
    446   uintptr_t PopMethodForUnwind(Thread* self, bool is_deoptimization) const
    447       REQUIRES_SHARED(Locks::mutator_lock_);
    448 
    449   // Call back for configure stubs.
    450   void InstallStubsForClass(mirror::Class* klass) REQUIRES_SHARED(Locks::mutator_lock_)
    451       REQUIRES(!deoptimized_methods_lock_);
    452 
    453   void InstallStubsForMethod(ArtMethod* method)
    454       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);
    455 
    456   // Install instrumentation exit stub on every method of the stack of the given thread.
    457   // This is used by the debugger to cause a deoptimization of the thread's stack after updating
    458   // local variable(s).
    459   void InstrumentThreadStack(Thread* thread)
    460       REQUIRES_SHARED(Locks::mutator_lock_)
    461       REQUIRES(!Locks::thread_list_lock_);
    462 
    463   static size_t ComputeFrameId(Thread* self,
    464                                size_t frame_depth,
    465                                size_t inlined_frames_before_frame)
    466       REQUIRES_SHARED(Locks::mutator_lock_);
    467 
  // Does not hold lock, used to check if someone changed from not instrumented to instrumented
  // during a GC suspend point.
  bool AllocEntrypointsInstrumented() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return alloc_entrypoints_instrumented_;
  }
    473 
    474   InstrumentationLevel GetCurrentInstrumentationLevel() const;
    475 
    476  private:
    477   // Returns true if moving to the given instrumentation level requires the installation of stubs.
    478   // False otherwise.
    479   bool RequiresInstrumentationInstallation(InstrumentationLevel new_level) const;
    480 
    481   // Does the job of installing or removing instrumentation code within methods.
    482   // In order to support multiple clients using instrumentation at the same time,
    483   // the caller must pass a unique key (a string) identifying it so we remind which
    484   // instrumentation level it needs. Therefore the current instrumentation level
    485   // becomes the highest instrumentation level required by a client.
    486   void ConfigureStubs(const char* key, InstrumentationLevel desired_instrumentation_level)
    487       REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
    488       REQUIRES(!deoptimized_methods_lock_,
    489                !Locks::thread_list_lock_,
    490                !Locks::classlinker_classes_lock_);
    491 
  // Recompute the interpreter handler table: use the alternative (instrumented) table whenever
  // any listener is active, otherwise the main table.
  void UpdateInterpreterHandlerTable() REQUIRES(Locks::mutator_lock_) {
    /*
     * TUNING: Dalvik's mterp stashes the actual current handler table base in a
     * tls field.  For Arm, this enables all suspend, debug & tracing checks to be
     * collapsed into a single conditionally-executed ldw instruction.
     * Move to Dalvik-style handler-table management for both the goto interpreter and
     * mterp.
     */
    interpreter_handler_table_ = IsActive() ? kAlternativeHandlerTable : kMainHandlerTable;
  }
    502 
    503   // No thread safety analysis to get around SetQuickAllocEntryPointsInstrumented requiring
    504   // exclusive access to mutator lock which you can't get if the runtime isn't started.
    505   void SetEntrypointsInstrumented(bool instrumented) NO_THREAD_SAFETY_ANALYSIS;
    506 
    507   void MethodEnterEventImpl(Thread* thread,
    508                             ObjPtr<mirror::Object> this_object,
    509                             ArtMethod* method,
    510                             uint32_t dex_pc) const
    511       REQUIRES_SHARED(Locks::mutator_lock_);
    512   void MethodExitEventImpl(Thread* thread,
    513                            ObjPtr<mirror::Object> this_object,
    514                            ArtMethod* method,
    515                            uint32_t dex_pc,
    516                            const JValue& return_value) const
    517       REQUIRES_SHARED(Locks::mutator_lock_);
    518   void DexPcMovedEventImpl(Thread* thread,
    519                            ObjPtr<mirror::Object> this_object,
    520                            ArtMethod* method,
    521                            uint32_t dex_pc) const
    522       REQUIRES_SHARED(Locks::mutator_lock_);
    523   void BranchImpl(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
    524       REQUIRES_SHARED(Locks::mutator_lock_);
    525   void InvokeVirtualOrInterfaceImpl(Thread* thread,
    526                                     ObjPtr<mirror::Object> this_object,
    527                                     ArtMethod* caller,
    528                                     uint32_t dex_pc,
    529                                     ArtMethod* callee) const
    530       REQUIRES_SHARED(Locks::mutator_lock_);
    531   void FieldReadEventImpl(Thread* thread,
    532                           ObjPtr<mirror::Object> this_object,
    533                           ArtMethod* method,
    534                           uint32_t dex_pc,
    535                           ArtField* field) const
    536       REQUIRES_SHARED(Locks::mutator_lock_);
    537   void FieldWriteEventImpl(Thread* thread,
    538                            ObjPtr<mirror::Object> this_object,
    539                            ArtMethod* method,
    540                            uint32_t dex_pc,
    541                            ArtField* field,
    542                            const JValue& field_value) const
    543       REQUIRES_SHARED(Locks::mutator_lock_);
    544 
    545   // Read barrier-aware utility functions for accessing deoptimized_methods_
    546   bool AddDeoptimizedMethod(ArtMethod* method)
    547       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(deoptimized_methods_lock_);
    548   bool IsDeoptimizedMethod(ArtMethod* method)
    549       REQUIRES_SHARED(Locks::mutator_lock_, deoptimized_methods_lock_);
    550   bool RemoveDeoptimizedMethod(ArtMethod* method)
    551       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(deoptimized_methods_lock_);
    552   ArtMethod* BeginDeoptimizedMethod()
    553       REQUIRES_SHARED(Locks::mutator_lock_, deoptimized_methods_lock_);
    554   bool IsDeoptimizedMethodsEmpty() const
    555       REQUIRES_SHARED(Locks::mutator_lock_, deoptimized_methods_lock_);
    556   void UpdateMethodsCodeImpl(ArtMethod* method, const void* quick_code)
    557       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);
    558 
    559 
    560   // Have we hijacked ArtMethod::code_ so that it calls instrumentation/interpreter code?
    561   bool instrumentation_stubs_installed_;
    562 
    563   // Have we hijacked ArtMethod::code_ to reference the enter/exit stubs?
    564   bool entry_exit_stubs_installed_;
    565 
    566   // Have we hijacked ArtMethod::code_ to reference the enter interpreter stub?
    567   bool interpreter_stubs_installed_;
    568 
    569   // Do we need the fidelity of events that we only get from running within the interpreter?
    570   bool interpret_only_;
    571 
    572   // Did the runtime request we only run in the interpreter? ie -Xint mode.
    573   bool forced_interpret_only_;
    574 
    575   // Do we have any listeners for method entry events? Short-cut to avoid taking the
    576   // instrumentation_lock_.
    577   bool have_method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);
    578 
    579   // Do we have any listeners for method exit events? Short-cut to avoid taking the
    580   // instrumentation_lock_.
    581   bool have_method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);
    582 
    583   // Do we have any listeners for method unwind events? Short-cut to avoid taking the
    584   // instrumentation_lock_.
    585   bool have_method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);
    586 
    587   // Do we have any listeners for dex move events? Short-cut to avoid taking the
    588   // instrumentation_lock_.
    589   bool have_dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);
    590 
    591   // Do we have any listeners for field read events? Short-cut to avoid taking the
    592   // instrumentation_lock_.
    593   bool have_field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);
    594 
    595   // Do we have any listeners for field write events? Short-cut to avoid taking the
    596   // instrumentation_lock_.
    597   bool have_field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);
    598 
    599   // Do we have any exception caught listeners? Short-cut to avoid taking the instrumentation_lock_.
    600   bool have_exception_caught_listeners_ GUARDED_BY(Locks::mutator_lock_);
    601 
    602   // Do we have any branch listeners? Short-cut to avoid taking the instrumentation_lock_.
    603   bool have_branch_listeners_ GUARDED_BY(Locks::mutator_lock_);
    604 
    605   // Do we have any invoke listeners? Short-cut to avoid taking the instrumentation_lock_.
    606   bool have_invoke_virtual_or_interface_listeners_ GUARDED_BY(Locks::mutator_lock_);
    607 
    608   // Contains the instrumentation level required by each client of the instrumentation identified
    609   // by a string key.
    610   typedef SafeMap<const char*, InstrumentationLevel> InstrumentationLevelTable;
    611   InstrumentationLevelTable requested_instrumentation_levels_ GUARDED_BY(Locks::mutator_lock_);
    612 
    613   // The event listeners, written to with the mutator_lock_ exclusively held.
    614   // Mutators must be able to iterate over these lists concurrently, that is, with listeners being
    615   // added or removed while iterating. The modifying thread holds exclusive lock,
    616   // so other threads cannot iterate (i.e. read the data of the list) at the same time but they
    617   // do keep iterators that need to remain valid. This is the reason these listeners are std::list
    618   // and not for example std::vector: the existing storage for a std::list does not move.
    619   // Note that mutators cannot make a copy of these lists before iterating, as the instrumentation
    620   // listeners can also be deleted concurrently.
    621   // As a result, these lists are never trimmed. That's acceptable given the low number of
    622   // listeners we have.
    623   std::list<InstrumentationListener*> method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);
    624   std::list<InstrumentationListener*> method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);
    625   std::list<InstrumentationListener*> method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);
    626   std::list<InstrumentationListener*> branch_listeners_ GUARDED_BY(Locks::mutator_lock_);
    627   std::list<InstrumentationListener*> invoke_virtual_or_interface_listeners_
    628       GUARDED_BY(Locks::mutator_lock_);
    629   std::list<InstrumentationListener*> dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);
    630   std::list<InstrumentationListener*> field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);
    631   std::list<InstrumentationListener*> field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);
    632   std::list<InstrumentationListener*> exception_caught_listeners_ GUARDED_BY(Locks::mutator_lock_);
    633 
    634   // The set of methods being deoptimized (by the debugger) which must be executed with interpreter
    635   // only.
    636   mutable ReaderWriterMutex deoptimized_methods_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
    637   std::unordered_set<ArtMethod*> deoptimized_methods_ GUARDED_BY(deoptimized_methods_lock_);
    638   bool deoptimization_enabled_;
    639 
    640   // Current interpreter handler table. This is updated each time the thread state flags are
    641   // modified.
    642   InterpreterHandlerTable interpreter_handler_table_ GUARDED_BY(Locks::mutator_lock_);
    643 
    644   // Greater than 0 if quick alloc entry points instrumented.
    645   size_t quick_alloc_entry_points_instrumentation_counter_;
    646 
    647   // alloc_entrypoints_instrumented_ is only updated with all the threads suspended, this is done
    648   // to prevent races with the GC where the GC relies on thread suspension only see
    649   // alloc_entrypoints_instrumented_ change during suspend points.
    650   bool alloc_entrypoints_instrumented_;
    651 
    652   friend class InstrumentationTest;  // For GetCurrentInstrumentationLevel and ConfigureStubs.
    653 
    654   DISALLOW_COPY_AND_ASSIGN(Instrumentation);
    655 };
    656 std::ostream& operator<<(std::ostream& os, const Instrumentation::InstrumentationEvent& rhs);
    657 std::ostream& operator<<(std::ostream& os, const Instrumentation::InstrumentationLevel& rhs);
    658 
    659 // An element in the instrumentation side stack maintained in art::Thread.
    660 struct InstrumentationStackFrame {
    661   InstrumentationStackFrame(mirror::Object* this_object, ArtMethod* method,
    662                             uintptr_t return_pc, size_t frame_id, bool interpreter_entry)
    663       : this_object_(this_object), method_(method), return_pc_(return_pc), frame_id_(frame_id),
    664         interpreter_entry_(interpreter_entry) {
    665   }
    666 
    667   std::string Dump() const REQUIRES_SHARED(Locks::mutator_lock_);
    668 
    669   mirror::Object* this_object_;
    670   ArtMethod* method_;
    671   uintptr_t return_pc_;
    672   size_t frame_id_;
    673   bool interpreter_entry_;
    674 };
    675 
    676 }  // namespace instrumentation
    677 }  // namespace art
    678 
    679 #endif  // ART_RUNTIME_INSTRUMENTATION_H_
    680