      1 /* Copyright (C) 2016 The Android Open Source Project
      2  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
      3  *
      4  * This file implements interfaces from the file jvmti.h. This implementation
      5  * is licensed under the same terms as the file jvmti.h.  The
      6  * copyright and license information for the file jvmti.h follows.
      7  *
      8  * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
      9  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     10  *
     11  * This code is free software; you can redistribute it and/or modify it
     12  * under the terms of the GNU General Public License version 2 only, as
     13  * published by the Free Software Foundation.  Oracle designates this
     14  * particular file as subject to the "Classpath" exception as provided
     15  * by Oracle in the LICENSE file that accompanied this code.
     16  *
     17  * This code is distributed in the hope that it will be useful, but WITHOUT
     18  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
     19  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
     20  * version 2 for more details (a copy is included in the LICENSE file that
     21  * accompanied this code).
     22  *
     23  * You should have received a copy of the GNU General Public License version
     24  * 2 along with this work; if not, write to the Free Software Foundation,
     25  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
     26  *
     27  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
     28  * or visit www.oracle.com if you need additional information or have any
     29  * questions.
     30  */
     31 
     32 #include "events-inl.h"
     33 
     34 #include <array>
     35 #include <sys/time.h>
     36 
     37 #include "arch/context.h"
     38 #include "art_field-inl.h"
     39 #include "art_jvmti.h"
     40 #include "art_method-inl.h"
     41 #include "base/mutex.h"
     42 #include "deopt_manager.h"
     43 #include "dex/dex_file_types.h"
     44 #include "gc/allocation_listener.h"
     45 #include "gc/gc_pause_listener.h"
     46 #include "gc/heap.h"
     47 #include "gc/scoped_gc_critical_section.h"
     48 #include "handle_scope-inl.h"
     49 #include "instrumentation.h"
     50 #include "jni/jni_env_ext-inl.h"
     51 #include "jni/jni_internal.h"
     52 #include "mirror/class.h"
     53 #include "mirror/object-inl.h"
     54 #include "monitor-inl.h"
     55 #include "nativehelper/scoped_local_ref.h"
     56 #include "runtime.h"
     57 #include "scoped_thread_state_change-inl.h"
     58 #include "stack.h"
     59 #include "thread-inl.h"
     60 #include "thread_list.h"
     61 #include "ti_phase.h"
     62 #include "ti_thread.h"
     63 #include "well_known_classes.h"
     64 
     65 namespace openjdkjvmti {
     66 
     67 void ArtJvmtiEventCallbacks::CopyExtensionsFrom(const ArtJvmtiEventCallbacks* cb) {
     68   if (art::kIsDebugBuild) {
     69     ArtJvmtiEventCallbacks clean;
     70     DCHECK_EQ(memcmp(&clean, this, sizeof(clean)), 0)
     71         << "CopyExtensionsFrom called with initialized eventsCallbacks!";
     72   }
     73   if (cb != nullptr) {
     74     memcpy(this, cb, sizeof(*this));
     75   } else {
     76     memset(this, 0, sizeof(*this));
     77   }
     78 }
     79 
     80 jvmtiError ArtJvmtiEventCallbacks::Set(jint index, jvmtiExtensionEvent cb) {
     81   switch (index) {
     82     case static_cast<jint>(ArtJvmtiEvent::kDdmPublishChunk):
     83       DdmPublishChunk = reinterpret_cast<ArtJvmtiEventDdmPublishChunk>(cb);
     84       return OK;
     85     default:
     86       return ERR(ILLEGAL_ARGUMENT);
     87   }
     88 }
     89 
     90 
     91 bool IsExtensionEvent(jint e) {
     92   return e >= static_cast<jint>(ArtJvmtiEvent::kMinEventTypeVal) &&
     93       e <= static_cast<jint>(ArtJvmtiEvent::kMaxEventTypeVal) &&
     94       IsExtensionEvent(static_cast<ArtJvmtiEvent>(e));
     95 }
     96 
     97 bool IsExtensionEvent(ArtJvmtiEvent e) {
     98   switch (e) {
     99     case ArtJvmtiEvent::kDdmPublishChunk:
    100       return true;
    101     default:
    102       return false;
    103   }
    104 }
    105 
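         // Returns true if the event is enabled either globally or for at least one individual thread.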
    106 bool EventMasks::IsEnabledAnywhere(ArtJvmtiEvent event) {
    107   return global_event_mask.Test(event) || unioned_thread_event_mask.Test(event);
    108 }
    109 
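         // Returns the event mask for the given thread, lazily creating a per-thread mask if necessary.
         // A null thread selects the global event mask.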
    110 EventMask& EventMasks::GetEventMask(art::Thread* thread) {
    111   if (thread == nullptr) {
    112     return global_event_mask;
    113   }
    114 
    115   for (auto& pair : thread_event_masks) {
    116     const UniqueThread& unique_thread = pair.first;
    117     if (unique_thread.first == thread &&
    118         unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
    119       return pair.second;
    120     }
    121   }
    122 
     123   // TODO: Remove any old UniqueThread with the same pointer, if one exists.
    124 
    125   thread_event_masks.emplace_back(UniqueThread(thread, thread->GetTid()), EventMask());
    126   return thread_event_masks.back().second;
    127 }
    128 
    129 EventMask* EventMasks::GetEventMaskOrNull(art::Thread* thread) {
    130   if (thread == nullptr) {
    131     return &global_event_mask;
    132   }
    133 
    134   for (auto& pair : thread_event_masks) {
    135     const UniqueThread& unique_thread = pair.first;
    136     if (unique_thread.first == thread &&
    137         unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
    138       return &pair.second;
    139     }
    140   }
    141 
    142   return nullptr;
    143 }
    144 
    145 
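         // Marks the event as enabled in the appropriate mask and keeps the per-thread union mask in sync.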
    146 void EventMasks::EnableEvent(ArtJvmTiEnv* env, art::Thread* thread, ArtJvmtiEvent event) {
    147   DCHECK_EQ(&env->event_masks, this);
    148   env->event_info_mutex_.AssertExclusiveHeld(art::Thread::Current());
    149   DCHECK(EventMask::EventIsInRange(event));
    150   GetEventMask(thread).Set(event);
    151   if (thread != nullptr) {
    152     unioned_thread_event_mask.Set(event, true);
    153   }
    154 }
    155 
    156 void EventMasks::DisableEvent(ArtJvmTiEnv* env, art::Thread* thread, ArtJvmtiEvent event) {
    157   DCHECK_EQ(&env->event_masks, this);
    158   env->event_info_mutex_.AssertExclusiveHeld(art::Thread::Current());
    159   DCHECK(EventMask::EventIsInRange(event));
    160   GetEventMask(thread).Set(event, false);
    161   if (thread != nullptr) {
    162     // Regenerate union for the event.
    163     bool union_value = false;
    164     for (auto& pair : thread_event_masks) {
    165       union_value |= pair.second.Test(event);
    166       if (union_value) {
    167         break;
    168       }
    169     }
    170     unioned_thread_event_mask.Set(event, union_value);
    171   }
    172 }
    173 
    174 void EventMasks::HandleChangedCapabilities(const jvmtiCapabilities& caps, bool caps_added) {
    175   if (UNLIKELY(caps.can_retransform_classes == 1)) {
    176     // If we are giving this env the retransform classes cap we need to switch all events of
    177     // NonTransformable to Transformable and vice versa.
    178     ArtJvmtiEvent to_remove = caps_added ? ArtJvmtiEvent::kClassFileLoadHookNonRetransformable
    179                                          : ArtJvmtiEvent::kClassFileLoadHookRetransformable;
    180     ArtJvmtiEvent to_add = caps_added ? ArtJvmtiEvent::kClassFileLoadHookRetransformable
    181                                       : ArtJvmtiEvent::kClassFileLoadHookNonRetransformable;
    182     if (global_event_mask.Test(to_remove)) {
    183       CHECK(!global_event_mask.Test(to_add));
    184       global_event_mask.Set(to_remove, false);
    185       global_event_mask.Set(to_add, true);
    186     }
    187 
    188     if (unioned_thread_event_mask.Test(to_remove)) {
    189       CHECK(!unioned_thread_event_mask.Test(to_add));
    190       unioned_thread_event_mask.Set(to_remove, false);
    191       unioned_thread_event_mask.Set(to_add, true);
    192     }
    193     for (auto thread_mask : thread_event_masks) {
    194       if (thread_mask.second.Test(to_remove)) {
    195         CHECK(!thread_mask.second.Test(to_add));
    196         thread_mask.second.Set(to_remove, false);
    197         thread_mask.second.Set(to_add, true);
    198       }
    199     }
    200   }
    201 }
    202 
    203 void EventHandler::RegisterArtJvmTiEnv(ArtJvmTiEnv* env) {
    204   art::WriterMutexLock mu(art::Thread::Current(), envs_lock_);
    205   envs.push_back(env);
    206 }
    207 
    208 void EventHandler::RemoveArtJvmTiEnv(ArtJvmTiEnv* env) {
    209   art::WriterMutexLock mu(art::Thread::Current(), envs_lock_);
     210   // Remove the env from the list, then recalculate the global event masks since this env may
     211   // have been the only one with certain events enabled.
    212   auto it = std::find(envs.begin(), envs.end(), env);
    213   if (it != envs.end()) {
    214     envs.erase(it);
    215     for (size_t i = static_cast<size_t>(ArtJvmtiEvent::kMinEventTypeVal);
    216          i <= static_cast<size_t>(ArtJvmtiEvent::kMaxEventTypeVal);
    217          ++i) {
    218       RecalculateGlobalEventMaskLocked(static_cast<ArtJvmtiEvent>(i));
    219     }
    220   }
    221 }
    222 
    223 static bool IsThreadControllable(ArtJvmtiEvent event) {
    224   switch (event) {
    225     case ArtJvmtiEvent::kVmInit:
    226     case ArtJvmtiEvent::kVmStart:
    227     case ArtJvmtiEvent::kVmDeath:
    228     case ArtJvmtiEvent::kThreadStart:
    229     case ArtJvmtiEvent::kCompiledMethodLoad:
    230     case ArtJvmtiEvent::kCompiledMethodUnload:
    231     case ArtJvmtiEvent::kDynamicCodeGenerated:
    232     case ArtJvmtiEvent::kDataDumpRequest:
    233       return false;
    234 
    235     default:
    236       return true;
    237   }
    238 }
    239 
    240 template<typename Type>
    241 static Type AddLocalRef(art::JNIEnvExt* e, art::ObjPtr<art::mirror::Object> obj)
    242     REQUIRES_SHARED(art::Locks::mutator_lock_) {
    243   return (obj == nullptr) ? nullptr : e->AddLocalReference<Type>(obj);
    244 }
    245 
    246 template<ArtJvmtiEvent kEvent, typename ...Args>
    247 static void RunEventCallback(EventHandler* handler,
    248                              art::Thread* self,
    249                              art::JNIEnvExt* jnienv,
    250                              Args... args)
    251     REQUIRES_SHARED(art::Locks::mutator_lock_) {
    252   ScopedLocalRef<jthread> thread_jni(jnienv, AddLocalRef<jthread>(jnienv, self->GetPeer()));
    253   handler->DispatchEvent<kEvent>(self,
    254                                  static_cast<JNIEnv*>(jnienv),
    255                                  thread_jni.get(),
    256                                  args...);
    257 }
    258 
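         // Adds or removes the DDM chunk callback on the runtime, depending on 'enable'.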
    259 static void SetupDdmTracking(art::DdmCallback* listener, bool enable) {
    260   art::ScopedObjectAccess soa(art::Thread::Current());
    261   if (enable) {
    262     art::Runtime::Current()->GetRuntimeCallbacks()->AddDdmCallback(listener);
    263   } else {
    264     art::Runtime::Current()->GetRuntimeCallbacks()->RemoveDdmCallback(listener);
    265   }
    266 }
    267 
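         // Forwards DDM chunk publications from the runtime to the kDdmPublishChunk extension event.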
    268 class JvmtiDdmChunkListener : public art::DdmCallback {
    269  public:
    270   explicit JvmtiDdmChunkListener(EventHandler* handler) : handler_(handler) {}
    271 
    272   void DdmPublishChunk(uint32_t type, const art::ArrayRef<const uint8_t>& data)
    273       override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    274     if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kDdmPublishChunk)) {
    275       art::Thread* self = art::Thread::Current();
    276       handler_->DispatchEvent<ArtJvmtiEvent::kDdmPublishChunk>(
    277           self,
    278           static_cast<JNIEnv*>(self->GetJniEnv()),
    279           static_cast<jint>(type),
    280           static_cast<jint>(data.size()),
    281           reinterpret_cast<const jbyte*>(data.data()));
    282     }
    283   }
    284 
    285  private:
    286   EventHandler* handler_;
    287 
    288   DISALLOW_COPY_AND_ASSIGN(JvmtiDdmChunkListener);
    289 };
    290 
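         // Reports heap allocations to agents as JVMTI VMObjectAlloc events.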
    291 class JvmtiAllocationListener : public art::gc::AllocationListener {
    292  public:
    293   explicit JvmtiAllocationListener(EventHandler* handler) : handler_(handler) {}
    294 
    295   void ObjectAllocated(art::Thread* self, art::ObjPtr<art::mirror::Object>* obj, size_t byte_count)
    296       override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    297     DCHECK_EQ(self, art::Thread::Current());
    298 
    299     if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kVmObjectAlloc)) {
    300       art::StackHandleScope<1> hs(self);
    301       auto h = hs.NewHandleWrapper(obj);
    302       // jvmtiEventVMObjectAlloc parameters:
    303       //      jvmtiEnv *jvmti_env,
    304       //      JNIEnv* jni_env,
    305       //      jthread thread,
    306       //      jobject object,
    307       //      jclass object_klass,
    308       //      jlong size
    309       art::JNIEnvExt* jni_env = self->GetJniEnv();
    310       ScopedLocalRef<jobject> object(
    311           jni_env, jni_env->AddLocalReference<jobject>(*obj));
    312       ScopedLocalRef<jclass> klass(
    313           jni_env, jni_env->AddLocalReference<jclass>(obj->Ptr()->GetClass()));
    314 
    315       RunEventCallback<ArtJvmtiEvent::kVmObjectAlloc>(handler_,
    316                                                       self,
    317                                                       jni_env,
    318                                                       object.get(),
    319                                                       klass.get(),
    320                                                       static_cast<jlong>(byte_count));
    321     }
    322   }
    323 
    324  private:
    325   EventHandler* handler_;
    326 };
    327 
    328 static void SetupObjectAllocationTracking(art::gc::AllocationListener* listener, bool enable) {
    329   // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
    330   // now, do a workaround: (possibly) acquire and release.
    331   art::ScopedObjectAccess soa(art::Thread::Current());
    332   art::ScopedThreadSuspension sts(soa.Self(), art::ThreadState::kSuspended);
    333   if (enable) {
    334     art::Runtime::Current()->GetHeap()->SetAllocationListener(listener);
    335   } else {
    336     art::Runtime::Current()->GetHeap()->RemoveAllocationListener();
    337   }
    338 }
    339 
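         // Translates runtime monitor callbacks (contended locking and Object#wait) into the
         // corresponding JVMTI monitor events.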
    340 class JvmtiMonitorListener : public art::MonitorCallback {
    341  public:
    342   explicit JvmtiMonitorListener(EventHandler* handler) : handler_(handler) {}
    343 
    344   void MonitorContendedLocking(art::Monitor* m)
    345       override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    346     if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorContendedEnter)) {
    347       art::Thread* self = art::Thread::Current();
    348       art::JNIEnvExt* jnienv = self->GetJniEnv();
    349       ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
    350       RunEventCallback<ArtJvmtiEvent::kMonitorContendedEnter>(
    351           handler_,
    352           self,
    353           jnienv,
    354           mon.get());
    355     }
    356   }
    357 
    358   void MonitorContendedLocked(art::Monitor* m)
    359       override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    360     if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorContendedEntered)) {
    361       art::Thread* self = art::Thread::Current();
    362       art::JNIEnvExt* jnienv = self->GetJniEnv();
    363       ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
    364       RunEventCallback<ArtJvmtiEvent::kMonitorContendedEntered>(
    365           handler_,
    366           self,
    367           jnienv,
    368           mon.get());
    369     }
    370   }
    371 
    372   void ObjectWaitStart(art::Handle<art::mirror::Object> obj, int64_t timeout)
    373       override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    374     if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWait)) {
    375       art::Thread* self = art::Thread::Current();
    376       art::JNIEnvExt* jnienv = self->GetJniEnv();
    377       ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, obj.Get()));
    378       RunEventCallback<ArtJvmtiEvent::kMonitorWait>(
    379           handler_,
    380           self,
    381           jnienv,
    382           mon.get(),
    383           static_cast<jlong>(timeout));
    384     }
    385   }
    386 
    387 
     388   // Our interpretation of the spec is that the JVMTI_EVENT_MONITOR_WAITED event will be sent
     389   // immediately after a thread has woken up from a sleep caused by a call to Object#wait. If the
     390   // thread will never go to sleep (due to not holding the lock, having bad arguments, or having an
     391   // exception propagated from JVMTI_EVENT_MONITOR_WAIT) we will not send this event.
     392   //
     393   // This does not fully match the RI semantics. Specifically, we will not send the
     394   // JVMTI_EVENT_MONITOR_WAITED event in one situation where the RI would: there was an exception
     395   // in the JVMTI_EVENT_MONITOR_WAIT event but the call was otherwise fine. In that case the RI
     396   // would send this event and return without going to sleep.
    397   //
    398   // See b/65558434 for more discussion.
    399   void MonitorWaitFinished(art::Monitor* m, bool timeout)
    400       override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    401     if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWaited)) {
    402       art::Thread* self = art::Thread::Current();
    403       art::JNIEnvExt* jnienv = self->GetJniEnv();
    404       ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
    405       RunEventCallback<ArtJvmtiEvent::kMonitorWaited>(
    406           handler_,
    407           self,
    408           jnienv,
    409           mon.get(),
    410           static_cast<jboolean>(timeout));
    411     }
    412   }
    413 
    414  private:
    415   EventHandler* handler_;
    416 };
    417 
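         // Reports LockSupport park operations as MonitorWait/MonitorWaited events, using the park
         // blocker (or the thread's own peer) as the monitor object.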
    418 class JvmtiParkListener : public art::ParkCallback {
    419  public:
    420   explicit JvmtiParkListener(EventHandler* handler) : handler_(handler) {}
    421 
    422   void ThreadParkStart(bool is_absolute, int64_t timeout)
    423       override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    424     if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWait)) {
    425       art::Thread* self = art::Thread::Current();
    426       art::JNIEnvExt* jnienv = self->GetJniEnv();
    427       art::ArtField* parkBlockerField = art::jni::DecodeArtField(
    428           art::WellKnownClasses::java_lang_Thread_parkBlocker);
    429       art::ObjPtr<art::mirror::Object> blocker_obj = parkBlockerField->GetObj(self->GetPeer());
    430       if (blocker_obj.IsNull()) {
    431         blocker_obj = self->GetPeer();
    432       }
    433       int64_t timeout_ms;
    434       if (!is_absolute) {
    435         if (timeout == 0) {
    436           timeout_ms = 0;
    437         } else {
    438           timeout_ms = timeout / 1000000;
    439           if (timeout_ms == 0) {
    440             // If we were instructed to park for a nonzero number of nanoseconds, but not enough
    441             // to be a full millisecond, round up to 1 ms. A nonzero park() call will return
    442             // soon, but a 0 wait or park call will wait indefinitely.
    443             timeout_ms = 1;
    444           }
    445         }
    446       } else {
    447         struct timeval tv;
    448         gettimeofday(&tv, (struct timezone *) nullptr);
    449         int64_t now = tv.tv_sec * 1000LL + tv.tv_usec / 1000;
    450         if (now < timeout) {
    451           timeout_ms = timeout - now;
    452         } else {
    453           // Waiting for 0 ms is an indefinite wait; parking until a time in
    454           // the past or the current time will return immediately, so emulate
    455           // the shortest possible wait event.
    456           timeout_ms = 1;
    457         }
    458       }
    459       ScopedLocalRef<jobject> blocker(jnienv, AddLocalRef<jobject>(jnienv, blocker_obj.Ptr()));
    460       RunEventCallback<ArtJvmtiEvent::kMonitorWait>(
    461           handler_,
    462           self,
    463           jnienv,
    464           blocker.get(),
    465           static_cast<jlong>(timeout_ms));
    466     }
    467   }
    468 
    469 
     470   // Our interpretation of the spec is that the JVMTI_EVENT_MONITOR_WAITED event will be sent
     471   // immediately after a thread has woken up from a sleep caused by a call to park (which we
     472   // report as a monitor wait). If the thread will never go to sleep (due to having bad arguments
     473   // or an exception propagated from JVMTI_EVENT_MONITOR_WAIT) we will not send this event.
     474   //
     475   // This does not fully match the RI semantics. Specifically, we will not send the
     476   // JVMTI_EVENT_MONITOR_WAITED event in one situation where the RI would: there was an exception
     477   // in the JVMTI_EVENT_MONITOR_WAIT event but the call was otherwise fine. In that case the RI
     478   // would send this event and return without going to sleep.
    479   //
    480   // See b/65558434 for more discussion.
    481   void ThreadParkFinished(bool timeout) override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    482     if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWaited)) {
    483       art::Thread* self = art::Thread::Current();
    484       art::JNIEnvExt* jnienv = self->GetJniEnv();
    485       art::ArtField* parkBlockerField = art::jni::DecodeArtField(
    486           art::WellKnownClasses::java_lang_Thread_parkBlocker);
    487       art::ObjPtr<art::mirror::Object> blocker_obj = parkBlockerField->GetObj(self->GetPeer());
    488       if (blocker_obj.IsNull()) {
    489         blocker_obj = self->GetPeer();
    490       }
    491       ScopedLocalRef<jobject> blocker(jnienv, AddLocalRef<jobject>(jnienv, blocker_obj.Ptr()));
    492       RunEventCallback<ArtJvmtiEvent::kMonitorWaited>(
    493           handler_,
    494           self,
    495           jnienv,
    496           blocker.get(),
    497           static_cast<jboolean>(timeout));
    498     }
    499   }
    500 
    501  private:
    502   EventHandler* handler_;
    503 };
    504 
    505 static void SetupMonitorListener(art::MonitorCallback* monitor_listener, art::ParkCallback* park_listener, bool enable) {
    506   // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
    507   // now, do a workaround: (possibly) acquire and release.
    508   art::ScopedObjectAccess soa(art::Thread::Current());
    509   if (enable) {
    510     art::Runtime::Current()->GetRuntimeCallbacks()->AddMonitorCallback(monitor_listener);
    511     art::Runtime::Current()->GetRuntimeCallbacks()->AddParkCallback(park_listener);
    512   } else {
    513     art::Runtime::Current()->GetRuntimeCallbacks()->RemoveMonitorCallback(monitor_listener);
    514     art::Runtime::Current()->GetRuntimeCallbacks()->RemoveParkCallback(park_listener);
    515   }
    516 }
    517 
    518 // Report GC pauses (see spec) as GARBAGE_COLLECTION_START and GARBAGE_COLLECTION_END.
    519 class JvmtiGcPauseListener : public art::gc::GcPauseListener {
    520  public:
    521   explicit JvmtiGcPauseListener(EventHandler* handler)
    522       : handler_(handler),
    523         start_enabled_(false),
    524         finish_enabled_(false) {}
    525 
    526   void StartPause() override {
    527     handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionStart>(art::Thread::Current());
    528   }
    529 
    530   void EndPause() override {
    531     handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionFinish>(art::Thread::Current());
    532   }
    533 
    534   bool IsEnabled() {
    535     return start_enabled_ || finish_enabled_;
    536   }
    537 
    538   void SetStartEnabled(bool e) {
    539     start_enabled_ = e;
    540   }
    541 
    542   void SetFinishEnabled(bool e) {
    543     finish_enabled_ = e;
    544   }
    545 
    546  private:
    547   EventHandler* handler_;
    548   bool start_enabled_;
    549   bool finish_enabled_;
    550 };
    551 
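         // Installs or removes the GC pause listener depending on whether either GC event remains enabled.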
    552 static void SetupGcPauseTracking(JvmtiGcPauseListener* listener, ArtJvmtiEvent event, bool enable) {
    553   bool old_state = listener->IsEnabled();
    554 
    555   if (event == ArtJvmtiEvent::kGarbageCollectionStart) {
    556     listener->SetStartEnabled(enable);
    557   } else {
    558     listener->SetFinishEnabled(enable);
    559   }
    560 
    561   bool new_state = listener->IsEnabled();
    562 
    563   if (old_state != new_state) {
    564     if (new_state) {
    565       art::Runtime::Current()->GetHeap()->SetGcPauseListener(listener);
    566     } else {
    567       art::Runtime::Current()->GetHeap()->RemoveGcPauseListener();
    568     }
    569   }
    570 }
    571 
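         // Bridges ART instrumentation callbacks (method entry/exit, dex pc moves, field accesses,
         // exceptions and frame pops) to the corresponding JVMTI events.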
    572 class JvmtiMethodTraceListener final : public art::instrumentation::InstrumentationListener {
    573  public:
    574   explicit JvmtiMethodTraceListener(EventHandler* handler) : event_handler_(handler) {}
    575 
    576   // Call-back for when a method is entered.
    577   void MethodEntered(art::Thread* self,
    578                      art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
    579                      art::ArtMethod* method,
    580                      uint32_t dex_pc ATTRIBUTE_UNUSED)
    581       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    582     if (!method->IsRuntimeMethod() &&
    583         event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodEntry)) {
    584       art::JNIEnvExt* jnienv = self->GetJniEnv();
    585       RunEventCallback<ArtJvmtiEvent::kMethodEntry>(event_handler_,
    586                                                     self,
    587                                                     jnienv,
    588                                                     art::jni::EncodeArtMethod(method));
    589     }
    590   }
    591 
    592   // Callback for when a method is exited with a reference return value.
    593   void MethodExited(art::Thread* self,
    594                     art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
    595                     art::ArtMethod* method,
    596                     uint32_t dex_pc ATTRIBUTE_UNUSED,
    597                     art::Handle<art::mirror::Object> return_value)
    598       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    599     if (!method->IsRuntimeMethod() &&
    600         event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
    601       DCHECK_EQ(
    602           method->GetInterfaceMethodIfProxy(art::kRuntimePointerSize)->GetReturnTypePrimitive(),
    603           art::Primitive::kPrimNot) << method->PrettyMethod();
    604       DCHECK(!self->IsExceptionPending());
    605       jvalue val;
    606       art::JNIEnvExt* jnienv = self->GetJniEnv();
    607       ScopedLocalRef<jobject> return_jobj(jnienv, AddLocalRef<jobject>(jnienv, return_value.Get()));
    608       val.l = return_jobj.get();
    609       RunEventCallback<ArtJvmtiEvent::kMethodExit>(
    610           event_handler_,
    611           self,
    612           jnienv,
    613           art::jni::EncodeArtMethod(method),
    614           /*was_popped_by_exception=*/ static_cast<jboolean>(JNI_FALSE),
    615           val);
    616     }
    617   }
    618 
    619   // Call-back for when a method is exited.
    620   void MethodExited(art::Thread* self,
    621                     art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
    622                     art::ArtMethod* method,
    623                     uint32_t dex_pc ATTRIBUTE_UNUSED,
    624                     const art::JValue& return_value)
    625       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    626     if (!method->IsRuntimeMethod() &&
    627         event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
    628       DCHECK_NE(
    629           method->GetInterfaceMethodIfProxy(art::kRuntimePointerSize)->GetReturnTypePrimitive(),
    630           art::Primitive::kPrimNot) << method->PrettyMethod();
    631       DCHECK(!self->IsExceptionPending());
    632       jvalue val;
    633       art::JNIEnvExt* jnienv = self->GetJniEnv();
     634       // A 64-bit integer is the largest member of the union, so we can safely copy the return
     635       // value in as a jlong.
    636       val.j = return_value.GetJ();
    637       RunEventCallback<ArtJvmtiEvent::kMethodExit>(
    638           event_handler_,
    639           self,
    640           jnienv,
    641           art::jni::EncodeArtMethod(method),
    642           /*was_popped_by_exception=*/ static_cast<jboolean>(JNI_FALSE),
    643           val);
    644     }
    645   }
    646 
    647   // Call-back for when a method is popped due to an exception throw. A method will either cause a
    648   // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
    649   void MethodUnwind(art::Thread* self,
    650                     art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
    651                     art::ArtMethod* method,
    652                     uint32_t dex_pc ATTRIBUTE_UNUSED)
    653       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    654     if (!method->IsRuntimeMethod() &&
    655         event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
    656       jvalue val;
    657       // Just set this to 0xffffffffffffffff so it's not uninitialized.
    658       val.j = static_cast<jlong>(-1);
    659       art::JNIEnvExt* jnienv = self->GetJniEnv();
    660       art::StackHandleScope<1> hs(self);
    661       art::Handle<art::mirror::Throwable> old_exception(hs.NewHandle(self->GetException()));
    662       CHECK(!old_exception.IsNull());
    663       self->ClearException();
    664       RunEventCallback<ArtJvmtiEvent::kMethodExit>(
    665           event_handler_,
    666           self,
    667           jnienv,
    668           art::jni::EncodeArtMethod(method),
    669           /*was_popped_by_exception=*/ static_cast<jboolean>(JNI_TRUE),
    670           val);
    671       // Match RI behavior of just throwing away original exception if a new one is thrown.
    672       if (LIKELY(!self->IsExceptionPending())) {
    673         self->SetException(old_exception.Get());
    674       }
    675     }
    676   }
    677 
    678   // Call-back for when the dex pc moves in a method.
    679   void DexPcMoved(art::Thread* self,
    680                   art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
    681                   art::ArtMethod* method,
    682                   uint32_t new_dex_pc)
    683       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    684     DCHECK(!method->IsRuntimeMethod());
    685     // Default methods might be copied to multiple classes. We need to get the canonical version of
    686     // this method so that we can check for breakpoints correctly.
    687     // TODO We should maybe do this on other events to ensure that we are consistent WRT default
    688     // methods. This could interact with obsolete methods if we ever let interface redefinition
    689     // happen though.
    690     method = method->GetCanonicalMethod();
    691     art::JNIEnvExt* jnienv = self->GetJniEnv();
    692     jmethodID jmethod = art::jni::EncodeArtMethod(method);
    693     jlocation location = static_cast<jlocation>(new_dex_pc);
    694     // Step event is reported first according to the spec.
    695     if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kSingleStep)) {
    696       RunEventCallback<ArtJvmtiEvent::kSingleStep>(event_handler_, self, jnienv, jmethod, location);
    697     }
     698     // Next we do the Breakpoint events. The Dispatch code will filter out the individual
             // envs that do not have a breakpoint set at this particular location.
    699     if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kBreakpoint)) {
    700       RunEventCallback<ArtJvmtiEvent::kBreakpoint>(event_handler_, self, jnienv, jmethod, location);
    701     }
    702   }
    703 
    704   // Call-back for when we read from a field.
    705   void FieldRead(art::Thread* self,
    706                  art::Handle<art::mirror::Object> this_object,
    707                  art::ArtMethod* method,
    708                  uint32_t dex_pc,
    709                  art::ArtField* field)
    710       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    711     if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldAccess)) {
    712       art::JNIEnvExt* jnienv = self->GetJniEnv();
    713       // DCHECK(!self->IsExceptionPending());
    714       ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
    715       ScopedLocalRef<jobject> fklass(jnienv,
    716                                      AddLocalRef<jobject>(jnienv,
    717                                                           field->GetDeclaringClass().Ptr()));
    718       RunEventCallback<ArtJvmtiEvent::kFieldAccess>(event_handler_,
    719                                                     self,
    720                                                     jnienv,
    721                                                     art::jni::EncodeArtMethod(method),
    722                                                     static_cast<jlocation>(dex_pc),
    723                                                     static_cast<jclass>(fklass.get()),
    724                                                     this_ref.get(),
    725                                                     art::jni::EncodeArtField(field));
    726     }
    727   }
    728 
    729   void FieldWritten(art::Thread* self,
    730                     art::Handle<art::mirror::Object> this_object,
    731                     art::ArtMethod* method,
    732                     uint32_t dex_pc,
    733                     art::ArtField* field,
    734                     art::Handle<art::mirror::Object> new_val)
    735       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    736     if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
    737       art::JNIEnvExt* jnienv = self->GetJniEnv();
    738       // DCHECK(!self->IsExceptionPending());
    739       ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
    740       ScopedLocalRef<jobject> fklass(jnienv,
    741                                      AddLocalRef<jobject>(jnienv,
    742                                                           field->GetDeclaringClass().Ptr()));
    743       ScopedLocalRef<jobject> fval(jnienv, AddLocalRef<jobject>(jnienv, new_val.Get()));
    744       jvalue val;
    745       val.l = fval.get();
    746       RunEventCallback<ArtJvmtiEvent::kFieldModification>(
    747           event_handler_,
    748           self,
    749           jnienv,
    750           art::jni::EncodeArtMethod(method),
    751           static_cast<jlocation>(dex_pc),
    752           static_cast<jclass>(fklass.get()),
    753           field->IsStatic() ? nullptr :  this_ref.get(),
    754           art::jni::EncodeArtField(field),
    755           'L',  // type_char
    756           val);
    757     }
    758   }
    759 
    760   // Call-back for when we write into a field.
    761   void FieldWritten(art::Thread* self,
    762                     art::Handle<art::mirror::Object> this_object,
    763                     art::ArtMethod* method,
    764                     uint32_t dex_pc,
    765                     art::ArtField* field,
    766                     const art::JValue& field_value)
    767       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    768     if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
    769       art::JNIEnvExt* jnienv = self->GetJniEnv();
    770       DCHECK(!self->IsExceptionPending());
    771       ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
    772       ScopedLocalRef<jobject> fklass(jnienv,
    773                                      AddLocalRef<jobject>(jnienv,
    774                                                           field->GetDeclaringClass().Ptr()));
    775       char type_char = art::Primitive::Descriptor(field->GetTypeAsPrimitiveType())[0];
    776       jvalue val;
     777       // A 64-bit integer is the largest member of the union, so we can safely copy the field
     778       // value in as a jlong.
    779       val.j = field_value.GetJ();
    780       RunEventCallback<ArtJvmtiEvent::kFieldModification>(
    781           event_handler_,
    782           self,
    783           jnienv,
    784           art::jni::EncodeArtMethod(method),
    785           static_cast<jlocation>(dex_pc),
    786           static_cast<jclass>(fklass.get()),
     787           field->IsStatic() ? nullptr :  this_ref.get(),  // NB: static field modifications are
     788                                                           // given the class as this_object for
     789                                                           // some reason.
    790           art::jni::EncodeArtField(field),
    791           type_char,
    792           val);
    793     }
    794   }
    795 
    796   void WatchedFramePop(art::Thread* self, const art::ShadowFrame& frame)
    797       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
     798     art::JNIEnvExt* jnienv = self->GetJniEnv();
    799     // Remove the force-interpreter added by the WatchFrame.
    800     {
    801       art::MutexLock mu(self, *art::Locks::thread_list_lock_);
    802       CHECK_GT(self->ForceInterpreterCount(), 0u);
    803       self->DecrementForceInterpreterCount();
    804     }
    805     jboolean is_exception_pending = self->IsExceptionPending();
    806     RunEventCallback<ArtJvmtiEvent::kFramePop>(
    807         event_handler_,
    808         self,
    809         jnienv,
    810         art::jni::EncodeArtMethod(frame.GetMethod()),
    811         is_exception_pending,
    812         &frame);
    813   }
    814 
    815   static void FindCatchMethodsFromThrow(art::Thread* self,
    816                                         art::Handle<art::mirror::Throwable> exception,
    817                                         /*out*/ art::ArtMethod** out_method,
    818                                         /*out*/ uint32_t* dex_pc)
    819       REQUIRES_SHARED(art::Locks::mutator_lock_) {
    820     // Finds the location where this exception will most likely be caught. We ignore intervening
    821     // native frames (which could catch the exception) and return the closest java frame with a
    822     // compatible catch statement.
    823     class CatchLocationFinder final : public art::StackVisitor {
    824      public:
    825       CatchLocationFinder(art::Thread* target,
    826                           art::Handle<art::mirror::Class> exception_class,
    827                           art::Context* context,
    828                           /*out*/ art::ArtMethod** out_catch_method,
    829                           /*out*/ uint32_t* out_catch_pc)
    830           REQUIRES_SHARED(art::Locks::mutator_lock_)
    831         : StackVisitor(target, context, art::StackVisitor::StackWalkKind::kIncludeInlinedFrames),
    832           exception_class_(exception_class),
    833           catch_method_ptr_(out_catch_method),
    834           catch_dex_pc_ptr_(out_catch_pc) {}
    835 
    836       bool VisitFrame() override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    837         art::ArtMethod* method = GetMethod();
    838         DCHECK(method != nullptr);
    839         if (method->IsRuntimeMethod()) {
    840           return true;
    841         }
    842 
    843         if (!method->IsNative()) {
    844           uint32_t cur_dex_pc = GetDexPc();
    845           if (cur_dex_pc == art::dex::kDexNoIndex) {
    846             // This frame looks opaque. Just keep on going.
    847             return true;
    848           }
    849           bool has_no_move_exception = false;
    850           uint32_t found_dex_pc = method->FindCatchBlock(
    851               exception_class_, cur_dex_pc, &has_no_move_exception);
    852           if (found_dex_pc != art::dex::kDexNoIndex) {
    853             // We found the catch. Store the result and return.
    854             *catch_method_ptr_ = method;
    855             *catch_dex_pc_ptr_ = found_dex_pc;
    856             return false;
    857           }
    858         }
    859         return true;
    860       }
    861 
    862      private:
    863       art::Handle<art::mirror::Class> exception_class_;
    864       art::ArtMethod** catch_method_ptr_;
    865       uint32_t* catch_dex_pc_ptr_;
    866 
    867       DISALLOW_COPY_AND_ASSIGN(CatchLocationFinder);
    868     };
    869 
    870     art::StackHandleScope<1> hs(self);
    871     *out_method = nullptr;
    872     *dex_pc = 0;
    873     std::unique_ptr<art::Context> context(art::Context::Create());
    874 
    875     CatchLocationFinder clf(self,
    876                             hs.NewHandle(exception->GetClass()),
    877                             context.get(),
    878                             /*out*/ out_method,
    879                             /*out*/ dex_pc);
    880     clf.WalkStack(/* include_transitions= */ false);
    881   }
    882 
    883   // Call-back when an exception is thrown.
    884   void ExceptionThrown(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
    885       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    886     DCHECK(self->IsExceptionThrownByCurrentMethod(exception_object.Get()));
    887     // The instrumentation events get rid of this for us.
    888     DCHECK(!self->IsExceptionPending());
    889     if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kException)) {
    890       art::JNIEnvExt* jnienv = self->GetJniEnv();
    891       art::ArtMethod* catch_method;
    892       uint32_t catch_pc;
    893       FindCatchMethodsFromThrow(self, exception_object, &catch_method, &catch_pc);
    894       uint32_t dex_pc = 0;
    895       art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
    896                                                       /* check_suspended= */ true,
    897                                                       /* abort_on_error= */ art::kIsDebugBuild);
    898       ScopedLocalRef<jobject> exception(jnienv,
    899                                         AddLocalRef<jobject>(jnienv, exception_object.Get()));
    900       RunEventCallback<ArtJvmtiEvent::kException>(
    901           event_handler_,
    902           self,
    903           jnienv,
    904           art::jni::EncodeArtMethod(method),
    905           static_cast<jlocation>(dex_pc),
    906           exception.get(),
    907           art::jni::EncodeArtMethod(catch_method),
    908           static_cast<jlocation>(catch_pc));
    909     }
    910     return;
    911   }
    912 
    913   // Call-back when an exception is handled.
    914   void ExceptionHandled(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
    915       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    916     // Since the exception has already been handled there shouldn't be one pending.
    917     DCHECK(!self->IsExceptionPending());
    918     if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kExceptionCatch)) {
    919       art::JNIEnvExt* jnienv = self->GetJniEnv();
    920       uint32_t dex_pc;
    921       art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
    922                                                       /* check_suspended= */ true,
    923                                                       /* abort_on_error= */ art::kIsDebugBuild);
    924       ScopedLocalRef<jobject> exception(jnienv,
    925                                         AddLocalRef<jobject>(jnienv, exception_object.Get()));
    926       RunEventCallback<ArtJvmtiEvent::kExceptionCatch>(
    927           event_handler_,
    928           self,
    929           jnienv,
    930           art::jni::EncodeArtMethod(method),
    931           static_cast<jlocation>(dex_pc),
    932           exception.get());
    933     }
    934     return;
    935   }
    936 
    937   // Call-back for when we execute a branch.
    938   void Branch(art::Thread* self ATTRIBUTE_UNUSED,
    939               art::ArtMethod* method ATTRIBUTE_UNUSED,
    940               uint32_t dex_pc ATTRIBUTE_UNUSED,
    941               int32_t dex_pc_offset ATTRIBUTE_UNUSED)
    942       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    943     return;
    944   }
    945 
    946  private:
    947   EventHandler* const event_handler_;
    948 };
    949 
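         // Maps a JVMTI event onto the mask of ART instrumentation events needed to implement it.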
    950 static uint32_t GetInstrumentationEventsFor(ArtJvmtiEvent event) {
    951   switch (event) {
    952     case ArtJvmtiEvent::kMethodEntry:
    953       return art::instrumentation::Instrumentation::kMethodEntered;
    954     case ArtJvmtiEvent::kMethodExit:
    955       return art::instrumentation::Instrumentation::kMethodExited |
    956              art::instrumentation::Instrumentation::kMethodUnwind;
    957     case ArtJvmtiEvent::kFieldModification:
    958       return art::instrumentation::Instrumentation::kFieldWritten;
    959     case ArtJvmtiEvent::kFieldAccess:
    960       return art::instrumentation::Instrumentation::kFieldRead;
    961     case ArtJvmtiEvent::kBreakpoint:
    962     case ArtJvmtiEvent::kSingleStep:
    963       return art::instrumentation::Instrumentation::kDexPcMoved;
    964     case ArtJvmtiEvent::kFramePop:
    965       return art::instrumentation::Instrumentation::kWatchedFramePop;
    966     case ArtJvmtiEvent::kException:
    967       return art::instrumentation::Instrumentation::kExceptionThrown;
    968     case ArtJvmtiEvent::kExceptionCatch:
    969       return art::instrumentation::Instrumentation::kExceptionHandled;
    970     default:
     971       LOG(FATAL) << "Unknown event " << static_cast<int>(event);
    972       UNREACHABLE();
    973   }
    974 }
    975 
    976 enum class DeoptRequirement {
    977   // No deoptimization work required.
    978   kNone,
     979   // Limited deoptimization required; any deopt needed is handled elsewhere on a per-method
           // basis (e.g. when a breakpoint is set).
    980   kLimited,
    981   // A single thread must be put into interpret only.
    982   kThread,
    983   // All methods and all threads deopted.
    984   kFull,
    985 };
    986 
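         // Determines how much deoptimization is required to be able to deliver the given event,
         // optionally limited to a single thread.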
    987 static DeoptRequirement GetDeoptRequirement(ArtJvmtiEvent event, jthread thread) {
    988   switch (event) {
    989     case ArtJvmtiEvent::kBreakpoint:
    990     case ArtJvmtiEvent::kException:
    991       return DeoptRequirement::kLimited;
     992     // TODO Full deopt is needed for MethodEntry due to inconsistencies between the interpreter
     993     // and the trampolines in how exceptions are handled.
    994     case ArtJvmtiEvent::kMethodEntry:
    995     case ArtJvmtiEvent::kExceptionCatch:
    996       return DeoptRequirement::kFull;
    997     case ArtJvmtiEvent::kMethodExit:
    998     case ArtJvmtiEvent::kFieldModification:
    999     case ArtJvmtiEvent::kFieldAccess:
   1000     case ArtJvmtiEvent::kSingleStep:
   1001     case ArtJvmtiEvent::kFramePop:
   1002       return thread == nullptr ? DeoptRequirement::kFull : DeoptRequirement::kThread;
   1003     case ArtJvmtiEvent::kVmInit:
   1004     case ArtJvmtiEvent::kVmDeath:
   1005     case ArtJvmtiEvent::kThreadStart:
   1006     case ArtJvmtiEvent::kThreadEnd:
   1007     case ArtJvmtiEvent::kClassFileLoadHookNonRetransformable:
   1008     case ArtJvmtiEvent::kClassLoad:
   1009     case ArtJvmtiEvent::kClassPrepare:
   1010     case ArtJvmtiEvent::kVmStart:
   1011     case ArtJvmtiEvent::kNativeMethodBind:
   1012     case ArtJvmtiEvent::kCompiledMethodLoad:
   1013     case ArtJvmtiEvent::kCompiledMethodUnload:
   1014     case ArtJvmtiEvent::kDynamicCodeGenerated:
   1015     case ArtJvmtiEvent::kDataDumpRequest:
   1016     case ArtJvmtiEvent::kMonitorWait:
   1017     case ArtJvmtiEvent::kMonitorWaited:
   1018     case ArtJvmtiEvent::kMonitorContendedEnter:
   1019     case ArtJvmtiEvent::kMonitorContendedEntered:
   1020     case ArtJvmtiEvent::kResourceExhausted:
   1021     case ArtJvmtiEvent::kGarbageCollectionStart:
   1022     case ArtJvmtiEvent::kGarbageCollectionFinish:
   1023     case ArtJvmtiEvent::kObjectFree:
   1024     case ArtJvmtiEvent::kVmObjectAlloc:
   1025     case ArtJvmtiEvent::kClassFileLoadHookRetransformable:
   1026     case ArtJvmtiEvent::kDdmPublishChunk:
   1027       return DeoptRequirement::kNone;
   1028   }
   1029 }
   1030 
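         // Registers or removes the deoptimization requirements for the event with the DeoptManager.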
   1031 jvmtiError EventHandler::HandleEventDeopt(ArtJvmtiEvent event, jthread thread, bool enable) {
   1032   DeoptRequirement deopt_req = GetDeoptRequirement(event, thread);
   1033   // Make sure we can deopt.
   1034   if (deopt_req != DeoptRequirement::kNone) {
   1035     art::ScopedObjectAccess soa(art::Thread::Current());
   1036     DeoptManager* deopt_manager = DeoptManager::Get();
   1037     jvmtiError err = OK;
   1038     if (enable) {
   1039       deopt_manager->AddDeoptimizationRequester();
   1040       switch (deopt_req) {
   1041         case DeoptRequirement::kFull:
   1042           deopt_manager->AddDeoptimizeAllMethods();
   1043           break;
   1044         case DeoptRequirement::kThread:
   1045           err = deopt_manager->AddDeoptimizeThreadMethods(soa, thread);
   1046           break;
   1047         default:
   1048           break;
   1049       }
   1050       if (err != OK) {
   1051         deopt_manager->RemoveDeoptimizationRequester();
   1052         return err;
   1053       }
   1054     } else {
   1055       switch (deopt_req) {
   1056         case DeoptRequirement::kFull:
   1057           deopt_manager->RemoveDeoptimizeAllMethods();
   1058           break;
   1059         case DeoptRequirement::kThread:
   1060           err = deopt_manager->RemoveDeoptimizeThreadMethods(soa, thread);
   1061           break;
   1062         default:
   1063           break;
   1064       }
   1065       deopt_manager->RemoveDeoptimizationRequester();
   1066       if (err != OK) {
   1067         return err;
   1068       }
   1069     }
   1070   }
   1071   return OK;
   1072 }
   1073 
   1074 void EventHandler::SetupTraceListener(JvmtiMethodTraceListener* listener,
   1075                                       ArtJvmtiEvent event,
   1076                                       bool enable) {
   1077   // Add the actual listeners.
   1078   uint32_t new_events = GetInstrumentationEventsFor(event);
   1079   if (new_events == art::instrumentation::Instrumentation::kDexPcMoved) {
    1080     // We need to skip adding the listeners if the event is breakpoint/single-step since those
    1081     // events share the same underlying art-instrumentation event. Each still needs its own deopt
    1082     // request, however, which is why this check is deferred until this point.
   1083     DCHECK(event == ArtJvmtiEvent::kBreakpoint || event == ArtJvmtiEvent::kSingleStep);
   1084     ArtJvmtiEvent other = event == ArtJvmtiEvent::kBreakpoint ? ArtJvmtiEvent::kSingleStep
   1085                                                               : ArtJvmtiEvent::kBreakpoint;
   1086     if (IsEventEnabledAnywhere(other)) {
   1087       // The event needs to be kept around/is already enabled by the other jvmti event that uses the
   1088       // same instrumentation event.
   1089       return;
   1090     }
   1091   }
   1092   art::ScopedThreadStateChange stsc(art::Thread::Current(), art::ThreadState::kNative);
   1093   art::instrumentation::Instrumentation* instr = art::Runtime::Current()->GetInstrumentation();
   1094   art::ScopedSuspendAll ssa("jvmti method tracing installation");
   1095   if (enable) {
   1096     instr->AddListener(listener, new_events);
   1097   } else {
   1098     instr->RemoveListener(listener, new_events);
   1099   }
   1100   return;
   1101 }
   1102 
   1103 // Makes sure that all compiled methods are AsyncDeoptimizable so we can deoptimize (and force to
   1104 // the switch interpreter) when we try to get or set a local variable.
   1105 void EventHandler::HandleLocalAccessCapabilityAdded() {
   1106   class UpdateEntryPointsClassVisitor : public art::ClassVisitor {
   1107    public:
   1108     explicit UpdateEntryPointsClassVisitor(art::Runtime* runtime)
   1109         : runtime_(runtime) {}
   1110 
   1111     bool operator()(art::ObjPtr<art::mirror::Class> klass)
   1112         override REQUIRES(art::Locks::mutator_lock_) {
   1113       if (!klass->IsLoaded()) {
   1114         // Skip classes that aren't loaded since they might not have fully allocated and initialized
    1115         // their methods. Furthermore, since the jvmti-plugin must have been loaded by this point,
    1116         // these methods will definitely be using debuggable code.
   1117         return true;
   1118       }
   1119       for (auto& m : klass->GetMethods(art::kRuntimePointerSize)) {
   1120         const void* code = m.GetEntryPointFromQuickCompiledCode();
   1121         if (m.IsNative() || m.IsProxyMethod()) {
   1122           continue;
   1123         } else if (!runtime_->GetClassLinker()->IsQuickToInterpreterBridge(code) &&
   1124                    !runtime_->IsAsyncDeoptimizeable(reinterpret_cast<uintptr_t>(code))) {
   1125           runtime_->GetInstrumentation()->UpdateMethodsCodeToInterpreterEntryPoint(&m);
   1126         }
   1127       }
   1128       return true;
   1129     }
   1130 
   1131    private:
   1132     art::Runtime* runtime_;
   1133   };
   1134   art::ScopedObjectAccess soa(art::Thread::Current());
   1135   UpdateEntryPointsClassVisitor visitor(art::Runtime::Current());
   1136   art::Runtime::Current()->GetClassLinker()->VisitClasses(&visitor);
   1137 }
   1138 
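         // Returns true if any monitor-related event other than the given one is still enabled in some
         // env. Used to decide whether the shared monitor/park listeners need to be installed or removed.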
   1139 bool EventHandler::OtherMonitorEventsEnabledAnywhere(ArtJvmtiEvent event) {
   1140   std::array<ArtJvmtiEvent, 4> events {
   1141     {
   1142       ArtJvmtiEvent::kMonitorContendedEnter,
   1143       ArtJvmtiEvent::kMonitorContendedEntered,
   1144       ArtJvmtiEvent::kMonitorWait,
   1145       ArtJvmtiEvent::kMonitorWaited
   1146     }
   1147   };
   1148   for (ArtJvmtiEvent e : events) {
   1149     if (e != event && IsEventEnabledAnywhere(e)) {
   1150       return true;
   1151     }
   1152   }
   1153   return false;
   1154 }
   1155 
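         // Installs the FramePop trace listener, or removes it only once no env has unsent FramePop
         // events outstanding.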
   1156 void EventHandler::SetupFramePopTraceListener(bool enable) {
   1157   if (enable) {
   1158     frame_pop_enabled = true;
   1159     SetupTraceListener(method_trace_listener_.get(), ArtJvmtiEvent::kFramePop, enable);
   1160   } else {
    1161     // Only remove the listener if no env has outstanding frame-pop requests.
   1162     {
   1163       art::ReaderMutexLock mu(art::Thread::Current(), envs_lock_);
   1164       for (ArtJvmTiEnv *env : envs) {
   1165         art::ReaderMutexLock event_mu(art::Thread::Current(), env->event_info_mutex_);
   1166         if (!env->notify_frames.empty()) {
   1167           // Leaving FramePop listener since there are unsent FramePop events.
   1168           return;
   1169         }
   1170       }
   1171       frame_pop_enabled = false;
   1172     }
   1173     SetupTraceListener(method_trace_listener_.get(), ArtJvmtiEvent::kFramePop, enable);
   1174   }
   1175 }
   1176 
   1177 // Handle special work for the given event type, if necessary.
   1178 void EventHandler::HandleEventType(ArtJvmtiEvent event, bool enable) {
   1179   switch (event) {
   1180     case ArtJvmtiEvent::kDdmPublishChunk:
   1181       SetupDdmTracking(ddm_listener_.get(), enable);
   1182       return;
   1183     case ArtJvmtiEvent::kVmObjectAlloc:
   1184       SetupObjectAllocationTracking(alloc_listener_.get(), enable);
   1185       return;
   1186     case ArtJvmtiEvent::kGarbageCollectionStart:
   1187     case ArtJvmtiEvent::kGarbageCollectionFinish:
   1188       SetupGcPauseTracking(gc_pause_listener_.get(), event, enable);
   1189       return;
    1190     // Once the FramePop listener has been turned on it cannot be removed while there are still
    1191     // outstanding pop-events, since doing so would either leave dangling pointers or miss events.
   1192     case ArtJvmtiEvent::kFramePop:
   1193       if (enable && frame_pop_enabled) {
    1194         // The frame-pop listener was kept alive by pending events, so there is nothing to do.
   1195       } else {
   1196         SetupFramePopTraceListener(enable);
   1197       }
   1198       return;
   1199     case ArtJvmtiEvent::kMethodEntry:
   1200     case ArtJvmtiEvent::kMethodExit:
   1201     case ArtJvmtiEvent::kFieldAccess:
   1202     case ArtJvmtiEvent::kFieldModification:
   1203     case ArtJvmtiEvent::kException:
   1204     case ArtJvmtiEvent::kExceptionCatch:
   1205     case ArtJvmtiEvent::kBreakpoint:
   1206     case ArtJvmtiEvent::kSingleStep:
   1207       SetupTraceListener(method_trace_listener_.get(), event, enable);
   1208       return;
   1209     case ArtJvmtiEvent::kMonitorContendedEnter:
   1210     case ArtJvmtiEvent::kMonitorContendedEntered:
   1211     case ArtJvmtiEvent::kMonitorWait:
   1212     case ArtJvmtiEvent::kMonitorWaited:
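               // The monitor and park listeners are shared by all four monitor events; only install or
               // remove them when this is the first such event being enabled or the last one being disabled.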
   1213       if (!OtherMonitorEventsEnabledAnywhere(event)) {
   1214         SetupMonitorListener(monitor_listener_.get(), park_listener_.get(), enable);
   1215       }
   1216       return;
   1217     default:
   1218       break;
   1219   }
   1220   return;
   1221 }
   1222 
   1223 // Checks to see if the env has the capabilities associated with the given event.
   1224 static bool HasAssociatedCapability(ArtJvmTiEnv* env,
   1225                                     ArtJvmtiEvent event) {
   1226   jvmtiCapabilities caps = env->capabilities;
   1227   switch (event) {
   1228     case ArtJvmtiEvent::kBreakpoint:
   1229       return caps.can_generate_breakpoint_events == 1;
   1230 
   1231     case ArtJvmtiEvent::kCompiledMethodLoad:
   1232     case ArtJvmtiEvent::kCompiledMethodUnload:
   1233       return caps.can_generate_compiled_method_load_events == 1;
   1234 
   1235     case ArtJvmtiEvent::kException:
   1236     case ArtJvmtiEvent::kExceptionCatch:
   1237       return caps.can_generate_exception_events == 1;
   1238 
   1239     case ArtJvmtiEvent::kFieldAccess:
   1240       return caps.can_generate_field_access_events == 1;
   1241 
   1242     case ArtJvmtiEvent::kFieldModification:
   1243       return caps.can_generate_field_modification_events == 1;
   1244 
   1245     case ArtJvmtiEvent::kFramePop:
   1246       return caps.can_generate_frame_pop_events == 1;
   1247 
   1248     case ArtJvmtiEvent::kGarbageCollectionStart:
   1249     case ArtJvmtiEvent::kGarbageCollectionFinish:
   1250       return caps.can_generate_garbage_collection_events == 1;
   1251 
   1252     case ArtJvmtiEvent::kMethodEntry:
   1253       return caps.can_generate_method_entry_events == 1;
   1254 
   1255     case ArtJvmtiEvent::kMethodExit:
   1256       return caps.can_generate_method_exit_events == 1;
   1257 
   1258     case ArtJvmtiEvent::kMonitorContendedEnter:
   1259     case ArtJvmtiEvent::kMonitorContendedEntered:
   1260     case ArtJvmtiEvent::kMonitorWait:
   1261     case ArtJvmtiEvent::kMonitorWaited:
   1262       return caps.can_generate_monitor_events == 1;
   1263 
   1264     case ArtJvmtiEvent::kNativeMethodBind:
   1265       return caps.can_generate_native_method_bind_events == 1;
   1266 
   1267     case ArtJvmtiEvent::kObjectFree:
   1268       return caps.can_generate_object_free_events == 1;
   1269 
   1270     case ArtJvmtiEvent::kSingleStep:
   1271       return caps.can_generate_single_step_events == 1;
   1272 
   1273     case ArtJvmtiEvent::kVmObjectAlloc:
   1274       return caps.can_generate_vm_object_alloc_events == 1;
   1275 
   1276     default:
   1277       return true;
   1278   }
   1279 }
   1280 
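         // Entry point for JVMTI_ENABLE / JVMTI_DISABLE requests: validate the arguments and
         // capabilities, update the per-env and global event masks under the appropriate locks, and
         // then perform whatever listener installation or deoptimization work the change requires.
         //
         // For illustration only (not part of this file): an agent typically reaches this path through
         // the standard JVMTI calls, e.g.
         //
         //   jvmtiCapabilities caps = {};
         //   caps.can_generate_breakpoint_events = 1;
         //   jvmti->AddCapabilities(&caps);
         //   jvmti->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_BREAKPOINT, nullptr);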
   1281 jvmtiError EventHandler::SetEvent(ArtJvmTiEnv* env,
   1282                                   jthread thread,
   1283                                   ArtJvmtiEvent event,
   1284                                   jvmtiEventMode mode) {
   1285   if (mode != JVMTI_ENABLE && mode != JVMTI_DISABLE) {
   1286     return ERR(ILLEGAL_ARGUMENT);
   1287   }
   1288 
   1289   if (!EventMask::EventIsInRange(event)) {
   1290     return ERR(INVALID_EVENT_TYPE);
   1291   }
   1292 
   1293   if (!HasAssociatedCapability(env, event)) {
   1294     return ERR(MUST_POSSESS_CAPABILITY);
   1295   }
   1296 
   1297   if (thread != nullptr && !IsThreadControllable(event)) {
   1298     return ERR(ILLEGAL_ARGUMENT);
   1299   }
   1300 
   1301   art::Thread* self = art::Thread::Current();
   1302   art::Thread* target = nullptr;
   1303   ScopedNoUserCodeSuspension snucs(self);
    1304   // The overall state across all threads and jvmtiEnvs. This is used to control the state of the
    1305   // instrumentation handlers since we only want each one added once.
   1306   bool old_state;
   1307   bool new_state;
    1308   // The state for just the given 'thread' (including null) across all jvmtiEnvs. This is used to
    1309   // control the deoptimization state since we refcount that and need to perform different
    1310   // actions depending on whether the event is limited to a single thread or is global.
   1311   bool old_thread_state;
   1312   bool new_thread_state;
   1313   {
   1314     // From now on we know we cannot get suspended by user-code.
    1315     // NB: This does a SuspendCheck (during the thread state change), so we need to
    1316     // make sure we do not hold the 'suspend_lock' here.
   1317     art::ScopedObjectAccess soa(self);
   1318     art::WriterMutexLock el_mu(self, envs_lock_);
   1319     art::MutexLock tll_mu(self, *art::Locks::thread_list_lock_);
   1320     jvmtiError err = ERR(INTERNAL);
   1321     if (thread != nullptr) {
   1322       if (!ThreadUtil::GetAliveNativeThread(thread, soa, &target, &err)) {
   1323         return err;
   1324       } else if (target->IsStillStarting() ||
    1325                  target->GetState() == art::ThreadState::kStarting) {
   1326         target->Dump(LOG_STREAM(WARNING) << "Is not alive: ");
   1327         return ERR(THREAD_NOT_ALIVE);
   1328       }
   1329     }
   1330 
   1331 
   1332     art::WriterMutexLock ei_mu(self, env->event_info_mutex_);
   1333     old_thread_state = GetThreadEventState(event, target);
   1334     old_state = global_mask.Test(event);
   1335     if (mode == JVMTI_ENABLE) {
   1336       env->event_masks.EnableEvent(env, target, event);
   1337       global_mask.Set(event);
   1338       new_state = true;
   1339       new_thread_state = true;
   1340       DCHECK(GetThreadEventState(event, target));
   1341     } else {
   1342       DCHECK_EQ(mode, JVMTI_DISABLE);
   1343 
   1344       env->event_masks.DisableEvent(env, target, event);
   1345       RecalculateGlobalEventMaskLocked(event);
   1346       new_state = global_mask.Test(event);
   1347       new_thread_state = GetThreadEventState(event, target);
   1348       DCHECK(new_state || !new_thread_state);
   1349     }
   1350   }
   1351   // Handle any special work required for the event type. We still have the
   1352   // user_code_suspend_count_lock_ so there won't be any interleaving here.
   1353   if (new_state != old_state) {
   1354     HandleEventType(event, mode == JVMTI_ENABLE);
   1355   }
   1356   if (old_thread_state != new_thread_state) {
   1357     return HandleEventDeopt(event, thread, new_thread_state);
   1358   }
   1359   return OK;
   1360 }
   1361 
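         // Returns true if any environment currently has 'event' enabled for the given thread, or
         // enabled globally when 'thread' is null. SetEvent uses this to decide how the per-thread
         // deoptimization requirements need to change.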
   1362 bool EventHandler::GetThreadEventState(ArtJvmtiEvent event, art::Thread* thread) {
   1363   for (ArtJvmTiEnv* stored_env : envs) {
   1364     if (stored_env == nullptr) {
   1365       continue;
   1366     }
   1367     auto& masks = stored_env->event_masks;
   1368     if (thread == nullptr && masks.global_event_mask.Test(event)) {
   1369       return true;
   1370     } else if (thread != nullptr) {
    1371       EventMask* mask = masks.GetEventMaskOrNull(thread);
   1372       if (mask != nullptr && mask->Test(event)) {
   1373         return true;
   1374       }
   1375     }
   1376   }
   1377   return false;
   1378 }
   1379 
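         // Adjusts DeoptManager's deoptimization-requester refcount when breakpoint events are added
         // or removed; the DeoptManager tracks how many clients still require deoptimization support.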
   1380 void EventHandler::HandleBreakpointEventsChanged(bool added) {
   1381   if (added) {
   1382     DeoptManager::Get()->AddDeoptimizationRequester();
   1383   } else {
   1384     DeoptManager::Get()->RemoveDeoptimizationRequester();
   1385   }
   1386 }
   1387 
   1388 void EventHandler::Shutdown() {
   1389   // Need to remove the method_trace_listener_ if it's there.
   1390   art::Thread* self = art::Thread::Current();
   1391   art::gc::ScopedGCCriticalSection gcs(self,
   1392                                        art::gc::kGcCauseInstrumentation,
   1393                                        art::gc::kCollectorTypeInstrumentation);
   1394   art::ScopedSuspendAll ssa("jvmti method tracing uninstallation");
   1395   // Just remove every possible event.
   1396   art::Runtime::Current()->GetInstrumentation()->RemoveListener(method_trace_listener_.get(), ~0);
   1397 }
   1398 
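         // The listener objects are created eagerly here but are only registered with the runtime
         // once their corresponding events are actually enabled (see HandleEventType above).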
   1399 EventHandler::EventHandler()
   1400   : envs_lock_("JVMTI Environment List Lock", art::LockLevel::kPostMutatorTopLockLevel),
   1401     frame_pop_enabled(false) {
   1402   alloc_listener_.reset(new JvmtiAllocationListener(this));
   1403   ddm_listener_.reset(new JvmtiDdmChunkListener(this));
   1404   gc_pause_listener_.reset(new JvmtiGcPauseListener(this));
   1405   method_trace_listener_.reset(new JvmtiMethodTraceListener(this));
   1406   monitor_listener_.reset(new JvmtiMonitorListener(this));
   1407   park_listener_.reset(new JvmtiParkListener(this));
   1408 }
   1409 
   1410 EventHandler::~EventHandler() {
   1411 }
   1412 
   1413 }  // namespace openjdkjvmti
   1414