/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_H_
#define ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_H_

#include "base/casts.h"
#include "jni_internal-inl.h"
#include "read_barrier.h"
#include "thread-inl.h"
#include "verify_object.h"

namespace art {

// Scoped change into and out of a particular state. Handles Runnable transitions that require
// more complicated suspension checking. The subclasses ScopedObjectAccessUnchecked and
// ScopedObjectAccess handle the change into Runnable to get direct access to objects;
// the unchecked variant doesn't aid annotalysis.
class ScopedThreadStateChange {
 public:
  ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
      : self_(self), thread_state_(new_thread_state), expected_has_no_thread_(false) {
    if (UNLIKELY(self_ == nullptr)) {
      // Value chosen arbitrarily and won't be used in the destructor since self_ == nullptr.
      old_thread_state_ = kTerminated;
      Runtime* runtime = Runtime::Current();
      CHECK(runtime == nullptr || !runtime->IsStarted() || runtime->IsShuttingDown(self_));
    } else {
      DCHECK_EQ(self, Thread::Current());
      // Read state without locks, ok as state is effectively thread local and we're not interested
      // in the suspend count (this will be handled in the runnable transitions).
      old_thread_state_ = self->GetState();
      if (old_thread_state_ != new_thread_state) {
        if (new_thread_state == kRunnable) {
          self_->TransitionFromSuspendedToRunnable();
        } else if (old_thread_state_ == kRunnable) {
          self_->TransitionFromRunnableToSuspended(new_thread_state);
        } else {
          // A transition between two effectively suspended states; ok to set the state directly.
          self_->SetState(new_thread_state);
        }
      }
    }
  }

  ~ScopedThreadStateChange() LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE {
    if (UNLIKELY(self_ == nullptr)) {
      if (!expected_has_no_thread_) {
        Runtime* runtime = Runtime::Current();
        bool shutting_down = (runtime == nullptr) || runtime->IsShuttingDown(nullptr);
        CHECK(shutting_down);
      }
    } else {
      if (old_thread_state_ != thread_state_) {
        if (old_thread_state_ == kRunnable) {
          self_->TransitionFromSuspendedToRunnable();
        } else if (thread_state_ == kRunnable) {
          self_->TransitionFromRunnableToSuspended(old_thread_state_);
        } else {
          // A transition between two effectively suspended states; ok to set the state directly.
          self_->SetState(old_thread_state_);
        }
      }
    }
  }

  Thread* Self() const {
    return self_;
  }

 protected:
  // Constructor used by ScopedObjectAccessUnchecked for an unattached thread that has access to
  // the JavaVM.
  ScopedThreadStateChange()
      : self_(nullptr), thread_state_(kTerminated), old_thread_state_(kTerminated),
        expected_has_no_thread_(true) {}

  Thread* const self_;
  const ThreadState thread_state_;

 private:
  ThreadState old_thread_state_;
  const bool expected_has_no_thread_;

  friend class ScopedObjectAccessUnchecked;
  DISALLOW_COPY_AND_ASSIGN(ScopedThreadStateChange);
};
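
// A minimal usage sketch for illustration (the function and the wait it wraps are hypothetical,
// not part of this header): a Runnable thread about to block can scope a transition into an
// effectively suspended state so it doesn't hold up GC or thread suspension while waiting.
//
//   void WaitForCollection(Thread* self) {
//     ScopedThreadStateChange tsc(self, kWaitingForGcToComplete);
//     // Here the thread counts as suspended: it must not touch raw mirror::Object pointers,
//     // but GC and suspension requests are not blocked by it.
//     DoBlockingWait();  // Hypothetical blocking operation.
//   }  // tsc's destructor restores the previous state, e.g. kRunnable.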

// Assumes we are already runnable.
class ScopedObjectAccessAlreadyRunnable {
 public:
  Thread* Self() const {
    return self_;
  }

  JNIEnvExt* Env() const {
    return env_;
  }

  JavaVMExt* Vm() const {
    return vm_;
  }

  /*
   * Add a local reference for an object to the indirect reference table associated with the
   * current stack frame.  When the native function returns, the reference will be discarded.
   *
   * We need to allow the same reference to be added multiple times, and cope with NULL.
   *
   * This will be called on otherwise unreferenced objects. We cannot do GC allocations here, and
   * it's best if we don't grab a mutex.
   */
  template<typename T>
  T AddLocalReference(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
    if (obj == nullptr) {
      return nullptr;
    }
    // Check that obj doesn't look like a debug poison value (0xebad....).
    DCHECK_NE((reinterpret_cast<uintptr_t>(obj) & 0xffff0000), 0xebad0000);
    return Env()->AddLocalReference<T>(obj);
  }
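
  // For illustration, a hypothetical helper (not part of this header) showing the intended use:
  // convert a raw mirror::Object* into a local reference before it crosses back over the JNI
  // boundary.
  //
  //   jobject ToJObject(const ScopedObjectAccessAlreadyRunnable& soa, mirror::Object* obj)
  //       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  //     return soa.AddLocalReference<jobject>(obj);  // Discarded when the native method returns.
  //   }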

  template<typename T>
  T Decode(jobject obj) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
    return down_cast<T>(Self()->DecodeJObject(obj));
  }
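
  // And the inverse direction, again as a hypothetical sketch: decode a reference handed in by
  // native code back into a raw mirror pointer while Runnable.
  //
  //   mirror::Class* ToClass(const ScopedObjectAccessAlreadyRunnable& soa, jclass java_class)
  //       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  //     return soa.Decode<mirror::Class*>(java_class);
  //   }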

  mirror::ArtField* DecodeField(jfieldID fid) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
    CHECK(!kMovingFields);
    mirror::ArtField* field = reinterpret_cast<mirror::ArtField*>(fid);
    return ReadBarrier::BarrierForRoot<mirror::ArtField, kWithReadBarrier>(&field);
  }

  jfieldID EncodeField(mirror::ArtField* field) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
    CHECK(!kMovingFields);
    return reinterpret_cast<jfieldID>(field);
  }

  mirror::ArtMethod* DecodeMethod(jmethodID mid) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
    CHECK(!kMovingMethods);
    mirror::ArtMethod* method = reinterpret_cast<mirror::ArtMethod*>(mid);
    return ReadBarrier::BarrierForRoot<mirror::ArtMethod, kWithReadBarrier>(&method);
  }

  jmethodID EncodeMethod(mirror::ArtMethod* method) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
    CHECK(!kMovingMethods);
    return reinterpret_cast<jmethodID>(method);
  }
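
  // Note for illustration: jfieldID and jmethodID are simply the reinterpreted ArtField* and
  // ArtMethod* pointers (hence the CHECKs above that fields and methods are non-moving), so
  // encoding and decoding round-trip:
  //
  //   jmethodID mid = soa.EncodeMethod(method);
  //   DCHECK_EQ(soa.DecodeMethod(mid), method);  // Assuming no read barrier updates the root.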

  bool IsRunnable() const {
    return self_->GetState() == kRunnable;
  }

 protected:
  explicit ScopedObjectAccessAlreadyRunnable(JNIEnv* env)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
      : self_(ThreadForEnv(env)), env_(down_cast<JNIEnvExt*>(env)), vm_(env_->vm) {
  }

  explicit ScopedObjectAccessAlreadyRunnable(Thread* self)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
      : self_(self), env_(down_cast<JNIEnvExt*>(self->GetJniEnv())),
        vm_(env_ != nullptr ? env_->vm : nullptr) {
  }

  // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
  // change into Runnable or acquire a share on the mutator_lock_.
  explicit ScopedObjectAccessAlreadyRunnable(JavaVM* vm)
      : self_(nullptr), env_(nullptr), vm_(down_cast<JavaVMExt*>(vm)) {}

  // Here purely to force inlining.
  ~ScopedObjectAccessAlreadyRunnable() ALWAYS_INLINE {
  }

  // Self thread, can be null.
  Thread* const self_;
  // The full JNIEnv.
  JNIEnvExt* const env_;
  // The full JavaVM.
  JavaVMExt* const vm_;
};

// Entry/exit processing for transitions from Native to Runnable (i.e. within JNI functions).
//
// This class performs the necessary thread state switching to and from Runnable and lets us
// amortize the cost of working out the current thread. Additionally it lets us check (and repair)
// apps that are using a JNIEnv on the wrong thread. The class also decodes and encodes Objects
// into jobjects via its methods. Performing this here enforces the Runnable thread state
// for use of Object, thereby inhibiting the Object being modified by GC whilst native or VM code
// is also manipulating the Object.
//
// The destructor transitions back to the previous thread state, typically Native. In this state
// GC and thread suspension may occur.
//
// For annotalysis the subclass ScopedObjectAccess (below) makes it explicit that a share of the
// mutator_lock_ will be acquired on construction.
class ScopedObjectAccessUnchecked : public ScopedObjectAccessAlreadyRunnable {
 public:
  explicit ScopedObjectAccessUnchecked(JNIEnv* env)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
      : ScopedObjectAccessAlreadyRunnable(env), tsc_(Self(), kRunnable) {
    Self()->VerifyStack();
    Locks::mutator_lock_->AssertSharedHeld(Self());
  }

  explicit ScopedObjectAccessUnchecked(Thread* self)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
      : ScopedObjectAccessAlreadyRunnable(self), tsc_(self, kRunnable) {
    Self()->VerifyStack();
    Locks::mutator_lock_->AssertSharedHeld(Self());
  }

  // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
  // change into Runnable or acquire a share on the mutator_lock_.
  explicit ScopedObjectAccessUnchecked(JavaVM* vm) ALWAYS_INLINE
      : ScopedObjectAccessAlreadyRunnable(vm), tsc_() {}

 private:
  // The scoped thread state change makes sure that we are runnable and restores the thread state
  // in the destructor.
  const ScopedThreadStateChange tsc_;

  DISALLOW_COPY_AND_ASSIGN(ScopedObjectAccessUnchecked);
};

// Annotalysis-friendly variant of the above.
class ScopedObjectAccess : public ScopedObjectAccessUnchecked {
 public:
  explicit ScopedObjectAccess(JNIEnv* env)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
      : ScopedObjectAccessUnchecked(env) {
  }

  explicit ScopedObjectAccess(Thread* self)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
      : ScopedObjectAccessUnchecked(self) {
  }

  ~ScopedObjectAccess() UNLOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE {
    // The base class destructor, invoked after this one, releases the share of the lock.
  }

 private:
  // TODO: remove this constructor. It is used by check JNI's ScopedCheck to make it believe that
  //       routines operating with just a VM are sound. They are not, but when you have just a VM
  //       you cannot call the unsound routines.
  explicit ScopedObjectAccess(JavaVM* vm)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
      : ScopedObjectAccessUnchecked(vm) {}

  friend class ScopedCheck;
  DISALLOW_COPY_AND_ASSIGN(ScopedObjectAccess);
};
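
// For illustration, the typical shape of a JNI entry point using ScopedObjectAccess (the method
// itself is hypothetical; the real JNI functions live elsewhere in the runtime). Construction
// transitions the thread to Runnable and acquires a share of the mutator_lock_; destruction
// transitions back, typically to Native.
//
//   static jobject MyNativeMethod(JNIEnv* env, jobject java_this) {
//     ScopedObjectAccess soa(env);
//     mirror::Object* receiver = soa.Decode<mirror::Object*>(java_this);
//     // ... work on receiver while Runnable; GC cannot concurrently move or modify it ...
//     return soa.AddLocalReference<jobject>(receiver);
//   }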

}  // namespace art

#endif  // ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_H_