/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <android-base/logging.h>

#include "art_method-inl.h"
#include "base/casts.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "indirect_reference_table.h"
#include "mirror/object-inl.h"
#include "thread-inl.h"
#include "verify_object.h"

namespace art {

static_assert(sizeof(IRTSegmentState) == sizeof(uint32_t), "IRTSegmentState size unexpected");
static_assert(std::is_trivial<IRTSegmentState>::value, "IRTSegmentState not trivial");

static inline void GoToRunnableFast(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);

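// Read barrier for the declaring class reference spilled on the stack by a static JNI call.
// With the Baker read barrier the call returns early if the class is already marked; otherwise
// the reference is updated in place.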
extern void ReadBarrierJni(mirror::CompressedReference<mirror::Object>* handle_on_stack,
                           Thread* self ATTRIBUTE_UNUSED) {
  DCHECK(kUseReadBarrier);
  if (kUseBakerReadBarrier) {
    DCHECK(handle_on_stack->AsMirrorPtr() != nullptr)
        << "The class of a static jni call must not be null";
    // Check the mark bit and return early if it's already marked.
    if (LIKELY(handle_on_stack->AsMirrorPtr()->GetMarkBit() != 0)) {
      return;
    }
  }
  // Call the read barrier and update the handle.
  mirror::Object* to_ref = ReadBarrier::BarrierForRoot(handle_on_stack);
  handle_on_stack->Assign(to_ref);
}

// Called on entry to fast JNI, push a new local reference table only.
extern uint32_t JniMethodFastStart(Thread* self) {
  JNIEnvExt* env = self->GetJniEnv();
  DCHECK(env != nullptr);
  uint32_t saved_local_ref_cookie = bit_cast<uint32_t>(env->GetLocalRefCookie());
  env->SetLocalRefCookie(env->GetLocalsSegmentState());

  if (kIsDebugBuild) {
    ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
    CHECK(native_method->IsFastNative()) << native_method->PrettyMethod();
  }

  return saved_local_ref_cookie;
}

// Called on entry to JNI, transition out of Runnable and release share of mutator_lock_.
extern uint32_t JniMethodStart(Thread* self) {
  JNIEnvExt* env = self->GetJniEnv();
  DCHECK(env != nullptr);
  uint32_t saved_local_ref_cookie = bit_cast<uint32_t>(env->GetLocalRefCookie());
  env->SetLocalRefCookie(env->GetLocalsSegmentState());
  ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
  // TODO: Introduce special entrypoint for synchronized @FastNative methods?
  //       Or ban synchronized @FastNative outright to avoid the extra check here?
  DCHECK(!native_method->IsFastNative() || native_method->IsSynchronized());
  if (!native_method->IsFastNative()) {
    // When not fast JNI we transition out of runnable.
    self->TransitionFromRunnableToSuspended(kNative);
  }
  return saved_local_ref_cookie;
}

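// Called on entry to a synchronized JNI method: lock the object (the receiver, or the declaring
// class for static methods) before doing the regular JNI start.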
extern uint32_t JniMethodStartSynchronized(jobject to_lock, Thread* self) {
  self->DecodeJObject(to_lock)->MonitorEnter(self);
  return JniMethodStart(self);
}

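// Transition the thread back to Runnable on the way out of native code. @FastNative methods never
// left the Runnable state, so they only need the suspend check in GoToRunnableFast().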
// TODO: NO_THREAD_SAFETY_ANALYSIS due to different control paths depending on fast JNI.
static void GoToRunnable(Thread* self) NO_THREAD_SAFETY_ANALYSIS {
  ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
  bool is_fast = native_method->IsFastNative();
  if (!is_fast) {
    self->TransitionFromSuspendedToRunnable();
  } else {
    GoToRunnableFast(self);
  }
}

ALWAYS_INLINE static inline void GoToRunnableFast(Thread* self) {
  if (kIsDebugBuild) {
    // Should only enter here if the method is @FastNative.
    ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
    CHECK(native_method->IsFastNative()) << native_method->PrettyMethod();
  }

  // When we are in @FastNative, we are already Runnable.
  // Only do a suspend check on the way out of JNI.
  if (UNLIKELY(self->TestAllFlags())) {
    // In fast JNI mode we never transitioned out of runnable. Perform a suspend check if there
    // is a flag raised.
    DCHECK(Locks::mutator_lock_->IsSharedHeld(self));
    self->CheckSuspend();
  }
}

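// Restore the local reference table segment state saved on entry, releasing any local references
// created by the native method, and pop the top handle scope. With CheckJNI enabled, also verify
// that the native method did not return while still holding monitors.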
static void PopLocalReferences(uint32_t saved_local_ref_cookie, Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JNIEnvExt* env = self->GetJniEnv();
  if (UNLIKELY(env->IsCheckJniEnabled())) {
    env->CheckNoHeldMonitors();
  }
  env->SetLocalSegmentState(env->GetLocalRefCookie());
  env->SetLocalRefCookie(bit_cast<IRTSegmentState>(saved_local_ref_cookie));
  self->PopHandleScope();
}

// TODO: These should probably be templatized or macro-ized.
// Otherwise there's just too much repetitive boilerplate.

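// Called on exit from JNI: transition back to Runnable and pop local references.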
extern void JniMethodEnd(uint32_t saved_local_ref_cookie, Thread* self) {
  GoToRunnable(self);
  PopLocalReferences(saved_local_ref_cookie, self);
}

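// Called on exit from a @FastNative method: suspend check if needed, then pop local references.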
extern void JniMethodFastEnd(uint32_t saved_local_ref_cookie, Thread* self) {
  GoToRunnableFast(self);
  PopLocalReferences(saved_local_ref_cookie, self);
}

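// Called on exit from a synchronized JNI method: transition back to Runnable, release the monitor
// acquired in JniMethodStartSynchronized(), then pop local references.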
extern void JniMethodEndSynchronized(uint32_t saved_local_ref_cookie,
                                     jobject locked,
                                     Thread* self) {
  GoToRunnable(self);
  UnlockJniSynchronizedMethod(locked, self);  // Must decode before pop.
  PopLocalReferences(saved_local_ref_cookie, self);
}

// Common result handling for EndWithReference.
static mirror::Object* JniMethodEndWithReferenceHandleResult(jobject result,
                                                             uint32_t saved_local_ref_cookie,
                                                             Thread* self)
    NO_THREAD_SAFETY_ANALYSIS {
  // Must decode before pop. The 'result' may not be valid in case of an exception, though.
  ObjPtr<mirror::Object> o;
  if (!self->IsExceptionPending()) {
    o = self->DecodeJObject(result);
  }
  PopLocalReferences(saved_local_ref_cookie, self);
  // Process result.
  if (UNLIKELY(self->GetJniEnv()->IsCheckJniEnabled())) {
    // CheckReferenceResult can resolve types.
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Object> h_obj(hs.NewHandleWrapper(&o));
    CheckReferenceResult(h_obj, self);
  }
  VerifyObject(o);
  return o.Ptr();
}

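// @FastNative variant of JniMethodEndWithReference(): no thread state transition, only a suspend
// check before decoding the reference result.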
extern mirror::Object* JniMethodFastEndWithReference(jobject result,
                                                     uint32_t saved_local_ref_cookie,
                                                     Thread* self) {
  GoToRunnableFast(self);
  return JniMethodEndWithReferenceHandleResult(result, saved_local_ref_cookie, self);
}

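// Called on exit from a JNI method that returns a reference: transition back to Runnable, then
// decode the result before popping local references.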
extern mirror::Object* JniMethodEndWithReference(jobject result,
                                                 uint32_t saved_local_ref_cookie,
                                                 Thread* self) {
  GoToRunnable(self);
  return JniMethodEndWithReferenceHandleResult(result, saved_local_ref_cookie, self);
}

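// Synchronized variant of JniMethodEndWithReference(): the monitor is released before the
// reference result is decoded and the local references are popped.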
extern mirror::Object* JniMethodEndWithReferenceSynchronized(jobject result,
                                                             uint32_t saved_local_ref_cookie,
                                                             jobject locked,
                                                             Thread* self) {
  GoToRunnable(self);
  UnlockJniSynchronizedMethod(locked, self);
  return JniMethodEndWithReferenceHandleResult(result, saved_local_ref_cookie, self);
}

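// Called on exit from the Generic JNI trampoline. Handles normal, @FastNative and @CriticalNative
// methods: performs the thread state transition for normal natives, releases the monitor of
// synchronized methods, pops local references (except for @CriticalNative), and packs the native
// result into a uint64_t according to the method's return type.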
extern uint64_t GenericJniMethodEnd(Thread* self,
                                    uint32_t saved_local_ref_cookie,
                                    jvalue result,
                                    uint64_t result_f,
                                    ArtMethod* called,
                                    HandleScope* handle_scope)
    // TODO: NO_THREAD_SAFETY_ANALYSIS as GoToRunnable() is NO_THREAD_SAFETY_ANALYSIS
    NO_THREAD_SAFETY_ANALYSIS {
  bool critical_native = called->IsCriticalNative();
  bool fast_native = called->IsFastNative();
  bool normal_native = !critical_native && !fast_native;

  // @Fast and @CriticalNative do not do a state transition.
  if (LIKELY(normal_native)) {
    GoToRunnable(self);
  }
  // We need the mutator lock (i.e., calling GoToRunnable()) before accessing the shorty or the
  // locked object.
  jobject locked = called->IsSynchronized() ? handle_scope->GetHandle(0).ToJObject() : nullptr;
  char return_shorty_char = called->GetShorty()[0];
  if (return_shorty_char == 'L') {
    if (locked != nullptr) {
      DCHECK(normal_native) << " synchronized @FastNative methods are not supported";
      UnlockJniSynchronizedMethod(locked, self);
    }
    return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceHandleResult(
        result.l, saved_local_ref_cookie, self));
  } else {
    if (locked != nullptr) {
      DCHECK(normal_native) << " synchronized @FastNative methods are not supported";
      UnlockJniSynchronizedMethod(locked, self);  // Must decode before pop.
    }
    if (LIKELY(!critical_native)) {
      PopLocalReferences(saved_local_ref_cookie, self);
    }
    switch (return_shorty_char) {
      case 'F': {
        if (kRuntimeISA == InstructionSet::kX86) {
          // Convert back the result to float.
          double d = bit_cast<double, uint64_t>(result_f);
          return bit_cast<uint32_t, float>(static_cast<float>(d));
        } else {
          return result_f;
        }
      }
      case 'D':
        return result_f;
      case 'Z':
        return result.z;
      case 'B':
        return result.b;
      case 'C':
        return result.c;
      case 'S':
        return result.s;
      case 'I':
        return result.i;
      case 'J':
        return result.j;
      case 'V':
        return 0;
      default:
        LOG(FATAL) << "Unexpected return shorty character " << return_shorty_char;
        return 0;
    }
  }
}

}  // namespace art