/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "interpreter.h"

#include <limits>

#include "common_throws.h"
#include "interpreter_common.h"
#include "mirror/string-inl.h"
#include "scoped_thread_state_change.h"
#include "ScopedLocalRef.h"
#include "stack.h"
#include "unstarted_runtime.h"
#include "mterp/mterp.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"

namespace art {
namespace interpreter {

static void InterpreterJni(Thread* self, ArtMethod* method, const StringPiece& shorty,
                           Object* receiver, uint32_t* args, JValue* result)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  // TODO: The following enters JNI code using a typedef-ed function rather than the JNI compiler;
  //       it should be removed and JNI compiled stubs used instead.
  ScopedObjectAccessUnchecked soa(self);
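  // Dispatch on the method's shorty. Only the small set of signatures needed here is handled;
  // any other shorty aborts via LOG(FATAL) below.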
  if (method->IsStatic()) {
    if (shorty == "L") {
      typedef jobject (fntype)(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), klass.get());
      }
      result->SetL(soa.Decode<Object*>(jresult));
    } else if (shorty == "V") {
      typedef void (fntype)(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get());
    } else if (shorty == "Z") {
      typedef jboolean (fntype)(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get()));
    } else if (shorty == "BI") {
      typedef jbyte (fntype)(JNIEnv*, jclass, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetB(fn(soa.Env(), klass.get(), args[0]));
    } else if (shorty == "II") {
      typedef jint (fntype)(JNIEnv*, jclass, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), args[0]));
    } else if (shorty == "LL") {
      typedef jobject (fntype)(JNIEnv*, jclass, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[0])));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), klass.get(), arg0.get());
      }
      result->SetL(soa.Decode<Object*>(jresult));
    } else if (shorty == "IIZ") {
      typedef jint (fntype)(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), args[0], args[1]));
    } else if (shorty == "ILI") {
      typedef jint (fntype)(JNIEnv*, jclass, jobject, jint);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(
          method->GetEntryPointFromJni()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[0])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), arg0.get(), args[1]));
    } else if (shorty == "SIZ") {
      typedef jshort (fntype)(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn =
          reinterpret_cast<fntype*>(const_cast<void*>(method->GetEntryPointFromJni()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetS(fn(soa.Env(), klass.get(), args[0], args[1]));
    } else if (shorty == "VIZ") {
      typedef void (fntype)(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), args[0], args[1]);
    } else if (shorty == "ZLL") {
      typedef jboolean (fntype)(JNIEnv*, jclass, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[0])));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[1])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get(), arg0.get(), arg1.get()));
    } else if (shorty == "ZILL") {
      typedef jboolean (fntype)(JNIEnv*, jclass, jint, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[1])));
      ScopedLocalRef<jobject> arg2(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[2])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get(), args[0], arg1.get(), arg2.get()));
    } else if (shorty == "VILII") {
      typedef void (fntype)(JNIEnv*, jclass, jint, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[1])));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), args[0], arg1.get(), args[2], args[3]);
    } else if (shorty == "VLILII") {
      typedef void (fntype)(JNIEnv*, jclass, jobject, jint, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[0])));
      ScopedLocalRef<jobject> arg2(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[2])));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), arg0.get(), args[1], arg2.get(), args[3], args[4]);
    } else {
      LOG(FATAL) << "Do something with static native method: " << PrettyMethod(method)
          << " shorty: " << shorty;
    }
  } else {
    if (shorty == "L") {
      typedef jobject (fntype)(JNIEnv*, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), rcvr.get());
      }
      result->SetL(soa.Decode<Object*>(jresult));
    } else if (shorty == "V") {
      typedef void (fntype)(JNIEnv*, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), rcvr.get());
    } else if (shorty == "LL") {
      typedef jobject (fntype)(JNIEnv*, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(
                                       reinterpret_cast<Object*>(args[0])));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), rcvr.get(), arg0.get());
      }
      result->SetL(soa.Decode<Object*>(jresult));
    } else if (shorty == "III") {
      typedef jint (fntype)(JNIEnv*, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), rcvr.get(), args[0], args[1]));
    } else {
      LOG(FATAL) << "Do something with native method: " << PrettyMethod(method)
          << " shorty: " << shorty;
    }
  }
}

enum InterpreterImplKind {
  kSwitchImplKind,        // Switch-based interpreter implementation.
  kComputedGotoImplKind,  // Computed-goto-based interpreter implementation.
  kMterpImplKind          // Assembly interpreter implementation.
};
static std::ostream& operator<<(std::ostream& os, const InterpreterImplKind& rhs) {
  os << ((rhs == kSwitchImplKind)
              ? "Switch-based interpreter"
              : (rhs == kComputedGotoImplKind)
                  ? "Computed-goto-based interpreter"
                  : "Asm interpreter");
  return os;
}

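// The implementation selected at compile time. Mterp (the assembly interpreter) is the default;
// Execute() below falls back to the switch interpreter where mterp cannot be used (transactions,
// an unstarted runtime, or instrumentation/debugging that mterp does not support).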
static constexpr InterpreterImplKind kInterpreterImplKind = kMterpImplKind;

#if defined(__clang__)
// Clang 3.4 fails to build the goto interpreter implementation.
template<bool do_access_check, bool transaction_active>
JValue ExecuteGotoImpl(Thread*, const DexFile::CodeItem*, ShadowFrame&, JValue) {
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
// Explicit definitions of ExecuteGotoImpl.
template<> SHARED_REQUIRES(Locks::mutator_lock_)
JValue ExecuteGotoImpl<true, false>(Thread* self, const DexFile::CodeItem* code_item,
                                    ShadowFrame& shadow_frame, JValue result_register);
template<> SHARED_REQUIRES(Locks::mutator_lock_)
JValue ExecuteGotoImpl<false, false>(Thread* self, const DexFile::CodeItem* code_item,
                                     ShadowFrame& shadow_frame, JValue result_register);
template<> SHARED_REQUIRES(Locks::mutator_lock_)
JValue ExecuteGotoImpl<true, true>(Thread* self, const DexFile::CodeItem* code_item,
                                   ShadowFrame& shadow_frame, JValue result_register);
template<> SHARED_REQUIRES(Locks::mutator_lock_)
JValue ExecuteGotoImpl<false, true>(Thread* self, const DexFile::CodeItem* code_item,
                                    ShadowFrame& shadow_frame, JValue result_register);
#endif

static inline JValue Execute(
    Thread* self,
    const DexFile::CodeItem* code_item,
    ShadowFrame& shadow_frame,
    JValue result_register,
    bool stay_in_interpreter = false) SHARED_REQUIRES(Locks::mutator_lock_) {
  DCHECK(!shadow_frame.GetMethod()->IsAbstract());
  DCHECK(!shadow_frame.GetMethod()->IsNative());
  if (LIKELY(shadow_frame.GetDexPC() == 0)) {  // Entering the method, but not via deoptimization.
    if (kIsDebugBuild) {
      self->AssertNoPendingException();
    }
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    ArtMethod* method = shadow_frame.GetMethod();

    if (UNLIKELY(instrumentation->HasMethodEntryListeners())) {
      instrumentation->MethodEnterEvent(self, shadow_frame.GetThisObject(code_item->ins_size_),
                                        method, 0);
    }

    if (!stay_in_interpreter) {
      jit::Jit* jit = Runtime::Current()->GetJit();
      if (jit != nullptr) {
        jit->MethodEntered(self, shadow_frame.GetMethod());
        if (jit->CanInvokeCompiledCode(method)) {
          JValue result;

          // Pop the shadow frame before calling into compiled code.
          self->PopShadowFrame();
          ArtInterpreterToCompiledCodeBridge(self, nullptr, code_item, &shadow_frame, &result);
          // Push the shadow frame back as the caller will expect it.
          self->PushShadowFrame(&shadow_frame);

          return result;
        }
      }
    }
  }

  shadow_frame.GetMethod()->GetDeclaringClass()->AssertInitializedOrInitializingInThread(self);

  // Lock counting is a special version of accessibility checks, and for simplicity and
  // reduction of template parameters, we gate it behind access-checks mode.
  ArtMethod* method = shadow_frame.GetMethod();
  DCHECK(!method->SkipAccessChecks() || !method->MustCountLocks());

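  // Transactions are only active while the runtime runs class initializers at compilation time
  // (e.g. for the boot image); they require the transactional interpreter variants chosen below.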
  bool transaction_active = Runtime::Current()->IsActiveTransaction();
  if (LIKELY(method->SkipAccessChecks())) {
    // Enter the "without access check" interpreter.
    if (kInterpreterImplKind == kMterpImplKind) {
      if (transaction_active) {
        // No Mterp variant - just use the switch interpreter.
        return ExecuteSwitchImpl<false, true>(self, code_item, shadow_frame, result_register,
                                              false);
      } else if (UNLIKELY(!Runtime::Current()->IsStarted())) {
        return ExecuteSwitchImpl<false, false>(self, code_item, shadow_frame, result_register,
                                               false);
      } else {
        while (true) {
          // Mterp does not support all instrumentation/debugging.
          if (MterpShouldSwitchInterpreters()) {
            return ExecuteSwitchImpl<false, false>(self, code_item, shadow_frame, result_register,
                                                   false);
          }
          bool returned = ExecuteMterpImpl(self, code_item, &shadow_frame, &result_register);
          if (returned) {
            return result_register;
          } else {
            // Mterp didn't like that instruction.  Single-step it with the reference interpreter.
            result_register = ExecuteSwitchImpl<false, false>(self, code_item, shadow_frame,
                                                              result_register, true);
            if (shadow_frame.GetDexPC() == DexFile::kDexNoIndex) {
              // Single-stepped a return or an exception not handled locally.  Return to caller.
              return result_register;
            }
          }
        }
      }
    } else if (kInterpreterImplKind == kSwitchImplKind) {
      if (transaction_active) {
        return ExecuteSwitchImpl<false, true>(self, code_item, shadow_frame, result_register,
                                              false);
      } else {
        return ExecuteSwitchImpl<false, false>(self, code_item, shadow_frame, result_register,
                                               false);
      }
    } else {
      DCHECK_EQ(kInterpreterImplKind, kComputedGotoImplKind);
      if (transaction_active) {
        return ExecuteGotoImpl<false, true>(self, code_item, shadow_frame, result_register);
      } else {
        return ExecuteGotoImpl<false, false>(self, code_item, shadow_frame, result_register);
      }
    }
  } else {
    // Enter the "with access check" interpreter.
    if (kInterpreterImplKind == kMterpImplKind) {
      // No access check variants for Mterp.  Just use the switch version.
      if (transaction_active) {
        return ExecuteSwitchImpl<true, true>(self, code_item, shadow_frame, result_register,
                                             false);
      } else {
        return ExecuteSwitchImpl<true, false>(self, code_item, shadow_frame, result_register,
                                              false);
      }
    } else if (kInterpreterImplKind == kSwitchImplKind) {
      if (transaction_active) {
        return ExecuteSwitchImpl<true, true>(self, code_item, shadow_frame, result_register,
                                             false);
      } else {
        return ExecuteSwitchImpl<true, false>(self, code_item, shadow_frame, result_register,
                                              false);
      }
    } else {
      DCHECK_EQ(kInterpreterImplKind, kComputedGotoImplKind);
      if (transaction_active) {
        return ExecuteGotoImpl<true, true>(self, code_item, shadow_frame, result_register);
      } else {
        return ExecuteGotoImpl<true, false>(self, code_item, shadow_frame, result_register);
      }
    }
  }
}

void EnterInterpreterFromInvoke(Thread* self, ArtMethod* method, Object* receiver,
                                uint32_t* args, JValue* result,
                                bool stay_in_interpreter) {
  DCHECK_EQ(self, Thread::Current());
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
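  // Hand-rolled stack overflow check: compare the current native frame address against the stack
  // end reserved for interpreter use on this thread.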
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  const char* old_cause = self->StartAssertNoThreadSuspension("EnterInterpreterFromInvoke");
  const DexFile::CodeItem* code_item = method->GetCodeItem();
  uint16_t num_regs;
  uint16_t num_ins;
  if (code_item != nullptr) {
    num_regs = code_item->registers_size_;
    num_ins = code_item->ins_size_;
  } else if (!method->IsInvokable()) {
    self->EndAssertNoThreadSuspension(old_cause);
    method->ThrowInvocationTimeError();
    return;
  } else {
    DCHECK(method->IsNative());
    num_regs = num_ins = ArtMethod::NumArgRegisters(method->GetShorty());
    if (!method->IsStatic()) {
      num_regs++;
      num_ins++;
    }
  }
  // Set up shadow frame with matching number of reference slots to vregs.
  ShadowFrame* last_shadow_frame = self->GetManagedStack()->GetTopShadowFrame();
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, last_shadow_frame, method, /* dex pc */ 0);
  ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get();
  self->PushShadowFrame(shadow_frame);

  size_t cur_reg = num_regs - num_ins;
  if (!method->IsStatic()) {
    CHECK(receiver != nullptr);
    shadow_frame->SetVRegReference(cur_reg, receiver);
    ++cur_reg;
  }
  uint32_t shorty_len = 0;
  const char* shorty = method->GetShorty(&shorty_len);
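  // shorty[0] is the return type, so incoming arguments start at shorty[1] (hence the +1 below).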
  for (size_t shorty_pos = 0, arg_pos = 0; cur_reg < num_regs; ++shorty_pos, ++arg_pos, cur_reg++) {
    DCHECK_LT(shorty_pos + 1, shorty_len);
    switch (shorty[shorty_pos + 1]) {
      case 'L': {
        Object* o = reinterpret_cast<StackReference<Object>*>(&args[arg_pos])->AsMirrorPtr();
        shadow_frame->SetVRegReference(cur_reg, o);
        break;
      }
      case 'J': case 'D': {
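        // Wide (long/double) arguments occupy two consecutive 32-bit slots, low word first.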
        uint64_t wide_value = (static_cast<uint64_t>(args[arg_pos + 1]) << 32) | args[arg_pos];
        shadow_frame->SetVRegLong(cur_reg, wide_value);
        cur_reg++;
        arg_pos++;
        break;
      }
      default:
        shadow_frame->SetVReg(cur_reg, args[arg_pos]);
        break;
    }
  }
  self->EndAssertNoThreadSuspension(old_cause);
  // Do this after populating the shadow frame in case EnsureInitialized causes a GC.
  if (method->IsStatic() && UNLIKELY(!method->GetDeclaringClass()->IsInitialized())) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(method->GetDeclaringClass()));
    if (UNLIKELY(!class_linker->EnsureInitialized(self, h_class, true, true))) {
      CHECK(self->IsExceptionPending());
      self->PopShadowFrame();
      return;
    }
  }
  if (LIKELY(!method->IsNative())) {
    JValue r = Execute(self, code_item, *shadow_frame, JValue(), stay_in_interpreter);
    if (result != nullptr) {
      *result = r;
    }
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    // Update args to be the args in the shadow frame since the input ones could hold stale
    // reference pointers due to moving GC.
    args = shadow_frame->GetVRegArgs(method->IsStatic() ? 0 : 1);
    if (!Runtime::Current()->IsStarted()) {
      UnstartedRuntime::Jni(self, method, receiver, args, result);
    } else {
      InterpreterJni(self, method, shorty, receiver, args, result);
    }
  }
  self->PopShadowFrame();
}

static bool IsStringInit(const Instruction* instr, ArtMethod* caller)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  if (instr->Opcode() == Instruction::INVOKE_DIRECT ||
      instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE) {
    // Instead of calling ResolveMethod() which has a suspend point and can trigger
    // GC, look up the callee method symbolically.
    uint16_t callee_method_idx = (instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE) ?
        instr->VRegB_3rc() : instr->VRegB_35c();
    const DexFile* dex_file = caller->GetDexFile();
    const DexFile::MethodId& method_id = dex_file->GetMethodId(callee_method_idx);
    const char* class_name = dex_file->StringByTypeIdx(method_id.class_idx_);
    const char* method_name = dex_file->GetMethodName(method_id);
    // Compare method's class name and method name against string init.
    // It's ok since it's not allowed to create your own java/lang/String.
    // TODO: verify that assumption.
    if ((strcmp(class_name, "Ljava/lang/String;") == 0) &&
        (strcmp(method_name, "<init>") == 0)) {
      return true;
    }
  }
  return false;
}

static int16_t GetReceiverRegisterForStringInit(const Instruction* instr) {
  DCHECK(instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE ||
         instr->Opcode() == Instruction::INVOKE_DIRECT);
  return (instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE) ?
      instr->VRegC_3rc() : instr->VRegC_35c();
}

void EnterInterpreterFromDeoptimize(Thread* self,
                                    ShadowFrame* shadow_frame,
                                    bool from_code,
                                    JValue* ret_val)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  JValue value;
  // Set value to last known result in case the shadow frame chain is empty.
  value.SetJ(ret_val->GetJ());
  // Are we executing the first shadow frame?
  bool first = true;
  while (shadow_frame != nullptr) {
    // We do not want to recover lock state for lock counting when deoptimizing. Currently,
    // the compiler should not have compiled a method that failed structured-locking checks.
    DCHECK(!shadow_frame->GetMethod()->MustCountLocks());

    self->SetTopOfShadowStack(shadow_frame);
    const DexFile::CodeItem* code_item = shadow_frame->GetMethod()->GetCodeItem();
    const uint32_t dex_pc = shadow_frame->GetDexPC();
    uint32_t new_dex_pc = dex_pc;
    if (UNLIKELY(self->IsExceptionPending())) {
      // If we deoptimize from the QuickExceptionHandler, we already reported the exception to
      // the instrumentation. To avoid reporting it a second time, we simply pass a
      // null Instrumentation*.
      const instrumentation::Instrumentation* const instrumentation =
          first ? nullptr : Runtime::Current()->GetInstrumentation();
      uint32_t found_dex_pc = FindNextInstructionFollowingException(self, *shadow_frame, dex_pc,
                                                                    instrumentation);
      new_dex_pc = found_dex_pc;  // the dex pc of a matching catch handler
                                  // or DexFile::kDexNoIndex if there is none.
    } else if (!from_code) {
      // For the debugger and full deoptimization stack, we must go past the invoke
      // instruction, as it already executed.
      // TODO: should be tested more once b/17586779 is fixed.
      const Instruction* instr = Instruction::At(&code_item->insns_[dex_pc]);
      if (instr->IsInvoke()) {
        if (IsStringInit(instr, shadow_frame->GetMethod())) {
          uint16_t this_obj_vreg = GetReceiverRegisterForStringInit(instr);
          // Move the StringFactory.newStringFromChars() result into the register representing
          // "this object" when invoking the string constructor in the original dex instruction.
          // Also move the result into all aliases.
          DCHECK(value.GetL()->IsString());
          SetStringInitValueToAllAliases(shadow_frame, this_obj_vreg, value);
          // Calling string constructor in the original dex code doesn't generate a result value.
          value.SetJ(0);
        }
        new_dex_pc = dex_pc + instr->SizeInCodeUnits();
      } else if (instr->Opcode() == Instruction::NEW_INSTANCE) {
        // It's possible to deoptimize at a NEW_INSTANCE dex instruction that allocates a
        // java.lang.String, which is turned into a call to StringFactory.newEmptyString().
        // Move the StringFactory.newEmptyString() result into the destination register.
        DCHECK(value.GetL()->IsString());
        shadow_frame->SetVRegReference(instr->VRegA_21c(), value.GetL());
        // new-instance doesn't generate a result value.
        value.SetJ(0);
        // Skip the dex instruction since we essentially come back from an invocation.
        new_dex_pc = dex_pc + instr->SizeInCodeUnits();
        if (kIsDebugBuild) {
          ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
          // This is a suspend point. But it's ok since value has been set into shadow_frame.
          mirror::Class* klass = class_linker->ResolveType(
              instr->VRegB_21c(), shadow_frame->GetMethod());
          DCHECK(klass->IsStringClass());
        }
      } else {
        CHECK(false) << "Unexpected instruction opcode " << instr->Opcode()
                     << " at dex_pc " << dex_pc
                     << " of method: " << PrettyMethod(shadow_frame->GetMethod(), false);
      }
    } else {
      // Nothing to do, the dex_pc is the one at which the code requested
      // the deoptimization.
    }
    if (new_dex_pc != DexFile::kDexNoIndex) {
      shadow_frame->SetDexPC(new_dex_pc);
      value = Execute(self, code_item, *shadow_frame, value);
    }
    ShadowFrame* old_frame = shadow_frame;
    shadow_frame = shadow_frame->GetLink();
    ShadowFrame::DeleteDeoptimizedFrame(old_frame);
    // Following deoptimizations of shadow frames must pass the invoke instruction.
    from_code = false;
    first = false;
  }
  ret_val->SetJ(value.GetJ());
}

JValue EnterInterpreterFromEntryPoint(Thread* self, const DexFile::CodeItem* code_item,
                                      ShadowFrame* shadow_frame) {
  DCHECK_EQ(self, Thread::Current());
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return JValue();
  }

  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr) {
    jit->NotifyCompiledCodeToInterpreterTransition(self, shadow_frame->GetMethod());
  }
  return Execute(self, code_item, *shadow_frame, JValue());
}

void ArtInterpreterToInterpreterBridge(Thread* self, const DexFile::CodeItem* code_item,
                                       ShadowFrame* shadow_frame, JValue* result) {
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  self->PushShadowFrame(shadow_frame);
  ArtMethod* method = shadow_frame->GetMethod();
  // Ensure the static method's declaring class is initialized.
  const bool is_static = method->IsStatic();
  if (is_static) {
    mirror::Class* declaring_class = method->GetDeclaringClass();
    if (UNLIKELY(!declaring_class->IsInitialized())) {
      StackHandleScope<1> hs(self);
      HandleWrapper<Class> h_declaring_class(hs.NewHandleWrapper(&declaring_class));
      if (UNLIKELY(!Runtime::Current()->GetClassLinker()->EnsureInitialized(
          self, h_declaring_class, true, true))) {
        DCHECK(self->IsExceptionPending());
        self->PopShadowFrame();
        return;
      }
      CHECK(h_declaring_class->IsInitializing());
    }
  }

  if (LIKELY(!shadow_frame->GetMethod()->IsNative())) {
    result->SetJ(Execute(self, code_item, *shadow_frame, JValue()).GetJ());
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    CHECK(!Runtime::Current()->IsStarted());
    Object* receiver = is_static ? nullptr : shadow_frame->GetVRegReference(0);
    uint32_t* args = shadow_frame->GetVRegArgs(is_static ? 0 : 1);
    UnstartedRuntime::Jni(self, shadow_frame->GetMethod(), receiver, args, result);
  }

  self->PopShadowFrame();
}

void CheckInterpreterAsmConstants() {
  CheckMterpAsmConstants();
}

void InitInterpreterTls(Thread* self) {
  InitMterpTls(self);
}

}  // namespace interpreter
}  // namespace art