/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "quick_exception_handler.h"

#include "arch/context.h"
#include "art_method-inl.h"
#include "base/enums.h"
#include "base/logging.h"  // For VLOG_IS_ON.
#include "dex/dex_file_types.h"
#include "dex/dex_instruction.h"
#include "entrypoints/entrypoint_utils.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "handle_scope-inl.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/throwable.h"
#include "oat_quick_method_header.h"
#include "stack.h"
#include "stack_map.h"

namespace art {

static constexpr bool kDebugExceptionDelivery = false;
static constexpr size_t kInvalidFrameDepth = 0xffffffff;

QuickExceptionHandler::QuickExceptionHandler(Thread* self, bool is_deoptimization)
    : self_(self),
      context_(self->GetLongJumpContext()),
      is_deoptimization_(is_deoptimization),
      method_tracing_active_(is_deoptimization ||
                             Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()),
      handler_quick_frame_(nullptr),
      handler_quick_frame_pc_(0),
      handler_method_header_(nullptr),
      handler_quick_arg0_(0),
      handler_method_(nullptr),
      handler_dex_pc_(0),
      clear_exception_(false),
      handler_frame_depth_(kInvalidFrameDepth),
      full_fragment_done_(false) {}

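// A sketch of a typical delivery sequence (the real call site is
// Thread::QuickDeliverException in thread.cc):
//
//   QuickExceptionHandler handler(self, /* is_deoptimization */ false);
//   handler.FindCatch(exception);          // Walk the stack for a handler.
//   handler.UpdateInstrumentationStack();  // Pop instrumentation frames.
//   handler.DoLongJump();                  // Jump to the handler; never returns.
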
// Finds catch handler.
class CatchBlockStackVisitor FINAL : public StackVisitor {
 public:
  CatchBlockStackVisitor(Thread* self, Context* context, Handle<mirror::Throwable>* exception,
                         QuickExceptionHandler* exception_handler)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_(exception),
        exception_handler_(exception_handler) {
  }

  bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod* method = GetMethod();
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    if (method == nullptr) {
      // This is the upcall; remember the frame and last pc so that we may long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
      uint32_t next_dex_pc;
      ArtMethod* next_art_method;
      bool has_next = GetNextMethodAndDexPc(&next_art_method, &next_dex_pc);
      // Report the method that did the down call as the handler.
      exception_handler_->SetHandlerDexPc(next_dex_pc);
      exception_handler_->SetHandlerMethod(next_art_method);
      if (!has_next) {
        // No next method? Check that the exception handler is set up for the
        // unhandled-exception case.
        DCHECK_EQ(0U, exception_handler_->GetHandlerDexPc());
        DCHECK(nullptr == exception_handler_->GetHandlerMethod());
      }
      return false;  // End stack walk.
    }
    if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    }
    return HandleTryItems(method);
  }

 private:
  bool HandleTryItems(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    uint32_t dex_pc = dex::kDexNoIndex;
    if (!method->IsNative()) {
      dex_pc = GetDexPc();
    }
    if (dex_pc != dex::kDexNoIndex) {
      bool clear_exception = false;
      StackHandleScope<1> hs(GetThread());
      Handle<mirror::Class> to_find(hs.NewHandle((*exception_)->GetClass()));
      uint32_t found_dex_pc = method->FindCatchBlock(to_find, dex_pc, &clear_exception);
      exception_handler_->SetClearException(clear_exception);
      if (found_dex_pc != dex::kDexNoIndex) {
        exception_handler_->SetHandlerMethod(method);
        exception_handler_->SetHandlerDexPc(found_dex_pc);
        exception_handler_->SetHandlerQuickFramePc(
            GetCurrentOatQuickMethodHeader()->ToNativeQuickPc(
                method, found_dex_pc, /* is_catch_handler */ true));
        exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
        exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
        return false;  // End stack walk.
      } else if (UNLIKELY(GetThread()->HasDebuggerShadowFrames())) {
        // We are going to unwind this frame. Did we prepare a shadow frame for debugging?
        size_t frame_id = GetFrameId();
        ShadowFrame* frame = GetThread()->FindDebuggerShadowFrame(frame_id);
        if (frame != nullptr) {
          // We will not execute this shadow frame, so we can safely deallocate it.
          GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
          ShadowFrame::DeleteDeoptimizedFrame(frame);
        }
      }
    }
    return true;  // Continue stack walk.
  }

  // The exception whose catch block we are looking for.
  Handle<mirror::Throwable>* exception_;
  // The quick exception handler on whose behalf we walk the stack.
  QuickExceptionHandler* const exception_handler_;

  DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
};

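// Entry point for exception delivery: walks the stack with
// CatchBlockStackVisitor to locate a catch block (or the upcall if there is
// none), re-installs the exception as the thread's pending exception unless
// delivery requires it cleared, and, if the handler is in optimized code,
// populates the catch phi slots. The actual transfer of control is left to
// DoLongJump().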
void QuickExceptionHandler::FindCatch(ObjPtr<mirror::Throwable> exception) {
  DCHECK(!is_deoptimization_);
  StackHandleScope<1> hs(self_);
  Handle<mirror::Throwable> exception_ref(hs.NewHandle(exception));
  if (kDebugExceptionDelivery) {
    ObjPtr<mirror::String> msg = exception_ref->GetDetailMessage();
    std::string str_msg(msg != nullptr ? msg->ToModifiedUtf8() : "");
    self_->DumpStack(LOG_STREAM(INFO) << "Delivering exception: " << exception_ref->PrettyTypeOf()
                     << ": " << str_msg << "\n");
  }

  // Walk the stack to find catch handler.
  CatchBlockStackVisitor visitor(self_, context_, &exception_ref, this);
  visitor.WalkStack(true);

  if (kDebugExceptionDelivery) {
    if (*handler_quick_frame_ == nullptr) {
      LOG(INFO) << "Handler is upcall";
    }
    if (handler_method_ != nullptr) {
      const DexFile* dex_file = handler_method_->GetDeclaringClass()->GetDexCache()->GetDexFile();
      int line_number = annotations::GetLineNumFromPC(dex_file, handler_method_, handler_dex_pc_);
      LOG(INFO) << "Handler: " << handler_method_->PrettyMethod() << " (line: "
                << line_number << ")";
    }
  }
  // Exception was cleared as part of delivery.
  DCHECK(!self_->IsExceptionPending());
  if (!clear_exception_) {
    // Put exception back in root set with clear throw location.
    self_->SetException(exception_ref.Get());
  }
  // If the handler is in optimized code, we need to set the catch environment.
  if (*handler_quick_frame_ != nullptr &&
      handler_method_header_ != nullptr &&
      handler_method_header_->IsOptimized()) {
    SetCatchEnvironmentForOptimizedHandler(&visitor);
  }
}

static VRegKind ToVRegKind(DexRegisterLocation::Kind kind) {
  // Slightly hacky since we cannot map DexRegisterLocation::Kind and VRegKind
  // one to one. However, StackVisitor::GetVRegFromOptimizedCode only needs to
  // distinguish between core/FPU registers and low/high bits on 64-bit.
  switch (kind) {
    case DexRegisterLocation::Kind::kConstant:
    case DexRegisterLocation::Kind::kInStack:
      // VRegKind is ignored.
      return VRegKind::kUndefined;

    case DexRegisterLocation::Kind::kInRegister:
      // Selects core register. For 64-bit registers, selects low 32 bits.
      return VRegKind::kLongLoVReg;

    case DexRegisterLocation::Kind::kInRegisterHigh:
      // Selects core register. For 64-bit registers, selects high 32 bits.
      return VRegKind::kLongHiVReg;

    case DexRegisterLocation::Kind::kInFpuRegister:
      // Selects FPU register. For 64-bit registers, selects low 32 bits.
      return VRegKind::kDoubleLoVReg;

    case DexRegisterLocation::Kind::kInFpuRegisterHigh:
      // Selects FPU register. For 64-bit registers, selects high 32 bits.
      return VRegKind::kDoubleHiVReg;

    default:
      LOG(FATAL) << "Unexpected vreg location " << kind;
      UNREACHABLE();
  }
}

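// Optimized code expects a catch block's live dex registers ("catch phis")
// in the stack slots described by the catch block's DexRegisterMap, which
// for catch handlers is always kInStack (see the DCHECK below). We read each
// vreg from wherever the throwing instruction's stack map says it currently
// lives and copy it into the catch phi's slot, so the handler starts with a
// consistent environment.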
void QuickExceptionHandler::SetCatchEnvironmentForOptimizedHandler(StackVisitor* stack_visitor) {
  DCHECK(!is_deoptimization_);
  DCHECK(*handler_quick_frame_ != nullptr) << "Method should not be called on upcall exceptions";
  DCHECK(handler_method_ != nullptr && handler_method_header_->IsOptimized());

  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG_STREAM(INFO) << "Setting catch phis: ");
  }

  CodeItemDataAccessor accessor(handler_method_->DexInstructionData());
  const size_t number_of_vregs = accessor.RegistersSize();
  CodeInfo code_info = handler_method_header_->GetOptimizedCodeInfo();
  CodeInfoEncoding encoding = code_info.ExtractEncoding();

  // Find stack map of the catch block.
  StackMap catch_stack_map = code_info.GetCatchStackMapForDexPc(GetHandlerDexPc(), encoding);
  DCHECK(catch_stack_map.IsValid());
  DexRegisterMap catch_vreg_map =
      code_info.GetDexRegisterMapOf(catch_stack_map, encoding, number_of_vregs);
  if (!catch_vreg_map.IsValid()) {
    return;
  }

  // Find stack map of the throwing instruction.
  StackMap throw_stack_map =
      code_info.GetStackMapForNativePcOffset(stack_visitor->GetNativePcOffset(), encoding);
  DCHECK(throw_stack_map.IsValid());
  DexRegisterMap throw_vreg_map =
      code_info.GetDexRegisterMapOf(throw_stack_map, encoding, number_of_vregs);
  DCHECK(throw_vreg_map.IsValid());

  // Copy values between them.
  for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
    DexRegisterLocation::Kind catch_location =
        catch_vreg_map.GetLocationKind(vreg, number_of_vregs, code_info, encoding);
    if (catch_location == DexRegisterLocation::Kind::kNone) {
      continue;
    }
    DCHECK(catch_location == DexRegisterLocation::Kind::kInStack);

    // Get vreg value from its current location.
    uint32_t vreg_value;
    VRegKind vreg_kind = ToVRegKind(throw_vreg_map.GetLocationKind(vreg,
                                                                   number_of_vregs,
                                                                   code_info,
                                                                   encoding));
    bool get_vreg_success = stack_visitor->GetVReg(stack_visitor->GetMethod(),
                                                   vreg,
                                                   vreg_kind,
                                                   &vreg_value);
    CHECK(get_vreg_success) << "VReg " << vreg << " was optimized out ("
                            << "method=" << ArtMethod::PrettyMethod(stack_visitor->GetMethod())
                            << ", dex_pc=" << stack_visitor->GetDexPc() << ", "
                            << "native_pc_offset=" << stack_visitor->GetNativePcOffset() << ")";

    // Copy value to the catch phi's stack slot.
    int32_t slot_offset = catch_vreg_map.GetStackOffsetInBytes(vreg,
                                                               number_of_vregs,
                                                               code_info,
                                                               encoding);
    ArtMethod** frame_top = stack_visitor->GetCurrentQuickFrame();
    uint8_t* slot_address = reinterpret_cast<uint8_t*>(frame_top) + slot_offset;
    uint32_t* slot_ptr = reinterpret_cast<uint32_t*>(slot_address);
    *slot_ptr = vreg_value;
  }
}

// Prepares deoptimization: walks the quick frames being deoptimized and
// builds a ShadowFrame for each of them, so the interpreter can resume
// execution where the compiled code left off.
class DeoptimizeStackVisitor FINAL : public StackVisitor {
 public:
  DeoptimizeStackVisitor(Thread* self,
                         Context* context,
                         QuickExceptionHandler* exception_handler,
                         bool single_frame)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_handler_(exception_handler),
        prev_shadow_frame_(nullptr),
        stacked_shadow_frame_pushed_(false),
        single_frame_deopt_(single_frame),
        single_frame_done_(false),
        single_frame_deopt_method_(nullptr),
        single_frame_deopt_quick_method_header_(nullptr),
        callee_method_(nullptr) {
  }

  ArtMethod* GetSingleFrameDeoptMethod() const {
    return single_frame_deopt_method_;
  }

  const OatQuickMethodHeader* GetSingleFrameDeoptQuickMethodHeader() const {
    return single_frame_deopt_quick_method_header_;
  }

  void FinishStackWalk() REQUIRES_SHARED(Locks::mutator_lock_) {
    // This is the upcall, or the next full frame in single-frame deopt, or the
    // code isn't deoptimizeable. We remember the frame and last pc so that we
    // may long jump to them.
    exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
    exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
    exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
    if (!stacked_shadow_frame_pushed_) {
      // In case there is no deoptimized shadow frame for this upcall, we still
      // need to push a nullptr to the stack since there is always a matching pop after
      // the long jump.
      GetThread()->PushStackedShadowFrame(nullptr,
                                          StackedShadowFrameType::kDeoptimizationShadowFrame);
      stacked_shadow_frame_pushed_ = true;
    }
    if (GetMethod() == nullptr) {
      exception_handler_->SetFullFragmentDone(true);
    } else {
      CHECK(callee_method_ != nullptr) << GetMethod()->PrettyMethod(false);
      exception_handler_->SetHandlerQuickArg0(reinterpret_cast<uintptr_t>(callee_method_));
    }
  }

  bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    ArtMethod* method = GetMethod();
    if (method == nullptr || single_frame_done_) {
      FinishStackWalk();
      return false;  // End stack walk.
    } else if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    } else if (method->IsNative()) {
      // If we return from JNI with a pending exception and want to deoptimize, we need to skip
      // the native method. The top method is a runtime method; the native method comes next.
      CHECK_EQ(GetFrameDepth(), 1U);
      callee_method_ = method;
      return true;
    } else if (!single_frame_deopt_ &&
               !Runtime::Current()->IsAsyncDeoptimizeable(GetCurrentQuickFramePc())) {
      // We hit some code that's not deoptimizeable. However, single-frame deoptimization
      // triggered from compiled code is always allowed, since HDeoptimize always saves the
      // full environment.
      LOG(WARNING) << "Got request to deoptimize un-deoptimizable method "
                   << method->PrettyMethod();
      FinishStackWalk();
      return false;  // End stack walk.
    } else {
      // Check if a shadow frame already exists for the debugger's set-local-value purpose.
      const size_t frame_id = GetFrameId();
      ShadowFrame* new_frame = GetThread()->FindDebuggerShadowFrame(frame_id);
      const bool* updated_vregs;
      CodeItemDataAccessor accessor(method->DexInstructionData());
      const size_t num_regs = accessor.RegistersSize();
      if (new_frame == nullptr) {
        new_frame = ShadowFrame::CreateDeoptimizedFrame(num_regs, nullptr, method, GetDexPc());
        updated_vregs = nullptr;
      } else {
        updated_vregs = GetThread()->GetUpdatedVRegFlags(frame_id);
        DCHECK(updated_vregs != nullptr);
      }
      HandleOptimizingDeoptimization(method, new_frame, updated_vregs);
      if (updated_vregs != nullptr) {
        // Calling Thread::RemoveDebuggerShadowFrameMapping will also delete the updated_vregs
        // array, so this must happen after we have processed the frame.
        GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
        DCHECK(GetThread()->FindDebuggerShadowFrame(frame_id) == nullptr);
      }
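      // Shadow frames are chained callee-to-caller: the innermost frame is
      // stashed on the thread's stacked-shadow-frame list, and each caller
      // frame visited afterwards is attached to its callee via SetLink(), so
      // the interpreter receives the whole deoptimized fragment.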
      if (prev_shadow_frame_ != nullptr) {
        prev_shadow_frame_->SetLink(new_frame);
      } else {
        // Will be popped after the long jump after DeoptimizeStack(),
        // right before interpreter::EnterInterpreterFromDeoptimize().
        stacked_shadow_frame_pushed_ = true;
        GetThread()->PushStackedShadowFrame(
            new_frame, StackedShadowFrameType::kDeoptimizationShadowFrame);
      }
      prev_shadow_frame_ = new_frame;

      if (single_frame_deopt_ && !IsInInlinedFrame()) {
        // Single-frame deopt ends at the first non-inlined frame and needs to store that method.
        single_frame_done_ = true;
        single_frame_deopt_method_ = method;
        single_frame_deopt_quick_method_header_ = GetCurrentOatQuickMethodHeader();
      }
      callee_method_ = method;
      return true;
    }
  }

 private:
  void HandleOptimizingDeoptimization(ArtMethod* m,
                                      ShadowFrame* new_frame,
                                      const bool* updated_vregs)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
    CodeInfo code_info = method_header->GetOptimizedCodeInfo();
    uintptr_t native_pc_offset = method_header->NativeQuickPcOffset(GetCurrentQuickFramePc());
    CodeInfoEncoding encoding = code_info.ExtractEncoding();
    StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset, encoding);
    CodeItemDataAccessor accessor(m->DexInstructionData());
    const size_t number_of_vregs = accessor.RegistersSize();
    uint32_t register_mask = code_info.GetRegisterMaskOf(encoding, stack_map);
    BitMemoryRegion stack_mask = code_info.GetStackMaskOf(encoding, stack_map);
    DexRegisterMap vreg_map = IsInInlinedFrame()
        ? code_info.GetDexRegisterMapAtDepth(GetCurrentInliningDepth() - 1,
                                             code_info.GetInlineInfoOf(stack_map, encoding),
                                             encoding,
                                             number_of_vregs)
        : code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_vregs);

    if (!vreg_map.IsValid()) {
      return;
    }

    for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
      if (updated_vregs != nullptr && updated_vregs[vreg]) {
        // Keep the value set by debugger.
        continue;
      }

      DexRegisterLocation::Kind location =
          vreg_map.GetLocationKind(vreg, number_of_vregs, code_info, encoding);
      static constexpr uint32_t kDeadValue = 0xEBADDE09;
      uint32_t value = kDeadValue;
      bool is_reference = false;

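      // Reference-ness is recovered from the GC maps in the stack map: a
      // stack slot holds a reference if its bit (one bit per 4-byte slot,
      // hence offset >> 2 below) is set in the stack mask, and a core
      // register holds a reference if its bit is set in the register mask.
      // The distinction matters because references must be stored with
      // SetVRegReference so the GC can visit them in the shadow frame.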
      switch (location) {
        case DexRegisterLocation::Kind::kInStack: {
          const int32_t offset = vreg_map.GetStackOffsetInBytes(vreg,
                                                                number_of_vregs,
                                                                code_info,
                                                                encoding);
          const uint8_t* addr = reinterpret_cast<const uint8_t*>(GetCurrentQuickFrame()) + offset;
          value = *reinterpret_cast<const uint32_t*>(addr);
          uint32_t bit = (offset >> 2);
          if (bit < encoding.stack_mask.encoding.BitSize() && stack_mask.LoadBit(bit)) {
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kInRegister:
        case DexRegisterLocation::Kind::kInRegisterHigh:
        case DexRegisterLocation::Kind::kInFpuRegister:
        case DexRegisterLocation::Kind::kInFpuRegisterHigh: {
          uint32_t reg = vreg_map.GetMachineRegister(vreg, number_of_vregs, code_info, encoding);
          bool result = GetRegisterIfAccessible(reg, ToVRegKind(location), &value);
          CHECK(result);
          if (location == DexRegisterLocation::Kind::kInRegister) {
            if (((1u << reg) & register_mask) != 0) {
              is_reference = true;
            }
          }
          break;
        }
        case DexRegisterLocation::Kind::kConstant: {
          value = vreg_map.GetConstant(vreg, number_of_vregs, code_info, encoding);
          if (value == 0) {
            // Make it a reference for extra safety.
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kNone: {
          break;
        }
        default: {
          LOG(FATAL)
              << "Unexpected location kind "
              << vreg_map.GetLocationInternalKind(vreg,
                                                  number_of_vregs,
                                                  code_info,
                                                  encoding);
          UNREACHABLE();
        }
      }
      if (is_reference) {
        new_frame->SetVRegReference(vreg, reinterpret_cast<mirror::Object*>(value));
      } else {
        new_frame->SetVReg(vreg, value);
      }
    }
  }

  static VRegKind GetVRegKind(uint16_t reg, const std::vector<int32_t>& kinds) {
    return static_cast<VRegKind>(kinds.at(reg * 2));
  }

  QuickExceptionHandler* const exception_handler_;
  ShadowFrame* prev_shadow_frame_;
  bool stacked_shadow_frame_pushed_;
  const bool single_frame_deopt_;
  bool single_frame_done_;
  ArtMethod* single_frame_deopt_method_;
  const OatQuickMethodHeader* single_frame_deopt_quick_method_header_;
  ArtMethod* callee_method_;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizeStackVisitor);
};

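// After a deoptimizing stack walk there are two ways back into managed code:
// if the walk consumed the whole quick fragment (full_fragment_done_), we
// long jump back to the upcall, where ArtMethod::Invoke() recognizes the
// special deoptimization exception; otherwise we long jump straight to the
// quick-to-interpreter bridge, passing the bottom callee method in arg0.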
void QuickExceptionHandler::PrepareForLongJumpToInvokeStubOrInterpreterBridge() {
  if (full_fragment_done_) {
    // Restore deoptimization exception. When returning from the invoke stub,
    // ArtMethod::Invoke() will see the special exception to know deoptimization
    // is needed.
    self_->SetException(Thread::GetDeoptimizationException());
  } else {
    // The PC needs to be that of the quick-to-interpreter bridge.
    int32_t offset =
        GetThreadOffset<kRuntimePointerSize>(kQuickQuickToInterpreterBridge).Int32Value();
    handler_quick_frame_pc_ = *reinterpret_cast<uintptr_t*>(
        reinterpret_cast<uint8_t*>(self_) + offset);
  }
}

void QuickExceptionHandler::DeoptimizeStack() {
  DCHECK(is_deoptimization_);
  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG_STREAM(INFO) << "Deoptimizing: ");
  }

  DeoptimizeStackVisitor visitor(self_, context_, this, false);
  visitor.WalkStack(true);
  PrepareForLongJumpToInvokeStubOrInterpreterBridge();
}

void QuickExceptionHandler::DeoptimizeSingleFrame(DeoptimizationKind kind) {
  DCHECK(is_deoptimization_);

  DeoptimizeStackVisitor visitor(self_, context_, this, true);
  visitor.WalkStack(true);

  // Compiled code made an explicit deoptimization.
  ArtMethod* deopt_method = visitor.GetSingleFrameDeoptMethod();
  DCHECK(deopt_method != nullptr);
  if (VLOG_IS_ON(deopt) || kDebugExceptionDelivery) {
    LOG(INFO) << "Single-frame deopting: "
              << deopt_method->PrettyMethod()
              << " due to "
              << GetDeoptimizationKindName(kind);
    DumpFramesWithType(self_, /* details */ true);
  }
  if (Runtime::Current()->UseJitCompilation()) {
    Runtime::Current()->GetJit()->GetCodeCache()->InvalidateCompiledCodeFor(
        deopt_method, visitor.GetSingleFrameDeoptQuickMethodHeader());
  } else {
    // Transfer the code to the interpreter.
    Runtime::Current()->GetInstrumentation()->UpdateMethodsCode(
        deopt_method, GetQuickToInterpreterBridge());
  }

  PrepareForLongJumpToInvokeStubOrInterpreterBridge();
}

void QuickExceptionHandler::DeoptimizePartialFragmentFixup(uintptr_t return_pc) {
  // At this point, the instrumentation stack has been updated. We need to install
  // the real return pc on the stack, in case the instrumentation stub is stored there,
  // so that the interpreter bridge code can return to the right place.
  if (return_pc != 0) {
    uintptr_t* pc_addr = reinterpret_cast<uintptr_t*>(handler_quick_frame_);
    CHECK(pc_addr != nullptr);
    pc_addr--;
    *pc_addr = return_pc;
  }

  // Architecture-dependent work. This is to get the return address right for x86 and x86-64
  // (which have no link register).
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    // On x86, the return address is on the stack, so just reuse it. Otherwise we would have to
    // change how longjump works.
    handler_quick_frame_ = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(handler_quick_frame_) - sizeof(void*));
  }
}

// Unwinds all instrumentation stack frames prior to the catch handler or upcall.
class InstrumentationStackVisitor : public StackVisitor {
 public:
  InstrumentationStackVisitor(Thread* self, size_t frame_depth)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        frame_depth_(frame_depth),
        instrumentation_frames_to_pop_(0) {
    CHECK_NE(frame_depth_, kInvalidFrameDepth);
  }

  bool VisitFrame() REQUIRES_SHARED(Locks::mutator_lock_) {
    size_t current_frame_depth = GetFrameDepth();
    if (current_frame_depth < frame_depth_) {
      CHECK(GetMethod() != nullptr);
      if (UNLIKELY(reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) == GetReturnPc())) {
        if (!IsInInlinedFrame()) {
          // We do not count inlined frames, because we do not instrument them. The reason we
          // include them in the stack walking is the check against `frame_depth_`, which is
          // given to us by a visitor that visits inlined frames.
          ++instrumentation_frames_to_pop_;
        }
      }
      return true;
    } else {
      // We reached the frame of the catch handler or the upcall.
      return false;
    }
  }

  size_t GetInstrumentationFramesToPop() const {
    return instrumentation_frames_to_pop_;
  }

 private:
  const size_t frame_depth_;
  size_t instrumentation_frames_to_pop_;

  DISALLOW_COPY_AND_ASSIGN(InstrumentationStackVisitor);
};

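// Pops one instrumentation stack entry for each instrumented quick frame that
// is about to be unwound. Returns the return pc of the last entry popped (or
// 0 if none), which DeoptimizePartialFragmentFixup uses to overwrite the
// instrumentation exit stub address left on the stack.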
uintptr_t QuickExceptionHandler::UpdateInstrumentationStack() {
  uintptr_t return_pc = 0;
  if (method_tracing_active_) {
    InstrumentationStackVisitor visitor(self_, handler_frame_depth_);
    visitor.WalkStack(true);

    size_t instrumentation_frames_to_pop = visitor.GetInstrumentationFramesToPop();
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    for (size_t i = 0; i < instrumentation_frames_to_pop; ++i) {
      return_pc = instrumentation->PopMethodForUnwind(self_, is_deoptimization_);
    }
  }
  return return_pc;
}

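// Transfers control to the handler: writes the handler frame's SP, the
// handler pc, and arg0 into the thread's long-jump context and jumps there.
// Callers may request that caller-save registers be clobbered first, since a
// catch handler cannot rely on their values.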
void QuickExceptionHandler::DoLongJump(bool smash_caller_saves) {
  // Place context back on thread so it will be available when we continue.
  self_->ReleaseLongJumpContext(context_);
  context_->SetSP(reinterpret_cast<uintptr_t>(handler_quick_frame_));
  CHECK_NE(handler_quick_frame_pc_, 0u);
  context_->SetPC(handler_quick_frame_pc_);
  context_->SetArg0(handler_quick_arg0_);
  if (smash_caller_saves) {
    context_->SmashCallerSaves();
  }
  context_->DoLongJump();
  UNREACHABLE();
}

// Prints out methods with their type of frame.
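// Frame prefix legend: "N" transition (no method), "R" runtime method,
// "Q" quick (compiled) frame, "S" shadow (interpreter) frame, with a
// trailing "i" for frames inlined into a quick frame.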
class DumpFramesWithTypeStackVisitor FINAL : public StackVisitor {
 public:
  explicit DumpFramesWithTypeStackVisitor(Thread* self, bool show_details = false)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        show_details_(show_details) {}

  bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod* method = GetMethod();
    if (show_details_) {
      LOG(INFO) << "|> pc   = " << std::hex << GetCurrentQuickFramePc();
      LOG(INFO) << "|> addr = " << std::hex << reinterpret_cast<uintptr_t>(GetCurrentQuickFrame());
      if (GetCurrentQuickFrame() != nullptr && method != nullptr) {
        LOG(INFO) << "|> ret  = " << std::hex << GetReturnPc();
      }
    }
    if (method == nullptr) {
      // Transition frame; keep going, as we want to unwind over bridges all the way.
      if (show_details_) {
        LOG(INFO) << "N  <transition>";
      }
      return true;
    } else if (method->IsRuntimeMethod()) {
      if (show_details_) {
        LOG(INFO) << "R  " << method->PrettyMethod(true);
      }
      return true;
    } else {
      bool is_shadow = GetCurrentShadowFrame() != nullptr;
      LOG(INFO) << (is_shadow ? "S" : "Q")
                << ((!is_shadow && IsInInlinedFrame()) ? "i" : " ")
                << " "
                << method->PrettyMethod(true);
      return true;  // Go on.
    }
  }

 private:
  bool show_details_;

  DISALLOW_COPY_AND_ASSIGN(DumpFramesWithTypeStackVisitor);
};

void QuickExceptionHandler::DumpFramesWithType(Thread* self, bool details) {
  DumpFramesWithTypeStackVisitor visitor(self, details);
  visitor.WalkStack(true);
}

}  // namespace art