/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "quick_exception_handler.h"

#include "arch/context.h"
#include "art_method-inl.h"
#include "base/enums.h"
#include "dex_instruction.h"
#include "entrypoints/entrypoint_utils.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "handle_scope-inl.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/throwable.h"
#include "oat_quick_method_header.h"
#include "stack.h"
#include "stack_map.h"

namespace art {

static constexpr bool kDebugExceptionDelivery = false;
static constexpr size_t kInvalidFrameDepth = 0xffffffff;

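// A typical use, sketched from Thread::QuickDeliverException() (exception
// delivery); the deoptimization entrypoints follow the same pattern with
// is_deoptimization == true:
//
//   QuickExceptionHandler exception_handler(self, /* is_deoptimization */ false);
//   exception_handler.FindCatch(exception);
//   exception_handler.UpdateInstrumentationStack();
//   exception_handler.DoLongJump();
//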
QuickExceptionHandler::QuickExceptionHandler(Thread* self, bool is_deoptimization)
    : self_(self),
      context_(self->GetLongJumpContext()),
      is_deoptimization_(is_deoptimization),
      method_tracing_active_(is_deoptimization ||
                             Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()),
      handler_quick_frame_(nullptr),
      handler_quick_frame_pc_(0),
      handler_method_header_(nullptr),
      handler_quick_arg0_(0),
      handler_method_(nullptr),
      handler_dex_pc_(0),
      clear_exception_(false),
      handler_frame_depth_(kInvalidFrameDepth),
      full_fragment_done_(false) {}

// Finds the catch handler for an exception by walking the stack.
class CatchBlockStackVisitor FINAL : public StackVisitor {
 public:
  CatchBlockStackVisitor(Thread* self, Context* context, Handle<mirror::Throwable>* exception,
                         QuickExceptionHandler* exception_handler)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_(exception),
        exception_handler_(exception_handler) {
  }

  bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod* method = GetMethod();
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    if (method == nullptr) {
      // This is the upcall; we remember the frame and last PC so that we can long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
      uint32_t next_dex_pc;
      ArtMethod* next_art_method;
      bool has_next = GetNextMethodAndDexPc(&next_art_method, &next_dex_pc);
      // Report the method that did the down call as the handler.
      exception_handler_->SetHandlerDexPc(next_dex_pc);
      exception_handler_->SetHandlerMethod(next_art_method);
      if (!has_next) {
        // No next method? Check that the exception handler is set up for the
        // unhandled-exception case.
        DCHECK_EQ(0U, exception_handler_->GetHandlerDexPc());
        DCHECK(nullptr == exception_handler_->GetHandlerMethod());
      }
      return false;  // End stack walk.
    }
    if (method->IsRuntimeMethod()) {
      // Ignore callee-save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    }
    return HandleTryItems(method);
  }

 private:
  bool HandleTryItems(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    uint32_t dex_pc = DexFile::kDexNoIndex;
    if (!method->IsNative()) {
      dex_pc = GetDexPc();
    }
    if (dex_pc != DexFile::kDexNoIndex) {
      bool clear_exception = false;
      StackHandleScope<1> hs(GetThread());
      Handle<mirror::Class> to_find(hs.NewHandle((*exception_)->GetClass()));
      uint32_t found_dex_pc = method->FindCatchBlock(to_find, dex_pc, &clear_exception);
      exception_handler_->SetClearException(clear_exception);
      if (found_dex_pc != DexFile::kDexNoIndex) {
        exception_handler_->SetHandlerMethod(method);
        exception_handler_->SetHandlerDexPc(found_dex_pc);
        exception_handler_->SetHandlerQuickFramePc(
            GetCurrentOatQuickMethodHeader()->ToNativeQuickPc(
                method, found_dex_pc, /* is_catch_handler */ true));
        exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
        exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
        return false;  // End stack walk.
      } else if (UNLIKELY(GetThread()->HasDebuggerShadowFrames())) {
        // We are going to unwind this frame. Did we prepare a shadow frame for debugging?
        size_t frame_id = GetFrameId();
        ShadowFrame* frame = GetThread()->FindDebuggerShadowFrame(frame_id);
        if (frame != nullptr) {
          // We will not execute this shadow frame, so we can safely deallocate it.
          GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
          ShadowFrame::DeleteDeoptimizedFrame(frame);
        }
      }
    }
    return true;  // Continue stack walk.
  }

  // The exception we're looking for the catch block of.
  Handle<mirror::Throwable>* exception_;
  // The quick exception handler we're visiting for.
  QuickExceptionHandler* const exception_handler_;

  DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
};

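// Walks the stack to locate a catch handler for `exception`. On success the
// handler frame, PC, method, and dex pc are recorded for the later long jump;
// if no Java handler is found, the upcall frame is recorded instead.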
void QuickExceptionHandler::FindCatch(ObjPtr<mirror::Throwable> exception) {
  DCHECK(!is_deoptimization_);
  if (kDebugExceptionDelivery) {
    mirror::String* msg = exception->GetDetailMessage();
    std::string str_msg(msg != nullptr ? msg->ToModifiedUtf8() : "");
    self_->DumpStack(LOG_STREAM(INFO) << "Delivering exception: " << exception->PrettyTypeOf()
                     << ": " << str_msg << "\n");
  }
  StackHandleScope<1> hs(self_);
  Handle<mirror::Throwable> exception_ref(hs.NewHandle(exception));

  // Walk the stack to find the catch handler.
  CatchBlockStackVisitor visitor(self_, context_, &exception_ref, this);
  visitor.WalkStack(true);

  if (kDebugExceptionDelivery) {
    if (*handler_quick_frame_ == nullptr) {
      LOG(INFO) << "Handler is upcall";
    }
    if (handler_method_ != nullptr) {
      const DexFile* dex_file = handler_method_->GetDeclaringClass()->GetDexCache()->GetDexFile();
      int line_number = annotations::GetLineNumFromPC(dex_file, handler_method_, handler_dex_pc_);
      LOG(INFO) << "Handler: " << handler_method_->PrettyMethod() << " (line: "
                << line_number << ")";
    }
  }
  if (clear_exception_) {
    // The exception was cleared as part of delivery.
    DCHECK(!self_->IsExceptionPending());
  } else {
    // Put the exception back in the root set with a clear throw location.
    self_->SetException(exception_ref.Get());
  }
  // If the handler is in optimized code, we need to set the catch environment.
  if (*handler_quick_frame_ != nullptr &&
      handler_method_header_ != nullptr &&
      handler_method_header_->IsOptimized()) {
    SetCatchEnvironmentForOptimizedHandler(&visitor);
  }
}

static VRegKind ToVRegKind(DexRegisterLocation::Kind kind) {
  // Slightly hacky since we cannot map DexRegisterLocation::Kind and VRegKind
  // one to one. However, StackVisitor::GetVRegFromOptimizedCode only needs to
  // distinguish between core/FPU registers and low/high bits on 64-bit.
  switch (kind) {
    case DexRegisterLocation::Kind::kConstant:
    case DexRegisterLocation::Kind::kInStack:
      // VRegKind is ignored.
      return VRegKind::kUndefined;

    case DexRegisterLocation::Kind::kInRegister:
      // Selects core register. For 64-bit registers, selects low 32 bits.
      return VRegKind::kLongLoVReg;

    case DexRegisterLocation::Kind::kInRegisterHigh:
      // Selects core register. For 64-bit registers, selects high 32 bits.
      return VRegKind::kLongHiVReg;

    case DexRegisterLocation::Kind::kInFpuRegister:
      // Selects FPU register. For 64-bit registers, selects low 32 bits.
      return VRegKind::kDoubleLoVReg;

    case DexRegisterLocation::Kind::kInFpuRegisterHigh:
      // Selects FPU register. For 64-bit registers, selects high 32 bits.
      return VRegKind::kDoubleHiVReg;

    default:
      LOG(FATAL) << "Unexpected vreg location " << kind;
      UNREACHABLE();
  }
}

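// The optimizing compiler materializes values that flow into a catch block as
// "catch phis", which are always assigned stack slots (hence the kInStack
// check in the loop below). This method copies each live vreg value of the
// throwing frame from wherever it currently resides into the corresponding
// catch phi's stack slot, so the handler finds a consistent environment when
// we jump to it.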
void QuickExceptionHandler::SetCatchEnvironmentForOptimizedHandler(StackVisitor* stack_visitor) {
  DCHECK(!is_deoptimization_);
  DCHECK(*handler_quick_frame_ != nullptr) << "Method should not be called on upcall exceptions";
  DCHECK(handler_method_ != nullptr && handler_method_header_->IsOptimized());

  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG_STREAM(INFO) << "Setting catch phis: ");
  }

  const size_t number_of_vregs = handler_method_->GetCodeItem()->registers_size_;
  CodeInfo code_info = handler_method_header_->GetOptimizedCodeInfo();
  CodeInfoEncoding encoding = code_info.ExtractEncoding();

  // Find the stack map of the catch block.
  StackMap catch_stack_map = code_info.GetCatchStackMapForDexPc(GetHandlerDexPc(), encoding);
  DCHECK(catch_stack_map.IsValid());
  DexRegisterMap catch_vreg_map =
      code_info.GetDexRegisterMapOf(catch_stack_map, encoding, number_of_vregs);
  if (!catch_vreg_map.IsValid()) {
    return;
  }

  // Find the stack map of the throwing instruction.
  StackMap throw_stack_map =
      code_info.GetStackMapForNativePcOffset(stack_visitor->GetNativePcOffset(), encoding);
  DCHECK(throw_stack_map.IsValid());
  DexRegisterMap throw_vreg_map =
      code_info.GetDexRegisterMapOf(throw_stack_map, encoding, number_of_vregs);
  DCHECK(throw_vreg_map.IsValid());

  // Copy the live values from the throwing frame's locations into the catch
  // phis' stack slots.
  for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
    DexRegisterLocation::Kind catch_location =
        catch_vreg_map.GetLocationKind(vreg, number_of_vregs, code_info, encoding);
    if (catch_location == DexRegisterLocation::Kind::kNone) {
      continue;
    }
    DCHECK(catch_location == DexRegisterLocation::Kind::kInStack);

    // Get the vreg value from its current location.
    uint32_t vreg_value;
    VRegKind vreg_kind = ToVRegKind(throw_vreg_map.GetLocationKind(vreg,
                                                                   number_of_vregs,
                                                                   code_info,
                                                                   encoding));
    bool get_vreg_success = stack_visitor->GetVReg(stack_visitor->GetMethod(),
                                                   vreg,
                                                   vreg_kind,
                                                   &vreg_value);
    CHECK(get_vreg_success) << "VReg " << vreg << " was optimized out ("
                            << "method=" << ArtMethod::PrettyMethod(stack_visitor->GetMethod())
                            << ", dex_pc=" << stack_visitor->GetDexPc() << ", "
                            << "native_pc_offset=" << stack_visitor->GetNativePcOffset() << ")";

    // Copy the value to the catch phi's stack slot.
    int32_t slot_offset = catch_vreg_map.GetStackOffsetInBytes(vreg,
                                                               number_of_vregs,
                                                               code_info,
                                                               encoding);
    ArtMethod** frame_top = stack_visitor->GetCurrentQuickFrame();
    uint8_t* slot_address = reinterpret_cast<uint8_t*>(frame_top) + slot_offset;
    uint32_t* slot_ptr = reinterpret_cast<uint32_t*>(slot_address);
    *slot_ptr = vreg_value;
  }
}

// Prepares deoptimization: walks the compiled frames and reconstructs an
// interpreter ShadowFrame for each of them from the stack map information.
class DeoptimizeStackVisitor FINAL : public StackVisitor {
 public:
  DeoptimizeStackVisitor(Thread* self,
                         Context* context,
                         QuickExceptionHandler* exception_handler,
                         bool single_frame)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_handler_(exception_handler),
        prev_shadow_frame_(nullptr),
        stacked_shadow_frame_pushed_(false),
        single_frame_deopt_(single_frame),
        single_frame_done_(false),
        single_frame_deopt_method_(nullptr),
        single_frame_deopt_quick_method_header_(nullptr),
        callee_method_(nullptr) {
  }

  ArtMethod* GetSingleFrameDeoptMethod() const {
    return single_frame_deopt_method_;
  }

  const OatQuickMethodHeader* GetSingleFrameDeoptQuickMethodHeader() const {
    return single_frame_deopt_quick_method_header_;
  }

  void FinishStackWalk() REQUIRES_SHARED(Locks::mutator_lock_) {
    // This is the upcall, or the next full frame in single-frame deopt, or the
    // code isn't deoptimizable. We remember the frame and last PC so that we
    // may long jump to them.
    exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
    exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
    exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
    if (!stacked_shadow_frame_pushed_) {
      // In case there is no deoptimized shadow frame for this upcall, we still
      // need to push a nullptr to the stack since there is always a matching pop after
      // the long jump.
      GetThread()->PushStackedShadowFrame(nullptr,
                                          StackedShadowFrameType::kDeoptimizationShadowFrame);
      stacked_shadow_frame_pushed_ = true;
    }
    if (GetMethod() == nullptr) {
      exception_handler_->SetFullFragmentDone(true);
    } else {
      CHECK(callee_method_ != nullptr) << GetMethod()->PrettyMethod(false);
      exception_handler_->SetHandlerQuickArg0(reinterpret_cast<uintptr_t>(callee_method_));
    }
  }

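  // Per-frame dispatch: skips runtime (callee-save) frames, skips a native
  // frame when returning from JNI with a deoptimization pending, stops at the
  // upcall or at code that is not deoptimizable, and converts every other
  // compiled Java frame into a shadow frame.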
  bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    ArtMethod* method = GetMethod();
    if (method == nullptr || single_frame_done_) {
      FinishStackWalk();
      return false;  // End stack walk.
    } else if (method->IsRuntimeMethod()) {
      // Ignore callee-save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    } else if (method->IsNative()) {
      // If we return from JNI with a pending exception and want to deoptimize, we need to skip
      // the native method. The top method is a runtime method; the native method comes next.
      CHECK_EQ(GetFrameDepth(), 1U);
      callee_method_ = method;
      return true;
    } else if (!single_frame_deopt_ &&
               !Runtime::Current()->IsAsyncDeoptimizeable(GetCurrentQuickFramePc())) {
      // We hit some code that's not deoptimizable. However, single-frame deoptimization triggered
      // from compiled code is always allowed, since HDeoptimize always saves the full environment.
      LOG(WARNING) << "Got request to deoptimize un-deoptimizable method "
                   << method->PrettyMethod();
      FinishStackWalk();
      return false;  // End stack walk.
    } else {
      // Check if a shadow frame already exists for the debugger's set-local-value purpose.
      const size_t frame_id = GetFrameId();
      ShadowFrame* new_frame = GetThread()->FindDebuggerShadowFrame(frame_id);
      const bool* updated_vregs;
      const size_t num_regs = method->GetCodeItem()->registers_size_;
      if (new_frame == nullptr) {
        new_frame = ShadowFrame::CreateDeoptimizedFrame(num_regs, nullptr, method, GetDexPc());
        updated_vregs = nullptr;
      } else {
        updated_vregs = GetThread()->GetUpdatedVRegFlags(frame_id);
        DCHECK(updated_vregs != nullptr);
      }
      HandleOptimizingDeoptimization(method, new_frame, updated_vregs);
      if (updated_vregs != nullptr) {
        // Calling Thread::RemoveDebuggerShadowFrameMapping will also delete the updated_vregs
        // array, so this must come after we have processed the frame.
        GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
        DCHECK(GetThread()->FindDebuggerShadowFrame(frame_id) == nullptr);
      }
      if (prev_shadow_frame_ != nullptr) {
        prev_shadow_frame_->SetLink(new_frame);
      } else {
        // Will be popped after the long jump after DeoptimizeStack(),
        // right before interpreter::EnterInterpreterFromDeoptimize().
        stacked_shadow_frame_pushed_ = true;
        GetThread()->PushStackedShadowFrame(
            new_frame, StackedShadowFrameType::kDeoptimizationShadowFrame);
      }
      prev_shadow_frame_ = new_frame;

      if (single_frame_deopt_ && !IsInInlinedFrame()) {
        // Single-frame deopt ends at the first non-inlined frame and needs to store that method.
        single_frame_done_ = true;
        single_frame_deopt_method_ = method;
        single_frame_deopt_quick_method_header_ = GetCurrentOatQuickMethodHeader();
      }
      callee_method_ = method;
      return true;
    }
  }

 private:
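  // Rebuilds the interpreter's view of one frame: for each dex register, reads
  // its current value from wherever the compiled code keeps it (stack slot,
  // machine register, or constant) according to the stack map, and writes it
  // into the shadow frame. The stack mask and register mask tell us which
  // locations hold object references, so those vregs are stored as references
  // for the benefit of the GC.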
  void HandleOptimizingDeoptimization(ArtMethod* m,
                                      ShadowFrame* new_frame,
                                      const bool* updated_vregs)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
    CodeInfo code_info = method_header->GetOptimizedCodeInfo();
    uintptr_t native_pc_offset = method_header->NativeQuickPcOffset(GetCurrentQuickFramePc());
    CodeInfoEncoding encoding = code_info.ExtractEncoding();
    StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset, encoding);
    const size_t number_of_vregs = m->GetCodeItem()->registers_size_;
    uint32_t register_mask = code_info.GetRegisterMaskOf(encoding, stack_map);
    BitMemoryRegion stack_mask = code_info.GetStackMaskOf(encoding, stack_map);
    DexRegisterMap vreg_map = IsInInlinedFrame()
        ? code_info.GetDexRegisterMapAtDepth(GetCurrentInliningDepth() - 1,
                                             code_info.GetInlineInfoOf(stack_map, encoding),
                                             encoding,
                                             number_of_vregs)
        : code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_vregs);

    if (!vreg_map.IsValid()) {
      return;
    }

    for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
      if (updated_vregs != nullptr && updated_vregs[vreg]) {
        // Keep the value set by the debugger.
        continue;
      }

      DexRegisterLocation::Kind location =
          vreg_map.GetLocationKind(vreg, number_of_vregs, code_info, encoding);
      static constexpr uint32_t kDeadValue = 0xEBADDE09;
      uint32_t value = kDeadValue;
      bool is_reference = false;

      switch (location) {
        case DexRegisterLocation::Kind::kInStack: {
          const int32_t offset = vreg_map.GetStackOffsetInBytes(vreg,
                                                                number_of_vregs,
                                                                code_info,
                                                                encoding);
          const uint8_t* addr = reinterpret_cast<const uint8_t*>(GetCurrentQuickFrame()) + offset;
          value = *reinterpret_cast<const uint32_t*>(addr);
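          // The stack mask has one bit per 4-byte stack slot; a set bit means
          // the slot holds an object reference.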
          uint32_t bit = (offset >> 2);
          if (bit < encoding.stack_mask.encoding.BitSize() && stack_mask.LoadBit(bit)) {
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kInRegister:
        case DexRegisterLocation::Kind::kInRegisterHigh:
        case DexRegisterLocation::Kind::kInFpuRegister:
        case DexRegisterLocation::Kind::kInFpuRegisterHigh: {
          uint32_t reg = vreg_map.GetMachineRegister(vreg, number_of_vregs, code_info, encoding);
          bool result = GetRegisterIfAccessible(reg, ToVRegKind(location), &value);
          CHECK(result);
          if (location == DexRegisterLocation::Kind::kInRegister) {
            if (((1u << reg) & register_mask) != 0) {
              is_reference = true;
            }
          }
          break;
        }
        case DexRegisterLocation::Kind::kConstant: {
          value = vreg_map.GetConstant(vreg, number_of_vregs, code_info, encoding);
          if (value == 0) {
            // Make it a reference for extra safety.
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kNone: {
          break;
        }
        default: {
          LOG(FATAL)
              << "Unexpected location kind "
              << vreg_map.GetLocationInternalKind(vreg,
                                                  number_of_vregs,
                                                  code_info,
                                                  encoding);
          UNREACHABLE();
        }
      }
      if (is_reference) {
        new_frame->SetVRegReference(vreg, reinterpret_cast<mirror::Object*>(value));
      } else {
        new_frame->SetVReg(vreg, value);
      }
    }
  }

  static VRegKind GetVRegKind(uint16_t reg, const std::vector<int32_t>& kinds) {
    return static_cast<VRegKind>(kinds.at(reg * 2));
  }

  QuickExceptionHandler* const exception_handler_;
  ShadowFrame* prev_shadow_frame_;
  bool stacked_shadow_frame_pushed_;
  const bool single_frame_deopt_;
  bool single_frame_done_;
  ArtMethod* single_frame_deopt_method_;
  const OatQuickMethodHeader* single_frame_deopt_quick_method_header_;
  ArtMethod* callee_method_;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizeStackVisitor);
};

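// After deoptimization, the long jump lands either back in the invoke stub
// (full fragment: the stack walk reached the upcall) or in the
// quick-to-interpreter bridge (partial fragment: a caller frame is still
// running compiled code). This sets up the PC, and for the full-fragment case
// the special deoptimization exception that ArtMethod::Invoke() checks for.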
void QuickExceptionHandler::PrepareForLongJumpToInvokeStubOrInterpreterBridge() {
  if (full_fragment_done_) {
    // Restore the deoptimization exception. When returning from the invoke stub,
    // ArtMethod::Invoke() will see the special exception to know that
    // deoptimization is needed.
    self_->SetException(Thread::GetDeoptimizationException());
  } else {
    // The PC needs to be that of the quick-to-interpreter bridge.
    const int32_t offset =
        GetThreadOffset<kRuntimePointerSize>(kQuickQuickToInterpreterBridge).Int32Value();
    handler_quick_frame_pc_ = *reinterpret_cast<uintptr_t*>(
        reinterpret_cast<uint8_t*>(self_) + offset);
  }
}

void QuickExceptionHandler::DeoptimizeStack() {
  DCHECK(is_deoptimization_);
  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG_STREAM(INFO) << "Deoptimizing: ");
  }

  DeoptimizeStackVisitor visitor(self_, context_, this, false);
  visitor.WalkStack(true);
  PrepareForLongJumpToInvokeStubOrInterpreterBridge();
}

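// Deoptimizes only the top non-inlined frame. Used for deoptimizations
// triggered from compiled code itself (e.g. HDeoptimize); the compiled code
// for the method is then invalidated (JIT) or redirected to the interpreter
// bridge so future invocations do not re-enter the stale code.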
void QuickExceptionHandler::DeoptimizeSingleFrame(DeoptimizationKind kind) {
  DCHECK(is_deoptimization_);

  if (VLOG_IS_ON(deopt) || kDebugExceptionDelivery) {
    LOG(INFO) << "Single-frame deopting:";
    DumpFramesWithType(self_, true);
  }

  DeoptimizeStackVisitor visitor(self_, context_, this, true);
  visitor.WalkStack(true);

  // Compiled code made an explicit deoptimization.
  ArtMethod* deopt_method = visitor.GetSingleFrameDeoptMethod();
  DCHECK(deopt_method != nullptr);
  LOG(INFO) << "Deoptimizing "
            << deopt_method->PrettyMethod()
            << " due to "
            << GetDeoptimizationKindName(kind);
  if (Runtime::Current()->UseJitCompilation()) {
    Runtime::Current()->GetJit()->GetCodeCache()->InvalidateCompiledCodeFor(
        deopt_method, visitor.GetSingleFrameDeoptQuickMethodHeader());
  } else {
    // Redirect the method's entry point to the interpreter bridge.
    Runtime::Current()->GetInstrumentation()->UpdateMethodsCode(
        deopt_method, GetQuickToInterpreterBridge());
  }

  PrepareForLongJumpToInvokeStubOrInterpreterBridge();
}

void QuickExceptionHandler::DeoptimizePartialFragmentFixup(uintptr_t return_pc) {
  // At this point, the instrumentation stack has been updated. We need to install
  // the real return PC on the stack, in case the instrumentation exit stub is stored
  // there, so that the interpreter bridge code can return to the right place.
  if (return_pc != 0) {
    uintptr_t* pc_addr = reinterpret_cast<uintptr_t*>(handler_quick_frame_);
    CHECK(pc_addr != nullptr);
    pc_addr--;
    *pc_addr = return_pc;
  }

  // Architecture-dependent work. This is to get the return address right on x86 and
  // x86-64, which keep it on the stack rather than in a link register.
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    // On x86, the return address is on the stack, so just reuse it. Otherwise we would have to
    // change how longjump works.
    handler_quick_frame_ = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(handler_quick_frame_) - sizeof(void*));
  }
}

// Unwinds all instrumentation stack frames prior to the catch handler or upcall.
class InstrumentationStackVisitor : public StackVisitor {
 public:
  InstrumentationStackVisitor(Thread* self, size_t frame_depth)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        frame_depth_(frame_depth),
        instrumentation_frames_to_pop_(0) {
    CHECK_NE(frame_depth_, kInvalidFrameDepth);
  }

  bool VisitFrame() REQUIRES_SHARED(Locks::mutator_lock_) {
    size_t current_frame_depth = GetFrameDepth();
    if (current_frame_depth < frame_depth_) {
      CHECK(GetMethod() != nullptr);
      if (UNLIKELY(reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) == GetReturnPc())) {
        if (!IsInInlinedFrame()) {
          // We do not count inlined frames, because we do not instrument them. The reason we
          // include them in the stack walking is the check against `frame_depth_`, which is
          // given to us by a visitor that visits inlined frames.
          ++instrumentation_frames_to_pop_;
        }
      }
      return true;
    } else {
      // We reached the frame of the catch handler or the upcall.
      return false;
    }
  }

  size_t GetInstrumentationFramesToPop() const {
    return instrumentation_frames_to_pop_;
  }

 private:
  const size_t frame_depth_;
  size_t instrumentation_frames_to_pop_;

  DISALLOW_COPY_AND_ASSIGN(InstrumentationStackVisitor);
};

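// Pops one instrumentation stack frame record for every instrumented frame
// about to be unwound, keeping the instrumentation stack in sync. Returns the
// return PC of the last frame popped (0 if none), which
// DeoptimizePartialFragmentFixup() uses to patch the real return address back
// onto the stack.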
uintptr_t QuickExceptionHandler::UpdateInstrumentationStack() {
  uintptr_t return_pc = 0;
  if (method_tracing_active_) {
    InstrumentationStackVisitor visitor(self_, handler_frame_depth_);
    visitor.WalkStack(true);

    size_t instrumentation_frames_to_pop = visitor.GetInstrumentationFramesToPop();
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    for (size_t i = 0; i < instrumentation_frames_to_pop; ++i) {
      return_pc = instrumentation->PopMethodForUnwind(self_, is_deoptimization_);
    }
  }
  return return_pc;
}

void QuickExceptionHandler::DoLongJump(bool smash_caller_saves) {
  // Place the context back on the thread so it will be available when we continue.
  self_->ReleaseLongJumpContext(context_);
  context_->SetSP(reinterpret_cast<uintptr_t>(handler_quick_frame_));
  CHECK_NE(handler_quick_frame_pc_, 0u);
  context_->SetPC(handler_quick_frame_pc_);
  context_->SetArg0(handler_quick_arg0_);
  if (smash_caller_saves) {
    context_->SmashCallerSaves();
  }
  context_->DoLongJump();
  UNREACHABLE();
}

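// Legend for the dump below: 'N' marks a managed/native transition, 'R' a
// runtime method, 'S' a frame executed as a shadow frame (interpreter), 'Q' a
// quick (compiled) frame, and 'Qi' a frame inlined into quick code.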
// Prints out methods with their frame types.
class DumpFramesWithTypeStackVisitor FINAL : public StackVisitor {
 public:
  explicit DumpFramesWithTypeStackVisitor(Thread* self, bool show_details = false)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        show_details_(show_details) {}

  bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod* method = GetMethod();
    if (show_details_) {
      LOG(INFO) << "|> pc   = " << std::hex << GetCurrentQuickFramePc();
      LOG(INFO) << "|> addr = " << std::hex << reinterpret_cast<uintptr_t>(GetCurrentQuickFrame());
      if (GetCurrentQuickFrame() != nullptr && method != nullptr) {
        LOG(INFO) << "|> ret  = " << std::hex << GetReturnPc();
      }
    }
    if (method == nullptr) {
      // A transition frame; keep going, since we want to unwind over bridges all the way.
      if (show_details_) {
        LOG(INFO) << "N  <transition>";
      }
      return true;
    } else if (method->IsRuntimeMethod()) {
      if (show_details_) {
        LOG(INFO) << "R  " << method->PrettyMethod(true);
      }
      return true;
    } else {
      bool is_shadow = GetCurrentShadowFrame() != nullptr;
      LOG(INFO) << (is_shadow ? "S" : "Q")
                << ((!is_shadow && IsInInlinedFrame()) ? "i" : " ")
                << " "
                << method->PrettyMethod(true);
      return true;  // Go on.
    }
  }

 private:
  bool show_details_;

  DISALLOW_COPY_AND_ASSIGN(DumpFramesWithTypeStackVisitor);
};

void QuickExceptionHandler::DumpFramesWithType(Thread* self, bool details) {
  DumpFramesWithTypeStackVisitor visitor(self, details);
  visitor.WalkStack(true);
}

}  // namespace art