/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "quick_exception_handler.h"

#include "arch/context.h"
#include "dex_instruction.h"
#include "entrypoints/entrypoint_utils.h"
#include "handle_scope-inl.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/throwable.h"
#include "verifier/method_verifier.h"

namespace art {

static constexpr bool kDebugExceptionDelivery = false;
static constexpr size_t kInvalidFrameDepth = 0xffffffff;

QuickExceptionHandler::QuickExceptionHandler(Thread* self, bool is_deoptimization)
    : self_(self), context_(self->GetLongJumpContext()), is_deoptimization_(is_deoptimization),
      method_tracing_active_(is_deoptimization ||
                             Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()),
      handler_quick_frame_(nullptr), handler_quick_frame_pc_(0), handler_method_(nullptr),
      handler_dex_pc_(0), clear_exception_(false), handler_frame_depth_(kInvalidFrameDepth) {
}
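
// Typical driver, as a minimal sketch (the caller is assumed to be
// Thread::QuickDeliverException(); the exact sequence lives outside this file):
//
//   QuickExceptionHandler exception_handler(self, is_deoptimization);
//   if (is_deoptimization) {
//     exception_handler.DeoptimizeStack();
//   } else {
//     exception_handler.FindCatch(throw_location, exception, is_exception_reported);
//   }
//   exception_handler.UpdateInstrumentationStack();
//   exception_handler.DoLongJump();  // Transfers control; does not return.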

// Finds catch handler or prepares for deoptimization.
class CatchBlockStackVisitor FINAL : public StackVisitor {
 public:
  CatchBlockStackVisitor(Thread* self, Context* context, Handle<mirror::Throwable>* exception,
                         QuickExceptionHandler* exception_handler)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      : StackVisitor(self, context), self_(self), exception_(exception),
        exception_handler_(exception_handler) {
  }

  bool VisitFrame() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    mirror::ArtMethod* method = GetMethod();
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    if (method == nullptr) {
      // This is the upcall; we remember the frame and last pc so that we may long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      uint32_t next_dex_pc;
      mirror::ArtMethod* next_art_method;
      bool has_next = GetNextMethodAndDexPc(&next_art_method, &next_dex_pc);
      // Report the method that did the down call as the handler.
      exception_handler_->SetHandlerDexPc(next_dex_pc);
      exception_handler_->SetHandlerMethod(next_art_method);
      if (!has_next) {
        // No next method? Check that the exception handler is set up for the unhandled
        // exception case.
        DCHECK_EQ(0U, exception_handler_->GetHandlerDexPc());
        DCHECK(nullptr == exception_handler_->GetHandlerMethod());
      }
      return false;  // End stack walk.
    }
    if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    }
    StackHandleScope<1> hs(self_);
    return HandleTryItems(hs.NewHandle(method));
  }

 private:
  bool HandleTryItems(Handle<mirror::ArtMethod> method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uint32_t dex_pc = DexFile::kDexNoIndex;
    if (!method->IsNative()) {
      dex_pc = GetDexPc();
    }
    if (dex_pc != DexFile::kDexNoIndex) {
      bool clear_exception = false;
      StackHandleScope<1> hs(Thread::Current());
      Handle<mirror::Class> to_find(hs.NewHandle((*exception_)->GetClass()));
      uint32_t found_dex_pc = mirror::ArtMethod::FindCatchBlock(method, to_find, dex_pc,
                                                                &clear_exception);
      exception_handler_->SetClearException(clear_exception);
      if (found_dex_pc != DexFile::kDexNoIndex) {
        exception_handler_->SetHandlerMethod(method.Get());
        exception_handler_->SetHandlerDexPc(found_dex_pc);
        exception_handler_->SetHandlerQuickFramePc(method->ToNativePc(found_dex_pc));
        exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
        return false;  // End stack walk.
      }
    }
    return true;  // Continue stack walk.
  }

  Thread* const self_;
  // The exception we're looking for the catch block of.
  Handle<mirror::Throwable>* exception_;
  // The quick exception handler we're visiting for.
  QuickExceptionHandler* const exception_handler_;

  DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
};

void QuickExceptionHandler::FindCatch(const ThrowLocation& throw_location,
                                      mirror::Throwable* exception,
                                      bool is_exception_reported) {
  DCHECK(!is_deoptimization_);
  if (kDebugExceptionDelivery) {
    mirror::String* msg = exception->GetDetailMessage();
    std::string str_msg(msg != nullptr ? msg->ToModifiedUtf8() : "");
    self_->DumpStack(LOG(INFO) << "Delivering exception: " << PrettyTypeOf(exception)
                     << ": " << str_msg << "\n");
  }
  StackHandleScope<1> hs(self_);
  Handle<mirror::Throwable> exception_ref(hs.NewHandle(exception));

  // Walk the stack to find catch handler or prepare for deoptimization.
  CatchBlockStackVisitor visitor(self_, context_, &exception_ref, this);
  visitor.WalkStack(true);

  if (kDebugExceptionDelivery) {
    if (handler_quick_frame_->AsMirrorPtr() == nullptr) {
      LOG(INFO) << "Handler is upcall";
    }
    if (handler_method_ != nullptr) {
      const DexFile& dex_file = *handler_method_->GetDeclaringClass()->GetDexCache()->GetDexFile();
      int line_number = dex_file.GetLineNumFromPC(handler_method_, handler_dex_pc_);
      LOG(INFO) << "Handler: " << PrettyMethod(handler_method_) << " (line: " << line_number << ")";
    }
  }
  if (clear_exception_) {
    // Exception was cleared as part of delivery.
    DCHECK(!self_->IsExceptionPending());
  } else {
    // Put exception back in root set with clear throw location.
    self_->SetException(ThrowLocation(), exception_ref.Get());
    self_->SetExceptionReportedToInstrumentation(is_exception_reported);
  }
  // The debugger may suspend this thread and walk its stack. Let's do this before popping
  // instrumentation frames.
  if (!is_exception_reported) {
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    instrumentation->ExceptionCaughtEvent(self_, throw_location, handler_method_, handler_dex_pc_,
                                          exception_ref.Get());
    // We're not catching this exception, but note that we already reported it above so we don't
    // report it twice.
    self_->SetExceptionReportedToInstrumentation(true);
  }
  bool caught_exception = (handler_method_ != nullptr && handler_dex_pc_ != DexFile::kDexNoIndex);
  if (caught_exception) {
    // We're catching this exception, so we finish reporting it. We do it here to avoid doing it
    // in the compiled code.
    self_->SetExceptionReportedToInstrumentation(false);
  }
}
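
// Once FindCatch() has run, handler_quick_frame_ and handler_quick_frame_pc_ describe where
// DoLongJump() will land: the catch handler's native pc inside the catching method, or the
// upcall frame when no managed catch handler was found.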

// Prepares deoptimization.
class DeoptimizeStackVisitor FINAL : public StackVisitor {
 public:
  DeoptimizeStackVisitor(Thread* self, Context* context, QuickExceptionHandler* exception_handler)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      : StackVisitor(self, context), self_(self), exception_handler_(exception_handler),
        prev_shadow_frame_(nullptr) {
    CHECK(!self_->HasDeoptimizationShadowFrame());
  }

  bool VisitFrame() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    mirror::ArtMethod* method = GetMethod();
    if (method == nullptr) {
      // This is the upcall; we remember the frame and last pc so that we may long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      return false;  // End stack walk.
    } else if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    } else {
      return HandleDeoptimization(method);
    }
  }

 private:
  static VRegKind GetVRegKind(uint16_t reg, const std::vector<int32_t>& kinds) {
    return static_cast<VRegKind>(kinds.at(reg * 2));
  }

  bool HandleDeoptimization(mirror::ArtMethod* m) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    const DexFile::CodeItem* code_item = m->GetCodeItem();
    CHECK(code_item != nullptr);
    uint16_t num_regs = code_item->registers_size_;
    uint32_t dex_pc = GetDexPc();
    StackHandleScope<3> hs(self_);  // Dex cache, class loader and method.
    mirror::Class* declaring_class = m->GetDeclaringClass();
    Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(declaring_class->GetDexCache()));
    Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(declaring_class->GetClassLoader()));
    Handle<mirror::ArtMethod> h_method(hs.NewHandle(m));
    verifier::MethodVerifier verifier(h_dex_cache->GetDexFile(), &h_dex_cache, &h_class_loader,
                                      &m->GetClassDef(), code_item, m->GetDexMethodIndex(), m,
                                      m->GetAccessFlags(), true, true, true);
    bool verifier_success = verifier.Verify();
    CHECK(verifier_success) << PrettyMethod(h_method.Get());
    ShadowFrame* new_frame = ShadowFrame::Create(num_regs, nullptr, h_method.Get(), dex_pc);
    self_->SetShadowFrameUnderConstruction(new_frame);
    const std::vector<int32_t> kinds(verifier.DescribeVRegs(dex_pc));
    for (uint16_t reg = 0; reg < num_regs; ++reg) {
      VRegKind kind = GetVRegKind(reg, kinds);
      switch (kind) {
        case kUndefined:
          new_frame->SetVReg(reg, 0xEBADDE09);
          break;
        case kConstant:
          new_frame->SetVReg(reg, kinds.at((reg * 2) + 1));
          break;
        case kReferenceVReg:
          new_frame->SetVRegReference(reg,
                                      reinterpret_cast<mirror::Object*>(GetVReg(h_method.Get(),
                                                                                reg, kind)));
          break;
        case kLongLoVReg:
          if (GetVRegKind(reg + 1, kinds) == kLongHiVReg) {
            // Treat it as a "long" register pair.
            new_frame->SetVRegLong(reg, GetVRegPair(h_method.Get(), reg, kLongLoVReg, kLongHiVReg));
          } else {
            new_frame->SetVReg(reg, GetVReg(h_method.Get(), reg, kind));
          }
          break;
        case kLongHiVReg:
          if (GetVRegKind(reg - 1, kinds) == kLongLoVReg) {
            // Nothing to do: we treated it as a "long" register pair.
          } else {
            new_frame->SetVReg(reg, GetVReg(h_method.Get(), reg, kind));
          }
          break;
        case kDoubleLoVReg:
          if (GetVRegKind(reg + 1, kinds) == kDoubleHiVReg) {
            // Treat it as a "double" register pair.
            new_frame->SetVRegLong(reg, GetVRegPair(h_method.Get(), reg, kDoubleLoVReg, kDoubleHiVReg));
          } else {
            new_frame->SetVReg(reg, GetVReg(h_method.Get(), reg, kind));
          }
          break;
        case kDoubleHiVReg:
          if (GetVRegKind(reg - 1, kinds) == kDoubleLoVReg) {
            // Nothing to do: we treated it as a "double" register pair.
          } else {
            new_frame->SetVReg(reg, GetVReg(h_method.Get(), reg, kind));
          }
          break;
        default:
          new_frame->SetVReg(reg, GetVReg(h_method.Get(), reg, kind));
          break;
      }
    }
    if (prev_shadow_frame_ != nullptr) {
      prev_shadow_frame_->SetLink(new_frame);
    } else {
      self_->SetDeoptimizationShadowFrame(new_frame);
    }
    self_->ClearShadowFrameUnderConstruction();
    prev_shadow_frame_ = new_frame;
    return true;
  }

  Thread* const self_;
  QuickExceptionHandler* const exception_handler_;
  ShadowFrame* prev_shadow_frame_;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizeStackVisitor);
};

void QuickExceptionHandler::DeoptimizeStack() {
  DCHECK(is_deoptimization_);
  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG(INFO) << "Deoptimizing: ");
  }

  DeoptimizeStackVisitor visitor(self_, context_, this);
  visitor.WalkStack(true);

  // Restore the deoptimization exception.
  self_->SetException(ThrowLocation(), Thread::GetDeoptimizationException());
}
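
// After DeoptimizeStack(), the chained shadow frames and the sentinel exception set above are
// picked up when the interpreter is re-entered (via interpreter::EnterInterpreterFromDeoptimize,
// assumed here), which then executes the deoptimized frames in place of the compiled code.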

// Unwinds all instrumentation stack frames prior to the catch handler or upcall.
class InstrumentationStackVisitor : public StackVisitor {
 public:
  InstrumentationStackVisitor(Thread* self, bool is_deoptimization, size_t frame_depth)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      : StackVisitor(self, nullptr),
        self_(self), frame_depth_(frame_depth),
        instrumentation_frames_to_pop_(0) {
    CHECK_NE(frame_depth_, kInvalidFrameDepth);
  }

  bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    size_t current_frame_depth = GetFrameDepth();
    if (current_frame_depth < frame_depth_) {
      CHECK(GetMethod() != nullptr);
      if (UNLIKELY(GetQuickInstrumentationExitPc() == GetReturnPc())) {
        ++instrumentation_frames_to_pop_;
      }
      return true;
    } else {
      // We reached the frame of the catch handler or the upcall.
      return false;
    }
  }

  size_t GetInstrumentationFramesToPop() const {
    return instrumentation_frames_to_pop_;
  }

 private:
  Thread* const self_;
  const size_t frame_depth_;
  size_t instrumentation_frames_to_pop_;

  DISALLOW_COPY_AND_ASSIGN(InstrumentationStackVisitor);
};

void QuickExceptionHandler::UpdateInstrumentationStack() {
  if (method_tracing_active_) {
    InstrumentationStackVisitor visitor(self_, is_deoptimization_, handler_frame_depth_);
    visitor.WalkStack(true);

    size_t instrumentation_frames_to_pop = visitor.GetInstrumentationFramesToPop();
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    for (size_t i = 0; i < instrumentation_frames_to_pop; ++i) {
      instrumentation->PopMethodForUnwind(self_, is_deoptimization_);
    }
  }
}
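
// The long jump below consumes the state recorded by the stack walk: SP and PC are restored from
// handler_quick_frame_ and handler_quick_frame_pc_, and control transfers there directly, so
// DoLongJump() never returns to its caller.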

void QuickExceptionHandler::DoLongJump() {
  // Place context back on thread so it will be available when we continue.
  self_->ReleaseLongJumpContext(context_);
  context_->SetSP(reinterpret_cast<uintptr_t>(handler_quick_frame_));
  CHECK_NE(handler_quick_frame_pc_, 0u);
  context_->SetPC(handler_quick_frame_pc_);
  context_->SmashCallerSaves();
  context_->DoLongJump();
}

}  // namespace art