/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "interpreter_common.h"

#include <cmath>

#include "base/enums.h"
#include "debugger.h"
#include "dex/dex_file_types.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "intrinsics_enum.h"
#include "jit/jit.h"
#include "jvalue.h"
#include "method_handles-inl.h"
#include "method_handles.h"
#include "mirror/array-inl.h"
#include "mirror/class.h"
#include "mirror/emulated_stack_frame.h"
#include "mirror/method_handle_impl-inl.h"
#include "mirror/var_handle.h"
#include "reflection-inl.h"
#include "reflection.h"
#include "stack.h"
#include "thread-inl.h"
#include "transaction.h"
#include "well_known_classes.h"

namespace art {
namespace interpreter {

void ThrowNullPointerExceptionFromInterpreter() {
  ThrowNullPointerExceptionFromDexPC();
}

template<FindFieldType find_type, Primitive::Type field_type, bool do_access_check,
         bool transaction_active>
bool DoFieldGet(Thread* self, ShadowFrame& shadow_frame, const Instruction* inst,
                uint16_t inst_data) {
  const bool is_static = (find_type == StaticObjectRead) || (find_type == StaticPrimitiveRead);
  const uint32_t field_idx = is_static ? inst->VRegB_21c() : inst->VRegC_22c();
  ArtField* f =
      FindFieldFromCode<find_type, do_access_check>(field_idx, shadow_frame.GetMethod(), self,
                                                    Primitive::ComponentSize(field_type));
  if (UNLIKELY(f == nullptr)) {
    CHECK(self->IsExceptionPending());
    return false;
  }
  ObjPtr<mirror::Object> obj;
  if (is_static) {
    obj = f->GetDeclaringClass();
    if (transaction_active) {
      if (Runtime::Current()->GetTransaction()->ReadConstraint(obj.Ptr(), f)) {
        Runtime::Current()->AbortTransactionAndThrowAbortError(self, "Can't read static fields of "
            + obj->PrettyTypeOf() + " since it does not belong to clinit's class.");
        return false;
      }
    }
  } else {
    obj = shadow_frame.GetVRegReference(inst->VRegB_22c(inst_data));
    if (UNLIKELY(obj == nullptr)) {
      ThrowNullPointerExceptionForFieldAccess(f, true);
      return false;
    }
  }

  JValue result;
  if (UNLIKELY(!DoFieldGetCommon<field_type>(self, shadow_frame, obj, f, &result))) {
    // Instrumentation threw an error!
    CHECK(self->IsExceptionPending());
    return false;
  }
  uint32_t vregA = is_static ?
      inst->VRegA_21c(inst_data) : inst->VRegA_22c(inst_data);
  switch (field_type) {
    case Primitive::kPrimBoolean:
      shadow_frame.SetVReg(vregA, result.GetZ());
      break;
    case Primitive::kPrimByte:
      shadow_frame.SetVReg(vregA, result.GetB());
      break;
    case Primitive::kPrimChar:
      shadow_frame.SetVReg(vregA, result.GetC());
      break;
    case Primitive::kPrimShort:
      shadow_frame.SetVReg(vregA, result.GetS());
      break;
    case Primitive::kPrimInt:
      shadow_frame.SetVReg(vregA, result.GetI());
      break;
    case Primitive::kPrimLong:
      shadow_frame.SetVRegLong(vregA, result.GetJ());
      break;
    case Primitive::kPrimNot:
      shadow_frame.SetVRegReference(vregA, result.GetL());
      break;
    default:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
  return true;
}

// Explicitly instantiate all DoFieldGet functions.
#define EXPLICIT_DO_FIELD_GET_TEMPLATE_DECL(_find_type, _field_type, _do_check, _transaction_active) \
  template bool DoFieldGet<_find_type, _field_type, _do_check, _transaction_active>(Thread* self, \
                                                                                    ShadowFrame& shadow_frame, \
                                                                                    const Instruction* inst, \
                                                                                    uint16_t inst_data)

#define EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(_find_type, _field_type) \
  EXPLICIT_DO_FIELD_GET_TEMPLATE_DECL(_find_type, _field_type, false, true); \
  EXPLICIT_DO_FIELD_GET_TEMPLATE_DECL(_find_type, _field_type, false, false); \
  EXPLICIT_DO_FIELD_GET_TEMPLATE_DECL(_find_type, _field_type, true, true); \
  EXPLICIT_DO_FIELD_GET_TEMPLATE_DECL(_find_type, _field_type, true, false);

// iget-XXX
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstancePrimitiveRead, Primitive::kPrimBoolean)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstancePrimitiveRead, Primitive::kPrimByte)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstancePrimitiveRead, Primitive::kPrimChar)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstancePrimitiveRead, Primitive::kPrimShort)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstancePrimitiveRead, Primitive::kPrimInt)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstancePrimitiveRead, Primitive::kPrimLong)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstanceObjectRead, Primitive::kPrimNot)

// sget-XXX
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticPrimitiveRead, Primitive::kPrimBoolean)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticPrimitiveRead, Primitive::kPrimByte)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticPrimitiveRead, Primitive::kPrimChar)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticPrimitiveRead, Primitive::kPrimShort)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticPrimitiveRead, Primitive::kPrimInt)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticPrimitiveRead, Primitive::kPrimLong)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticObjectRead, Primitive::kPrimNot)

#undef EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL
#undef EXPLICIT_DO_FIELD_GET_TEMPLATE_DECL

// Handles iget-quick, iget-wide-quick and iget-object-quick instructions.
// Returns true on success, otherwise throws an exception and returns false.
template<Primitive::Type field_type>
bool DoIGetQuick(ShadowFrame& shadow_frame, const Instruction* inst, uint16_t inst_data) {
  ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(inst->VRegB_22c(inst_data));
  if (UNLIKELY(obj == nullptr)) {
    // We lost the reference to the field index so we cannot get a more
    // precise exception message.
    ThrowNullPointerExceptionFromDexPC();
    return false;
  }
  MemberOffset field_offset(inst->VRegC_22c());
  // Report this field access to instrumentation if needed. Since we only have the offset of
  // the field from the base of the object, we need to look for it first.
  instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
  if (UNLIKELY(instrumentation->HasFieldReadListeners())) {
    ArtField* f = ArtField::FindInstanceFieldWithOffset(obj->GetClass(),
                                                        field_offset.Uint32Value());
    DCHECK(f != nullptr);
    DCHECK(!f->IsStatic());
    Thread* self = Thread::Current();
    StackHandleScope<1> hs(self);
    // Save obj in case the instrumentation event has thread suspension.
    HandleWrapperObjPtr<mirror::Object> h = hs.NewHandleWrapper(&obj);
    instrumentation->FieldReadEvent(self,
                                    obj.Ptr(),
                                    shadow_frame.GetMethod(),
                                    shadow_frame.GetDexPC(),
                                    f);
    if (UNLIKELY(self->IsExceptionPending())) {
      return false;
    }
  }
  // Note: iget-x-quick instructions are only for non-volatile fields.
  const uint32_t vregA = inst->VRegA_22c(inst_data);
  switch (field_type) {
    case Primitive::kPrimInt:
      shadow_frame.SetVReg(vregA, static_cast<int32_t>(obj->GetField32(field_offset)));
      break;
    case Primitive::kPrimBoolean:
      shadow_frame.SetVReg(vregA, static_cast<int32_t>(obj->GetFieldBoolean(field_offset)));
      break;
    case Primitive::kPrimByte:
      shadow_frame.SetVReg(vregA, static_cast<int32_t>(obj->GetFieldByte(field_offset)));
      break;
    case Primitive::kPrimChar:
      shadow_frame.SetVReg(vregA, static_cast<int32_t>(obj->GetFieldChar(field_offset)));
      break;
    case Primitive::kPrimShort:
      shadow_frame.SetVReg(vregA, static_cast<int32_t>(obj->GetFieldShort(field_offset)));
      break;
    case Primitive::kPrimLong:
      shadow_frame.SetVRegLong(vregA, static_cast<int64_t>(obj->GetField64(field_offset)));
      break;
    case Primitive::kPrimNot:
      shadow_frame.SetVRegReference(vregA, obj->GetFieldObject<mirror::Object>(field_offset));
      break;
    default:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
  return true;
}

// Explicitly instantiate all DoIGetQuick functions.
#define EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(_field_type) \
  template bool DoIGetQuick<_field_type>(ShadowFrame& shadow_frame, const Instruction* inst, \
                                         uint16_t inst_data)

EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimInt);      // iget-quick.
EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimBoolean);  // iget-boolean-quick.
EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimByte);     // iget-byte-quick.
EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimChar);     // iget-char-quick.
EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimShort);    // iget-short-quick.
EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimLong);     // iget-wide-quick.
EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimNot);      // iget-object-quick.
#undef EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL

template<Primitive::Type field_type>
static JValue GetFieldValue(const ShadowFrame& shadow_frame, uint32_t vreg)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue field_value;
  switch (field_type) {
    case Primitive::kPrimBoolean:
      field_value.SetZ(static_cast<uint8_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimByte:
      field_value.SetB(static_cast<int8_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimChar:
      field_value.SetC(static_cast<uint16_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimShort:
      field_value.SetS(static_cast<int16_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimInt:
      field_value.SetI(shadow_frame.GetVReg(vreg));
      break;
    case Primitive::kPrimLong:
      field_value.SetJ(shadow_frame.GetVRegLong(vreg));
      break;
    case Primitive::kPrimNot:
      field_value.SetL(shadow_frame.GetVRegReference(vreg));
      break;
    default:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
  return field_value;
}

template<FindFieldType find_type, Primitive::Type field_type, bool do_access_check,
         bool transaction_active>
bool DoFieldPut(Thread* self, const ShadowFrame& shadow_frame, const Instruction* inst,
                uint16_t inst_data) {
  const bool do_assignability_check = do_access_check;
  bool is_static = (find_type == StaticObjectWrite) || (find_type == StaticPrimitiveWrite);
  uint32_t field_idx = is_static ? inst->VRegB_21c() : inst->VRegC_22c();
  ArtField* f =
      FindFieldFromCode<find_type, do_access_check>(field_idx, shadow_frame.GetMethod(), self,
                                                    Primitive::ComponentSize(field_type));
  if (UNLIKELY(f == nullptr)) {
    CHECK(self->IsExceptionPending());
    return false;
  }
  ObjPtr<mirror::Object> obj;
  if (is_static) {
    obj = f->GetDeclaringClass();
    if (transaction_active) {
      if (Runtime::Current()->GetTransaction()->WriteConstraint(obj.Ptr(), f)) {
        Runtime::Current()->AbortTransactionAndThrowAbortError(
            self, "Can't set fields of " + obj->PrettyTypeOf());
        return false;
      }
    }

  } else {
    obj = shadow_frame.GetVRegReference(inst->VRegB_22c(inst_data));
    if (UNLIKELY(obj == nullptr)) {
      ThrowNullPointerExceptionForFieldAccess(f, false);
      return false;
    }
  }

  uint32_t vregA = is_static ? inst->VRegA_21c(inst_data) : inst->VRegA_22c(inst_data);
  JValue value = GetFieldValue<field_type>(shadow_frame, vregA);
  return DoFieldPutCommon<field_type, do_assignability_check, transaction_active>(self,
                                                                                  shadow_frame,
                                                                                  obj,
                                                                                  f,
                                                                                  value);
}

// Explicitly instantiate all DoFieldPut functions.
#define EXPLICIT_DO_FIELD_PUT_TEMPLATE_DECL(_find_type, _field_type, _do_check, _transaction_active) \
  template bool DoFieldPut<_find_type, _field_type, _do_check, _transaction_active>(Thread* self, \
      const ShadowFrame& shadow_frame, const Instruction* inst, uint16_t inst_data)

#define EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(_find_type, _field_type) \
  EXPLICIT_DO_FIELD_PUT_TEMPLATE_DECL(_find_type, _field_type, false, false); \
  EXPLICIT_DO_FIELD_PUT_TEMPLATE_DECL(_find_type, _field_type, true, false); \
  EXPLICIT_DO_FIELD_PUT_TEMPLATE_DECL(_find_type, _field_type, false, true); \
  EXPLICIT_DO_FIELD_PUT_TEMPLATE_DECL(_find_type, _field_type, true, true);

// iput-XXX
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstancePrimitiveWrite, Primitive::kPrimBoolean)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstancePrimitiveWrite, Primitive::kPrimByte)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstancePrimitiveWrite, Primitive::kPrimChar)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstancePrimitiveWrite, Primitive::kPrimShort)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstancePrimitiveWrite, Primitive::kPrimInt)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstancePrimitiveWrite, Primitive::kPrimLong)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstanceObjectWrite, Primitive::kPrimNot)

// sput-XXX
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticPrimitiveWrite, Primitive::kPrimBoolean)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticPrimitiveWrite, Primitive::kPrimByte)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticPrimitiveWrite, Primitive::kPrimChar)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticPrimitiveWrite, Primitive::kPrimShort)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticPrimitiveWrite, Primitive::kPrimInt)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticPrimitiveWrite, Primitive::kPrimLong)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticObjectWrite, Primitive::kPrimNot)

#undef EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL
#undef EXPLICIT_DO_FIELD_PUT_TEMPLATE_DECL

template<Primitive::Type field_type, bool transaction_active>
bool DoIPutQuick(const ShadowFrame& shadow_frame, const Instruction* inst, uint16_t inst_data) {
  ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(inst->VRegB_22c(inst_data));
  if (UNLIKELY(obj == nullptr)) {
    // We lost the reference to the field index so we cannot get a more
    // precise exception message.
    ThrowNullPointerExceptionFromDexPC();
    return false;
  }
  MemberOffset field_offset(inst->VRegC_22c());
  const uint32_t vregA = inst->VRegA_22c(inst_data);
  // Report this field modification to instrumentation if needed. Since we only have the offset of
  // the field from the base of the object, we need to look for it first.
  instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
  if (UNLIKELY(instrumentation->HasFieldWriteListeners())) {
    ArtField* f = ArtField::FindInstanceFieldWithOffset(obj->GetClass(),
                                                        field_offset.Uint32Value());
    DCHECK(f != nullptr);
    DCHECK(!f->IsStatic());
    JValue field_value = GetFieldValue<field_type>(shadow_frame, vregA);
    Thread* self = Thread::Current();
    StackHandleScope<2> hs(self);
    // Save obj in case the instrumentation event has thread suspension.
    HandleWrapperObjPtr<mirror::Object> h = hs.NewHandleWrapper(&obj);
    mirror::Object* fake_root = nullptr;
    HandleWrapper<mirror::Object> ret(hs.NewHandleWrapper<mirror::Object>(
        field_type == Primitive::kPrimNot ? field_value.GetGCRoot() : &fake_root));
    instrumentation->FieldWriteEvent(self,
                                     obj.Ptr(),
                                     shadow_frame.GetMethod(),
                                     shadow_frame.GetDexPC(),
                                     f,
                                     field_value);
    if (UNLIKELY(self->IsExceptionPending())) {
      return false;
    }
  }
  // Note: iput-x-quick instructions are only for non-volatile fields.
  switch (field_type) {
    case Primitive::kPrimBoolean:
      obj->SetFieldBoolean<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
      break;
    case Primitive::kPrimByte:
      obj->SetFieldByte<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
      break;
    case Primitive::kPrimChar:
      obj->SetFieldChar<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
      break;
    case Primitive::kPrimShort:
      obj->SetFieldShort<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
      break;
    case Primitive::kPrimInt:
      obj->SetField32<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
      break;
    case Primitive::kPrimLong:
      obj->SetField64<transaction_active>(field_offset, shadow_frame.GetVRegLong(vregA));
      break;
    case Primitive::kPrimNot:
      obj->SetFieldObject<transaction_active>(field_offset, shadow_frame.GetVRegReference(vregA));
      break;
    default:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
  return true;
}

// Explicitly instantiate all DoIPutQuick functions.
#define EXPLICIT_DO_IPUT_QUICK_TEMPLATE_DECL(_field_type, _transaction_active) \
  template bool DoIPutQuick<_field_type, _transaction_active>(const ShadowFrame& shadow_frame, \
                                                              const Instruction* inst, \
                                                              uint16_t inst_data)

#define EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(_field_type) \
  EXPLICIT_DO_IPUT_QUICK_TEMPLATE_DECL(_field_type, false); \
  EXPLICIT_DO_IPUT_QUICK_TEMPLATE_DECL(_field_type, true);

EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimInt)      // iput-quick.
EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimBoolean)  // iput-boolean-quick.
EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimByte)     // iput-byte-quick.
EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimChar)     // iput-char-quick.
EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimShort)    // iput-short-quick.
EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimLong)     // iput-wide-quick.
EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimNot)      // iput-object-quick.
#undef EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL
#undef EXPLICIT_DO_IPUT_QUICK_TEMPLATE_DECL

// We execute any instrumentation events that are triggered by this exception and change the
// shadow_frame's dex_pc to that of the exception handler if there is one in the current method.
// Return true if we should continue executing in the current method and false if we need to go up
// the stack to find an exception handler.
// We accept a null Instrumentation* meaning we must not report anything to the instrumentation.
// TODO We should have a better way to skip instrumentation reporting or possibly rethink that
// behavior.
bool MoveToExceptionHandler(Thread* self,
                            ShadowFrame& shadow_frame,
                            const instrumentation::Instrumentation* instrumentation) {
  self->VerifyStack();
  StackHandleScope<2> hs(self);
  Handle<mirror::Throwable> exception(hs.NewHandle(self->GetException()));
  if (instrumentation != nullptr &&
      instrumentation->HasExceptionThrownListeners() &&
      self->IsExceptionThrownByCurrentMethod(exception.Get())) {
    // See b/65049545 for why we don't need to check to see if the exception has changed.
    instrumentation->ExceptionThrownEvent(self, exception.Get());
  }
  bool clear_exception = false;
  uint32_t found_dex_pc = shadow_frame.GetMethod()->FindCatchBlock(
      hs.NewHandle(exception->GetClass()), shadow_frame.GetDexPC(), &clear_exception);
  if (found_dex_pc == dex::kDexNoIndex) {
    if (instrumentation != nullptr) {
      if (shadow_frame.NeedsNotifyPop()) {
        instrumentation->WatchedFramePopped(self, shadow_frame);
      }
      // Exception is not caught by the current method. We will unwind to the
      // caller. Notify any instrumentation listener.
      instrumentation->MethodUnwindEvent(self,
                                         shadow_frame.GetThisObject(),
                                         shadow_frame.GetMethod(),
                                         shadow_frame.GetDexPC());
    }
    return false;
  } else {
    shadow_frame.SetDexPC(found_dex_pc);
    if (instrumentation != nullptr && instrumentation->HasExceptionHandledListeners()) {
      self->ClearException();
      instrumentation->ExceptionHandledEvent(self, exception.Get());
      if (UNLIKELY(self->IsExceptionPending())) {
        // Exception handled event threw an exception. Try to find the handler for this one.
        return MoveToExceptionHandler(self, shadow_frame, instrumentation);
      } else if (!clear_exception) {
        self->SetException(exception.Get());
      }
    } else if (clear_exception) {
      self->ClearException();
    }
    return true;
  }
}

void UnexpectedOpcode(const Instruction* inst, const ShadowFrame& shadow_frame) {
  LOG(FATAL) << "Unexpected instruction: "
             << inst->DumpString(shadow_frame.GetMethod()->GetDexFile());
  UNREACHABLE();
}

void AbortTransactionF(Thread* self, const char* fmt, ...) {
  va_list args;
  va_start(args, fmt);
  AbortTransactionV(self, fmt, args);
  va_end(args);
}

void AbortTransactionV(Thread* self, const char* fmt, va_list args) {
  CHECK(Runtime::Current()->IsActiveTransaction());
  // Constructs abort message.
  std::string abort_msg;
  android::base::StringAppendV(&abort_msg, fmt, args);
  // Throws an exception so we can abort the transaction and rollback every change.
  Runtime::Current()->AbortTransactionAndThrowAbortError(self, abort_msg);
}

// START DECLARATIONS :
//
// These additional declarations are required because clang complains
// about ALWAYS_INLINE (-Werror, -Wgcc-compat) in definitions.
//

template <bool is_range, bool do_assignability_check>
static ALWAYS_INLINE bool DoCallCommon(ArtMethod* called_method,
                                       Thread* self,
                                       ShadowFrame& shadow_frame,
                                       JValue* result,
                                       uint16_t number_of_inputs,
                                       uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                       uint32_t vregC) REQUIRES_SHARED(Locks::mutator_lock_);

template <bool is_range>
ALWAYS_INLINE void CopyRegisters(ShadowFrame& caller_frame,
                                 ShadowFrame* callee_frame,
                                 const uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                 const size_t first_src_reg,
                                 const size_t first_dest_reg,
                                 const size_t num_regs) REQUIRES_SHARED(Locks::mutator_lock_);

// END DECLARATIONS.

void ArtInterpreterToCompiledCodeBridge(Thread* self,
                                        ArtMethod* caller,
                                        ShadowFrame* shadow_frame,
                                        uint16_t arg_offset,
                                        JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ArtMethod* method = shadow_frame->GetMethod();
  // Ensure static methods are initialized.
  if (method->IsStatic()) {
    ObjPtr<mirror::Class> declaringClass = method->GetDeclaringClass();
    if (UNLIKELY(!declaringClass->IsInitialized())) {
      self->PushShadowFrame(shadow_frame);
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(declaringClass));
      if (UNLIKELY(!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true,
                                                                            true))) {
        self->PopShadowFrame();
        DCHECK(self->IsExceptionPending());
        return;
      }
      self->PopShadowFrame();
      CHECK(h_class->IsInitializing());
      // Reload from shadow frame in case the method moved; this is faster than adding a handle.
      method = shadow_frame->GetMethod();
    }
  }
  // Basic checks for the arg_offset. If there's no code item, the arg_offset must be 0. Otherwise,
  // check that the arg_offset isn't greater than the number of registers. A stronger check is
  // difficult since the frame may contain space for all the registers in the method, or only
  // enough space for the arguments.
  if (kIsDebugBuild) {
    if (method->GetCodeItem() == nullptr) {
      DCHECK_EQ(0u, arg_offset) << method->PrettyMethod();
    } else {
      DCHECK_LE(arg_offset, shadow_frame->NumberOfVRegs());
    }
  }
  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr && caller != nullptr) {
    jit->NotifyInterpreterToCompiledCodeTransition(self, caller);
  }
  method->Invoke(self, shadow_frame->GetVRegArgs(arg_offset),
                 (shadow_frame->NumberOfVRegs() - arg_offset) * sizeof(uint32_t),
                 result, method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty());
}

void SetStringInitValueToAllAliases(ShadowFrame* shadow_frame,
                                    uint16_t this_obj_vreg,
                                    JValue result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Object> existing = shadow_frame->GetVRegReference(this_obj_vreg);
  if (existing == nullptr) {
    // If it's null, we come from compiled code that was deoptimized. Nothing to do,
    // as the compiler verified there was no alias.
    // Set the new string result of the StringFactory.
    shadow_frame->SetVRegReference(this_obj_vreg, result.GetL());
    return;
  }
  // Set the string init result into all aliases.
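  // Scan every vreg in the frame: any reference slot still holding the original uninitialized
  // String receiver is an alias and is redirected to the StringFactory result.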
  for (uint32_t i = 0, e = shadow_frame->NumberOfVRegs(); i < e; ++i) {
    if (shadow_frame->GetVRegReference(i) == existing) {
      DCHECK_EQ(shadow_frame->GetVRegReference(i),
                reinterpret_cast<mirror::Object*>(shadow_frame->GetVReg(i)));
      shadow_frame->SetVRegReference(i, result.GetL());
      DCHECK_EQ(shadow_frame->GetVRegReference(i),
                reinterpret_cast<mirror::Object*>(shadow_frame->GetVReg(i)));
    }
  }
}

template<bool is_range>
static bool DoMethodHandleInvokeCommon(Thread* self,
                                       ShadowFrame& shadow_frame,
                                       bool invoke_exact,
                                       const Instruction* inst,
                                       uint16_t inst_data,
                                       JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Make sure to check for async exceptions
  if (UNLIKELY(self->ObserveAsyncException())) {
    return false;
  }
  // Invoke-polymorphic instructions always take a receiver, i.e. they are never static.
  const uint32_t vRegC = (is_range) ? inst->VRegC_4rcc() : inst->VRegC_45cc();
  const int invoke_method_idx = (is_range) ? inst->VRegB_4rcc() : inst->VRegB_45cc();

  // Initialize |result| to 0 as this is the default return value for
  // polymorphic invocations of method handle types with void return
  // and provides a sane return result in error cases.
  result->SetJ(0);

  // The invoke_method_idx here is the name of the signature polymorphic method that
  // was symbolically invoked in bytecode (say MethodHandle.invoke or MethodHandle.invokeExact)
  // and not the method that we'll dispatch to in the end.
  StackHandleScope<2> hs(self);
  Handle<mirror::MethodHandle> method_handle(hs.NewHandle(
      ObjPtr<mirror::MethodHandle>::DownCast(
          MakeObjPtr(shadow_frame.GetVRegReference(vRegC)))));
  if (UNLIKELY(method_handle == nullptr)) {
    // Note that the invoke type is kVirtual here because a call to a signature
    // polymorphic method is shaped like a virtual call at the bytecode level.
    ThrowNullPointerExceptionForMethodAccess(invoke_method_idx, InvokeType::kVirtual);
    return false;
  }

  // The vRegH value gives the index of the proto_id associated with this
  // signature polymorphic call site.
  const uint32_t callsite_proto_id = (is_range) ? inst->VRegH_4rcc() : inst->VRegH_45cc();

  // Call through to the classlinker and ask it to resolve the static type associated
  // with the callsite. This information is stored in the dex cache so it's
  // guaranteed to be fast after the first resolution.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Handle<mirror::MethodType> callsite_type(hs.NewHandle(
      class_linker->ResolveMethodType(self, callsite_proto_id, shadow_frame.GetMethod())));

  // This implies we couldn't resolve one or more types in this method handle.
  if (UNLIKELY(callsite_type == nullptr)) {
    CHECK(self->IsExceptionPending());
    return false;
  }

  // There is a common dispatch method for method handles that takes
  // arguments either from a range or an array of arguments depending
  // on whether the DEX instruction is invoke-polymorphic/range or
  // invoke-polymorphic. The array here is for the latter.
  if (UNLIKELY(is_range)) {
    // VRegC is the register holding the method handle. Arguments passed
    // to the method handle's target do not include the method handle.
    RangeInstructionOperands operands(inst->VRegC_4rcc() + 1, inst->VRegA_4rcc() - 1);
    if (invoke_exact) {
      return MethodHandleInvokeExact(self,
                                     shadow_frame,
                                     method_handle,
                                     callsite_type,
                                     &operands,
                                     result);
    } else {
      return MethodHandleInvoke(self,
                                shadow_frame,
                                method_handle,
                                callsite_type,
                                &operands,
                                result);
    }
  } else {
    // Get the register arguments for the invoke.
    uint32_t args[Instruction::kMaxVarArgRegs] = {};
    inst->GetVarArgs(args, inst_data);
    // Drop the first register which is the method handle performing the invoke.
    memmove(args, args + 1, sizeof(args[0]) * (Instruction::kMaxVarArgRegs - 1));
    args[Instruction::kMaxVarArgRegs - 1] = 0;
    VarArgsInstructionOperands operands(args, inst->VRegA_45cc() - 1);
    if (invoke_exact) {
      return MethodHandleInvokeExact(self,
                                     shadow_frame,
                                     method_handle,
                                     callsite_type,
                                     &operands,
                                     result);
    } else {
      return MethodHandleInvoke(self,
                                shadow_frame,
                                method_handle,
                                callsite_type,
                                &operands,
                                result);
    }
  }
}

bool DoMethodHandleInvokeExact(Thread* self,
                               ShadowFrame& shadow_frame,
                               const Instruction* inst,
                               uint16_t inst_data,
                               JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
  if (inst->Opcode() == Instruction::INVOKE_POLYMORPHIC) {
    static const bool kIsRange = false;
    return DoMethodHandleInvokeCommon<kIsRange>(
        self, shadow_frame, true /* is_exact */, inst, inst_data, result);
  } else {
    DCHECK_EQ(inst->Opcode(), Instruction::INVOKE_POLYMORPHIC_RANGE);
    static const bool kIsRange = true;
    return DoMethodHandleInvokeCommon<kIsRange>(
        self, shadow_frame, true /* is_exact */, inst, inst_data, result);
  }
}

bool DoMethodHandleInvoke(Thread* self,
                          ShadowFrame& shadow_frame,
                          const Instruction* inst,
                          uint16_t inst_data,
                          JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
  if (inst->Opcode() == Instruction::INVOKE_POLYMORPHIC) {
    static const bool kIsRange = false;
    return DoMethodHandleInvokeCommon<kIsRange>(
        self, shadow_frame, false /* is_exact */, inst, inst_data, result);
  } else {
    DCHECK_EQ(inst->Opcode(), Instruction::INVOKE_POLYMORPHIC_RANGE);
    static const bool kIsRange = true;
    return DoMethodHandleInvokeCommon<kIsRange>(
        self, shadow_frame, false /* is_exact */, inst, inst_data, result);
  }
}

static bool DoVarHandleInvokeChecked(Thread* self,
                                     Handle<mirror::VarHandle> var_handle,
                                     Handle<mirror::MethodType> callsite_type,
                                     mirror::VarHandle::AccessMode access_mode,
                                     ShadowFrame& shadow_frame,
                                     InstructionOperands* operands,
                                     JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // TODO(oth): GetMethodTypeForAccessMode() allocates a MethodType()
  // which is only required if we need to convert argument and/or
  // return types.
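  // Build a temporary accessor frame shaped by the access mode's method type, convert the
  // caller's arguments into it, perform the VarHandle access, then convert the return value
  // back to the callsite type.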
  StackHandleScope<1> hs(self);
  Handle<mirror::MethodType> accessor_type(hs.NewHandle(
      var_handle->GetMethodTypeForAccessMode(self, access_mode)));
  const size_t num_vregs = accessor_type->NumberOfVRegs();
  const int num_params = accessor_type->GetPTypes()->GetLength();
  ShadowFrameAllocaUniquePtr accessor_frame =
      CREATE_SHADOW_FRAME(num_vregs, nullptr, shadow_frame.GetMethod(), shadow_frame.GetDexPC());
  ShadowFrameGetter getter(shadow_frame, operands);
  static const uint32_t kFirstDestinationReg = 0;
  ShadowFrameSetter setter(accessor_frame.get(), kFirstDestinationReg);
  if (!PerformConversions(self, callsite_type, accessor_type, &getter, &setter, num_params)) {
    return false;
  }
  RangeInstructionOperands accessor_operands(kFirstDestinationReg,
                                             kFirstDestinationReg + num_vregs);
  if (!var_handle->Access(access_mode, accessor_frame.get(), &accessor_operands, result)) {
    return false;
  }
  return ConvertReturnValue(callsite_type, accessor_type, result);
}

static bool DoVarHandleInvokeCommon(Thread* self,
                                    ShadowFrame& shadow_frame,
                                    const Instruction* inst,
                                    uint16_t inst_data,
                                    JValue* result,
                                    mirror::VarHandle::AccessMode access_mode)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Make sure to check for async exceptions
  if (UNLIKELY(self->ObserveAsyncException())) {
    return false;
  }

  bool is_var_args = inst->HasVarArgs();
  const uint32_t vRegC = is_var_args ? inst->VRegC_45cc() : inst->VRegC_4rcc();
  ObjPtr<mirror::Object> receiver(shadow_frame.GetVRegReference(vRegC));
  if (receiver.IsNull()) {
    ThrowNullPointerExceptionFromDexPC();
    return false;
  }

  StackHandleScope<2> hs(self);
  Handle<mirror::VarHandle> var_handle(hs.NewHandle(down_cast<mirror::VarHandle*>(receiver.Ptr())));
  if (!var_handle->IsAccessModeSupported(access_mode)) {
    ThrowUnsupportedOperationException();
    return false;
  }

  const uint32_t vRegH = is_var_args ? inst->VRegH_45cc() : inst->VRegH_4rcc();
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  Handle<mirror::MethodType> callsite_type(hs.NewHandle(
      class_linker->ResolveMethodType(self, vRegH, shadow_frame.GetMethod())));
  // This implies we couldn't resolve one or more types in this VarHandle.
  if (UNLIKELY(callsite_type == nullptr)) {
    CHECK(self->IsExceptionPending());
    return false;
  }

  if (!var_handle->IsMethodTypeCompatible(access_mode, callsite_type.Get())) {
    ThrowWrongMethodTypeException(var_handle->GetMethodTypeForAccessMode(self, access_mode),
                                  callsite_type.Get());
    return false;
  }

  if (is_var_args) {
    uint32_t args[Instruction::kMaxVarArgRegs];
    inst->GetVarArgs(args, inst_data);
    VarArgsInstructionOperands all_operands(args, inst->VRegA_45cc());
    NoReceiverInstructionOperands operands(&all_operands);
    return DoVarHandleInvokeChecked(self,
                                    var_handle,
                                    callsite_type,
                                    access_mode,
                                    shadow_frame,
                                    &operands,
                                    result);
  } else {
    RangeInstructionOperands all_operands(inst->VRegC_4rcc(), inst->VRegA_4rcc());
    NoReceiverInstructionOperands operands(&all_operands);
    return DoVarHandleInvokeChecked(self,
                                    var_handle,
                                    callsite_type,
                                    access_mode,
                                    shadow_frame,
                                    &operands,
                                    result);
  }
}

#define DO_VAR_HANDLE_ACCESSOR(_access_mode) \
bool DoVarHandle ## _access_mode(Thread* self, \
                                 ShadowFrame& shadow_frame, \
                                 const Instruction* inst, \
                                 uint16_t inst_data, \
                                 JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) { \
  const auto access_mode = mirror::VarHandle::AccessMode::k ## _access_mode; \
  return DoVarHandleInvokeCommon(self, shadow_frame, inst, inst_data, result, access_mode); \
}

DO_VAR_HANDLE_ACCESSOR(CompareAndExchange)
DO_VAR_HANDLE_ACCESSOR(CompareAndExchangeAcquire)
DO_VAR_HANDLE_ACCESSOR(CompareAndExchangeRelease)
DO_VAR_HANDLE_ACCESSOR(CompareAndSet)
DO_VAR_HANDLE_ACCESSOR(Get)
DO_VAR_HANDLE_ACCESSOR(GetAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndAdd)
DO_VAR_HANDLE_ACCESSOR(GetAndAddAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndAddRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAnd)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAndAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAndRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOr)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOrAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOrRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXor)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXorAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXorRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndSet)
DO_VAR_HANDLE_ACCESSOR(GetAndSetAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndSetRelease)
DO_VAR_HANDLE_ACCESSOR(GetOpaque)
DO_VAR_HANDLE_ACCESSOR(GetVolatile)
DO_VAR_HANDLE_ACCESSOR(Set)
DO_VAR_HANDLE_ACCESSOR(SetOpaque)
DO_VAR_HANDLE_ACCESSOR(SetRelease)
DO_VAR_HANDLE_ACCESSOR(SetVolatile)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSet)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetAcquire)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetPlain)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetRelease)

#undef DO_VAR_HANDLE_ACCESSOR

template<bool is_range>
bool DoInvokePolymorphic(Thread* self,
                         ShadowFrame& shadow_frame,
                         const Instruction* inst,
                         uint16_t inst_data,
                         JValue* result) {
  const int invoke_method_idx = inst->VRegB();
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ArtMethod* invoke_method =
      class_linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
          self, invoke_method_idx, shadow_frame.GetMethod(), kVirtual);

  // Ensure intrinsic identifiers are initialized.
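  // The resolved signature-polymorphic method is expected to carry an intrinsic id (DCHECKed
  // below); the switch that follows maps that id to the matching DoMethodHandle* / DoVarHandle*
  // handler defined above.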
  DCHECK(invoke_method->IsIntrinsic());

  // Dispatch based on intrinsic identifier associated with method.
  switch (static_cast<art::Intrinsics>(invoke_method->GetIntrinsic())) {
#define CASE_SIGNATURE_POLYMORPHIC_INTRINSIC(Name, ...) \
    case Intrinsics::k##Name: \
      return Do ## Name(self, shadow_frame, inst, inst_data, result);
#include "intrinsics_list.h"
    SIGNATURE_POLYMORPHIC_INTRINSICS_LIST(CASE_SIGNATURE_POLYMORPHIC_INTRINSIC)
#undef INTRINSICS_LIST
#undef SIGNATURE_POLYMORPHIC_INTRINSICS_LIST
#undef CASE_SIGNATURE_POLYMORPHIC_INTRINSIC
    default:
      LOG(FATAL) << "Unreachable: " << invoke_method->GetIntrinsic();
      UNREACHABLE();
      return false;
  }
}

static ObjPtr<mirror::CallSite> InvokeBootstrapMethod(Thread* self,
                                                      ShadowFrame& shadow_frame,
                                                      uint32_t call_site_idx)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ArtMethod* referrer = shadow_frame.GetMethod();
  const DexFile* dex_file = referrer->GetDexFile();
  const DexFile::CallSiteIdItem& csi = dex_file->GetCallSiteId(call_site_idx);

  StackHandleScope<10> hs(self);
  Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
  Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));

  CallSiteArrayValueIterator it(*dex_file, csi);
  uint32_t method_handle_idx = static_cast<uint32_t>(it.GetJavaValue().i);
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Handle<mirror::MethodHandle>
      bootstrap(hs.NewHandle(class_linker->ResolveMethodHandle(self, method_handle_idx, referrer)));
  if (bootstrap.IsNull()) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }
  Handle<mirror::MethodType> bootstrap_method_type = hs.NewHandle(bootstrap->GetMethodType());
  it.Next();

  DCHECK_EQ(static_cast<size_t>(bootstrap->GetMethodType()->GetPTypes()->GetLength()), it.Size());
  const size_t num_bootstrap_vregs = bootstrap->GetMethodType()->NumberOfVRegs();

  // Set-up a shadow frame for invoking the bootstrap method handle.
  ShadowFrameAllocaUniquePtr bootstrap_frame =
      CREATE_SHADOW_FRAME(num_bootstrap_vregs, nullptr, referrer, shadow_frame.GetDexPC());
  ScopedStackedShadowFramePusher pusher(
      self, bootstrap_frame.get(), StackedShadowFrameType::kShadowFrameUnderConstruction);
  size_t vreg = 0;

  // The first parameter is a MethodHandles lookup instance.
  {
    Handle<mirror::Class> lookup_class =
        hs.NewHandle(shadow_frame.GetMethod()->GetDeclaringClass());
    ObjPtr<mirror::MethodHandlesLookup> lookup =
        mirror::MethodHandlesLookup::Create(self, lookup_class);
    if (lookup.IsNull()) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
    bootstrap_frame->SetVRegReference(vreg++, lookup.Ptr());
  }

  // The second parameter is the name to lookup.
  {
    dex::StringIndex name_idx(static_cast<uint32_t>(it.GetJavaValue().i));
    ObjPtr<mirror::String> name = class_linker->ResolveString(name_idx, dex_cache);
    if (name.IsNull()) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
    bootstrap_frame->SetVRegReference(vreg++, name.Ptr());
  }
  it.Next();

  // The third parameter is the method type associated with the name.
  uint32_t method_type_idx = static_cast<uint32_t>(it.GetJavaValue().i);
  Handle<mirror::MethodType> method_type(hs.NewHandle(
      class_linker->ResolveMethodType(self, method_type_idx, dex_cache, class_loader)));
  if (method_type.IsNull()) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }
  bootstrap_frame->SetVRegReference(vreg++, method_type.Get());
  it.Next();

  // Append remaining arguments (if any).
  while (it.HasNext()) {
    const jvalue& jvalue = it.GetJavaValue();
    switch (it.GetValueType()) {
      case EncodedArrayValueIterator::ValueType::kBoolean:
      case EncodedArrayValueIterator::ValueType::kByte:
      case EncodedArrayValueIterator::ValueType::kChar:
      case EncodedArrayValueIterator::ValueType::kShort:
      case EncodedArrayValueIterator::ValueType::kInt:
        bootstrap_frame->SetVReg(vreg, jvalue.i);
        vreg += 1;
        break;
      case EncodedArrayValueIterator::ValueType::kLong:
        bootstrap_frame->SetVRegLong(vreg, jvalue.j);
        vreg += 2;
        break;
      case EncodedArrayValueIterator::ValueType::kFloat:
        bootstrap_frame->SetVRegFloat(vreg, jvalue.f);
        vreg += 1;
        break;
      case EncodedArrayValueIterator::ValueType::kDouble:
        bootstrap_frame->SetVRegDouble(vreg, jvalue.d);
        vreg += 2;
        break;
      case EncodedArrayValueIterator::ValueType::kMethodType: {
        uint32_t idx = static_cast<uint32_t>(jvalue.i);
        ObjPtr<mirror::MethodType> ref =
            class_linker->ResolveMethodType(self, idx, dex_cache, class_loader);
        if (ref.IsNull()) {
          DCHECK(self->IsExceptionPending());
          return nullptr;
        }
        bootstrap_frame->SetVRegReference(vreg, ref.Ptr());
        vreg += 1;
        break;
      }
      case EncodedArrayValueIterator::ValueType::kMethodHandle: {
        uint32_t idx = static_cast<uint32_t>(jvalue.i);
        ObjPtr<mirror::MethodHandle> ref =
            class_linker->ResolveMethodHandle(self, idx, referrer);
        if (ref.IsNull()) {
          DCHECK(self->IsExceptionPending());
          return nullptr;
        }
        bootstrap_frame->SetVRegReference(vreg, ref.Ptr());
        vreg += 1;
        break;
      }
      case EncodedArrayValueIterator::ValueType::kString: {
        dex::StringIndex idx(static_cast<uint32_t>(jvalue.i));
        ObjPtr<mirror::String> ref = class_linker->ResolveString(idx, dex_cache);
        if (ref.IsNull()) {
          DCHECK(self->IsExceptionPending());
          return nullptr;
        }
        bootstrap_frame->SetVRegReference(vreg, ref.Ptr());
        vreg += 1;
        break;
      }
      case EncodedArrayValueIterator::ValueType::kType: {
        dex::TypeIndex idx(static_cast<uint32_t>(jvalue.i));
        ObjPtr<mirror::Class> ref = class_linker->ResolveType(idx, dex_cache, class_loader);
        if (ref.IsNull()) {
          DCHECK(self->IsExceptionPending());
          return nullptr;
        }
        bootstrap_frame->SetVRegReference(vreg, ref.Ptr());
        vreg += 1;
        break;
      }
      case EncodedArrayValueIterator::ValueType::kNull:
        bootstrap_frame->SetVRegReference(vreg, nullptr);
        vreg += 1;
        break;
      case EncodedArrayValueIterator::ValueType::kField:
      case EncodedArrayValueIterator::ValueType::kMethod:
      case EncodedArrayValueIterator::ValueType::kEnum:
      case EncodedArrayValueIterator::ValueType::kArray:
      case EncodedArrayValueIterator::ValueType::kAnnotation:
        // Unreachable based on current EncodedArrayValueIterator::Next().
        UNREACHABLE();
    }

    it.Next();
  }

  // Invoke the bootstrap method handle.
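  // The bootstrap handle is invoked exactly over the arguments marshalled into bootstrap_frame;
  // its result must be a non-null java.lang.invoke.CallSite, which is validated below before use.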
  JValue result;
  RangeInstructionOperands operands(0, vreg);
  bool invoke_success = MethodHandleInvokeExact(self,
                                                *bootstrap_frame,
                                                bootstrap,
                                                bootstrap_method_type,
                                                &operands,
                                                &result);
  if (!invoke_success) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  Handle<mirror::Object> object(hs.NewHandle(result.GetL()));
  if (UNLIKELY(object.IsNull())) {
    // This will typically be for LambdaMetafactory which is not supported.
    ThrowClassCastException("Bootstrap method returned null");
    return nullptr;
  }

  // Check the result type is a subclass of CallSite.
  if (UNLIKELY(!object->InstanceOf(mirror::CallSite::StaticClass()))) {
    ThrowClassCastException(object->GetClass(), mirror::CallSite::StaticClass());
    return nullptr;
  }

  Handle<mirror::CallSite> call_site =
      hs.NewHandle(ObjPtr<mirror::CallSite>::DownCast(ObjPtr<mirror::Object>(result.GetL())));
  // Check the call site target is not null as we're going to invoke it.
  Handle<mirror::MethodHandle> target = hs.NewHandle(call_site->GetTarget());
  if (UNLIKELY(target.IsNull())) {
    ThrowClassCastException("Bootstrap method did not return a callsite");
    return nullptr;
  }

  // Check that the target method type matches the requested method type, except that the
  // receiver only needs to be compatible rather than an exact match.
  Handle<mirror::MethodType> target_method_type = hs.NewHandle(target->GetMethodType());
  if (UNLIKELY(!target_method_type->IsExactMatch(method_type.Get()) &&
               !IsParameterTypeConvertible(target_method_type->GetPTypes()->GetWithoutChecks(0),
                                           method_type->GetPTypes()->GetWithoutChecks(0)))) {
    ThrowWrongMethodTypeException(target_method_type.Get(), method_type.Get());
    return nullptr;
  }

  return call_site.Get();
}

template<bool is_range>
bool DoInvokeCustom(Thread* self,
                    ShadowFrame& shadow_frame,
                    const Instruction* inst,
                    uint16_t inst_data,
                    JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Make sure to check for async exceptions
  if (UNLIKELY(self->ObserveAsyncException())) {
    return false;
  }
  // invoke-custom is not supported in transactions. In transactions
  // there is a limited set of types supported. invoke-custom allows
  // running arbitrary code and instantiating arbitrary types.
  CHECK(!Runtime::Current()->IsActiveTransaction());
  StackHandleScope<4> hs(self);
  Handle<mirror::DexCache> dex_cache(hs.NewHandle(shadow_frame.GetMethod()->GetDexCache()));
  const uint32_t call_site_idx = is_range ? inst->VRegB_3rc() : inst->VRegB_35c();
  MutableHandle<mirror::CallSite>
      call_site(hs.NewHandle(dex_cache->GetResolvedCallSite(call_site_idx)));
  if (call_site.IsNull()) {
    call_site.Assign(InvokeBootstrapMethod(self, shadow_frame, call_site_idx));
    if (UNLIKELY(call_site.IsNull())) {
      CHECK(self->IsExceptionPending());
      ThrowWrappedBootstrapMethodError("Exception from call site #%u bootstrap method",
                                       call_site_idx);
      result->SetJ(0);
      return false;
    }
    mirror::CallSite* winning_call_site =
        dex_cache->SetResolvedCallSite(call_site_idx, call_site.Get());
    call_site.Assign(winning_call_site);
  }

  // CallSite.java checks the re-assignment of the call site target
  // when mutating call site targets. We only check the target is
  // non-null and has the right type during bootstrap method execution.
  Handle<mirror::MethodHandle> target = hs.NewHandle(call_site->GetTarget());
  Handle<mirror::MethodType> target_method_type = hs.NewHandle(target->GetMethodType());
  DCHECK_EQ(static_cast<size_t>(inst->VRegA()), target_method_type->NumberOfVRegs());
  if (is_range) {
    RangeInstructionOperands operands(inst->VRegC_3rc(), inst->VRegA_3rc());
    return MethodHandleInvokeExact(self,
                                   shadow_frame,
                                   target,
                                   target_method_type,
                                   &operands,
                                   result);
  } else {
    uint32_t args[Instruction::kMaxVarArgRegs];
    inst->GetVarArgs(args, inst_data);
    VarArgsInstructionOperands operands(args, inst->VRegA_35c());
    return MethodHandleInvokeExact(self,
                                   shadow_frame,
                                   target,
                                   target_method_type,
                                   &operands,
                                   result);
  }
}

template <bool is_range>
inline void CopyRegisters(ShadowFrame& caller_frame,
                          ShadowFrame* callee_frame,
                          const uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                          const size_t first_src_reg,
                          const size_t first_dest_reg,
                          const size_t num_regs) {
  if (is_range) {
    const size_t dest_reg_bound = first_dest_reg + num_regs;
    for (size_t src_reg = first_src_reg, dest_reg = first_dest_reg; dest_reg < dest_reg_bound;
         ++dest_reg, ++src_reg) {
      AssignRegister(callee_frame, caller_frame, dest_reg, src_reg);
    }
  } else {
    DCHECK_LE(num_regs, arraysize(arg));

    for (size_t arg_index = 0; arg_index < num_regs; ++arg_index) {
      AssignRegister(callee_frame, caller_frame, first_dest_reg + arg_index, arg[arg_index]);
    }
  }
}

template <bool is_range,
          bool do_assignability_check>
static inline bool DoCallCommon(ArtMethod* called_method,
                                Thread* self,
                                ShadowFrame& shadow_frame,
                                JValue* result,
                                uint16_t number_of_inputs,
                                uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                uint32_t vregC) {
  bool string_init = false;
  // Replace calls to String.<init> with equivalent StringFactory call.
  if (UNLIKELY(called_method->GetDeclaringClass()->IsStringClass()
               && called_method->IsConstructor())) {
    called_method = WellKnownClasses::StringInitToStringFactory(called_method);
    string_init = true;
  }

  // Compute method information.
  CodeItemDataAccessor accessor(called_method->DexInstructionData());
  // Number of registers for the callee's call frame.
  uint16_t num_regs;
  // Test whether to use the interpreter or compiler entrypoint, and save that result to pass to
  // PerformCall. A deoptimization could occur at any time, and we shouldn't change which
  // entrypoint to use once we start building the shadow frame.

  // For unstarted runtimes, always use the interpreter entrypoint. This fixes the case where we
  // are doing cross compilation. Note that GetEntryPointFromQuickCompiledCode doesn't use the
  // image pointer size here and this may cause an overflow if it is called from the compiler.
  // b/62402160
  const bool use_interpreter_entrypoint = !Runtime::Current()->IsStarted() ||
      ClassLinker::ShouldUseInterpreterEntrypoint(
          called_method,
          called_method->GetEntryPointFromQuickCompiledCode());
  if (LIKELY(accessor.HasCodeItem())) {
    // When transitioning to compiled code, space only needs to be reserved for the input registers.
    // The rest of the frame gets discarded. This also prevents accessing the called method's code
    // item, saving memory by keeping code items of compiled code untouched.
    if (!use_interpreter_entrypoint) {
      DCHECK(!Runtime::Current()->IsAotCompiler()) << "Compiler should use interpreter entrypoint";
      num_regs = number_of_inputs;
    } else {
      num_regs = accessor.RegistersSize();
      DCHECK_EQ(string_init ? number_of_inputs - 1 : number_of_inputs, accessor.InsSize());
    }
  } else {
    DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
    num_regs = number_of_inputs;
  }

  // Hack for String init:
  //
  // Rewrite invoke-x java.lang.String.<init>(this, a, b, c, ...) into:
  //         invoke-x StringFactory(a, b, c, ...)
  // by effectively dropping the first virtual register from the invoke.
  //
  // (at this point the ArtMethod has already been replaced,
  // so we just need to fix-up the arguments)
  //
  // Note that FindMethodFromCode in entrypoint_utils-inl.h was also special-cased
  // to handle the compiler optimization of replacing `this` with null without
  // throwing NullPointerException.
  uint32_t string_init_vreg_this = is_range ? vregC : arg[0];
  if (UNLIKELY(string_init)) {
    DCHECK_GT(num_regs, 0u);  // As the method is an instance method, there should be at least 1.

    // The new StringFactory call is static and has one fewer argument.
    if (!accessor.HasCodeItem()) {
      DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
      num_regs--;
    }  // else ... don't need to change num_regs since it comes up from the string_init's code item
    number_of_inputs--;

    // Rewrite the var-args, dropping the 0th argument ("this")
    for (uint32_t i = 1; i < arraysize(arg); ++i) {
      arg[i - 1] = arg[i];
    }
    arg[arraysize(arg) - 1] = 0;

    // Rewrite the non-var-arg case
    vregC++;  // Skips the 0th vreg in the range ("this").
  }

  // Parameter registers go at the end of the shadow frame.
  DCHECK_GE(num_regs, number_of_inputs);
  size_t first_dest_reg = num_regs - number_of_inputs;
  DCHECK_NE(first_dest_reg, (size_t)-1);

  // Allocate shadow frame on the stack.
  const char* old_cause = self->StartAssertNoThreadSuspension("DoCallCommon");
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, &shadow_frame, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();

  // Initialize the new shadow frame by copying the registers from the caller's shadow frame.
  if (do_assignability_check) {
    // Slow path.
    // We might need to do class loading, which incurs a thread state change to kNative. So
    // register the shadow frame as under construction and allow suspension again.
    ScopedStackedShadowFramePusher pusher(
        self, new_shadow_frame, StackedShadowFrameType::kShadowFrameUnderConstruction);
    self->EndAssertNoThreadSuspension(old_cause);

    // ArtMethod here is needed to check type information of the call site against the callee.
    // Type information is retrieved from a DexFile/DexCache for that respective declared method.
    //
    // As a special case for proxy methods, which are not dex-backed,
    // we have to retrieve type information from the proxy's method
    // interface method instead (which is dex backed since proxies are never interfaces).
    ArtMethod* method =
        new_shadow_frame->GetMethod()->GetInterfaceMethodIfProxy(kRuntimePointerSize);

    // We need to do runtime check on reference assignment. We need to load the shorty
    // to get the exact type of each reference argument.
    const DexFile::TypeList* params = method->GetParameterTypeList();
    uint32_t shorty_len = 0;
    const char* shorty = method->GetShorty(&shorty_len);

    // Handle receiver apart since it's not part of the shorty.
    size_t dest_reg = first_dest_reg;
    size_t arg_offset = 0;

    if (!method->IsStatic()) {
      size_t receiver_reg = is_range ? vregC : arg[0];
      new_shadow_frame->SetVRegReference(dest_reg, shadow_frame.GetVRegReference(receiver_reg));
      ++dest_reg;
      ++arg_offset;
      DCHECK(!string_init);  // All StringFactory methods are static.
    }

    // Copy the caller's invoke-* arguments into the callee's parameter registers.
    for (uint32_t shorty_pos = 0; dest_reg < num_regs; ++shorty_pos, ++dest_reg, ++arg_offset) {
      // Skip the 0th 'shorty' type since it represents the return type.
      DCHECK_LT(shorty_pos + 1, shorty_len) << "for shorty '" << shorty << "'";
      const size_t src_reg = (is_range) ? vregC + arg_offset : arg[arg_offset];
      switch (shorty[shorty_pos + 1]) {
        // Handle Object references. 1 virtual register slot.
        case 'L': {
          ObjPtr<mirror::Object> o = shadow_frame.GetVRegReference(src_reg);
          if (do_assignability_check && o != nullptr) {
            const dex::TypeIndex type_idx = params->GetTypeItem(shorty_pos).type_idx_;
            ObjPtr<mirror::Class> arg_type = method->GetDexCache()->GetResolvedType(type_idx);
            if (arg_type == nullptr) {
              StackHandleScope<1> hs(self);
              // Preserve o since it is used below and GetClassFromTypeIndex may cause thread
              // suspension.
              HandleWrapperObjPtr<mirror::Object> h = hs.NewHandleWrapper(&o);
              arg_type = method->ResolveClassFromTypeIndex(type_idx);
              if (arg_type == nullptr) {
                CHECK(self->IsExceptionPending());
                return false;
              }
            }
            if (!o->VerifierInstanceOf(arg_type)) {
              // This should never happen.
              std::string temp1, temp2;
              self->ThrowNewExceptionF("Ljava/lang/InternalError;",
                                       "Invoking %s with bad arg %d, type '%s' not instance of '%s'",
                                       new_shadow_frame->GetMethod()->GetName(), shorty_pos,
                                       o->GetClass()->GetDescriptor(&temp1),
                                       arg_type->GetDescriptor(&temp2));
              return false;
            }
          }
          new_shadow_frame->SetVRegReference(dest_reg, o.Ptr());
          break;
        }
        // Handle doubles and longs. 2 consecutive virtual register slots.
        case 'J': case 'D': {
          uint64_t wide_value =
              (static_cast<uint64_t>(shadow_frame.GetVReg(src_reg + 1)) << BitSizeOf<uint32_t>()) |
              static_cast<uint32_t>(shadow_frame.GetVReg(src_reg));
          new_shadow_frame->SetVRegLong(dest_reg, wide_value);
          // Skip the next virtual register slot since we already used it.
          ++dest_reg;
          ++arg_offset;
          break;
        }
        // Handle all other primitives that are always 1 virtual register slot.
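        // ('Z', 'B', 'C', 'S', 'I' and 'F' in the shorty); the raw 32-bit vreg value is copied.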
        default:
          new_shadow_frame->SetVReg(dest_reg, shadow_frame.GetVReg(src_reg));
          break;
      }
    }
  } else {
    if (is_range) {
      DCHECK_EQ(num_regs, first_dest_reg + number_of_inputs);
    }

    CopyRegisters<is_range>(shadow_frame,
                            new_shadow_frame,
                            arg,
                            vregC,
                            first_dest_reg,
                            number_of_inputs);
    self->EndAssertNoThreadSuspension(old_cause);
  }

  PerformCall(self,
              accessor,
              shadow_frame.GetMethod(),
              first_dest_reg,
              new_shadow_frame,
              result,
              use_interpreter_entrypoint);

  if (string_init && !self->IsExceptionPending()) {
    SetStringInitValueToAllAliases(&shadow_frame, string_init_vreg_this, *result);
  }

  return !self->IsExceptionPending();
}

template<bool is_range, bool do_assignability_check>
bool DoCall(ArtMethod* called_method, Thread* self, ShadowFrame& shadow_frame,
            const Instruction* inst, uint16_t inst_data, JValue* result) {
  // Argument word count.
  const uint16_t number_of_inputs =
      (is_range) ? inst->VRegA_3rc(inst_data) : inst->VRegA_35c(inst_data);

  // TODO: find a cleaner way to separate non-range and range information without duplicating
  // code.
  uint32_t arg[Instruction::kMaxVarArgRegs] = {};  // only used in invoke-XXX.
  uint32_t vregC = 0;
  if (is_range) {
    vregC = inst->VRegC_3rc();
  } else {
    vregC = inst->VRegC_35c();
    inst->GetVarArgs(arg, inst_data);
  }

  return DoCallCommon<is_range, do_assignability_check>(
      called_method, self, shadow_frame,
      result, number_of_inputs, arg, vregC);
}

template <bool is_range, bool do_access_check, bool transaction_active>
bool DoFilledNewArray(const Instruction* inst,
                      const ShadowFrame& shadow_frame,
                      Thread* self,
                      JValue* result) {
  DCHECK(inst->Opcode() == Instruction::FILLED_NEW_ARRAY ||
         inst->Opcode() == Instruction::FILLED_NEW_ARRAY_RANGE);
  const int32_t length = is_range ? inst->VRegA_3rc() : inst->VRegA_35c();
  if (!is_range) {
    // Checks FILLED_NEW_ARRAY's length does not exceed 5 arguments.
    CHECK_LE(length, 5);
  }
  if (UNLIKELY(length < 0)) {
    ThrowNegativeArraySizeException(length);
    return false;
  }
  uint16_t type_idx = is_range ? inst->VRegB_3rc() : inst->VRegB_35c();
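  // Resolve the array class, performing an access check when requested; a null result means an
  // exception is already pending.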
  ObjPtr<mirror::Class> array_class = ResolveVerifyAndClinit(dex::TypeIndex(type_idx),
                                                             shadow_frame.GetMethod(),
                                                             self,
                                                             false,
                                                             do_access_check);
  if (UNLIKELY(array_class == nullptr)) {
    DCHECK(self->IsExceptionPending());
    return false;
  }
  CHECK(array_class->IsArrayClass());
  ObjPtr<mirror::Class> component_class = array_class->GetComponentType();
  const bool is_primitive_int_component = component_class->IsPrimitiveInt();
  if (UNLIKELY(component_class->IsPrimitive() && !is_primitive_int_component)) {
    if (component_class->IsPrimitiveLong() || component_class->IsPrimitiveDouble()) {
      ThrowRuntimeException("Bad filled array request for type %s",
                            component_class->PrettyDescriptor().c_str());
    } else {
      self->ThrowNewExceptionF("Ljava/lang/InternalError;",
                               "Found type %s; filled-new-array not implemented for anything but 'int'",
                               component_class->PrettyDescriptor().c_str());
    }
    return false;
  }
  ObjPtr<mirror::Object> new_array = mirror::Array::Alloc<true>(
      self,
      array_class,
      length,
      array_class->GetComponentSizeShift(),
      Runtime::Current()->GetHeap()->GetCurrentAllocator());
  if (UNLIKELY(new_array == nullptr)) {
    self->AssertPendingOOMException();
    return false;
  }
  uint32_t arg[Instruction::kMaxVarArgRegs];  // only used in filled-new-array.
  uint32_t vregC = 0;  // only used in filled-new-array-range.
  if (is_range) {
    vregC = inst->VRegC_3rc();
  } else {
    inst->GetVarArgs(arg);
  }
  for (int32_t i = 0; i < length; ++i) {
    size_t src_reg = is_range ? vregC + i : arg[i];
    if (is_primitive_int_component) {
      new_array->AsIntArray()->SetWithoutChecks<transaction_active>(
          i, shadow_frame.GetVReg(src_reg));
    } else {
      new_array->AsObjectArray<mirror::Object>()->SetWithoutChecks<transaction_active>(
          i, shadow_frame.GetVRegReference(src_reg));
    }
  }

  result->SetL(new_array);
  return true;
}
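
// When a transaction is active (which only happens while the AOT compiler pre-initializes
// classes), the helpers below record the current value of each array element that
// fill-array-data is about to overwrite, so the write can be rolled back if the transaction
// aborts.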

// TODO: Use ObjPtr here.
template<typename T>
static void RecordArrayElementsInTransactionImpl(mirror::PrimitiveArray<T>* array,
                                                 int32_t count)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Runtime* runtime = Runtime::Current();
  for (int32_t i = 0; i < count; ++i) {
    runtime->RecordWriteArray(array, i, array->GetWithoutChecks(i));
  }
}

void RecordArrayElementsInTransaction(ObjPtr<mirror::Array> array, int32_t count)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(Runtime::Current()->IsActiveTransaction());
  DCHECK(array != nullptr);
  DCHECK_LE(count, array->GetLength());
  Primitive::Type primitive_component_type =
      array->GetClass()->GetComponentType()->GetPrimitiveType();
  switch (primitive_component_type) {
    case Primitive::kPrimBoolean:
      RecordArrayElementsInTransactionImpl(array->AsBooleanArray(), count);
      break;
    case Primitive::kPrimByte:
      RecordArrayElementsInTransactionImpl(array->AsByteArray(), count);
      break;
    case Primitive::kPrimChar:
      RecordArrayElementsInTransactionImpl(array->AsCharArray(), count);
      break;
    case Primitive::kPrimShort:
      RecordArrayElementsInTransactionImpl(array->AsShortArray(), count);
      break;
    case Primitive::kPrimInt:
      RecordArrayElementsInTransactionImpl(array->AsIntArray(), count);
      break;
    case Primitive::kPrimFloat:
      RecordArrayElementsInTransactionImpl(array->AsFloatArray(), count);
      break;
    case Primitive::kPrimLong:
      RecordArrayElementsInTransactionImpl(array->AsLongArray(), count);
      break;
    case Primitive::kPrimDouble:
      RecordArrayElementsInTransactionImpl(array->AsDoubleArray(), count);
      break;
    default:
      LOG(FATAL) << "Unsupported primitive type " << primitive_component_type
                 << " in fill-array-data";
      break;
  }
}
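
// The templates instantiated below are defined in this translation unit, so each combination of
// template arguments used by the interpreter implementations is instantiated explicitly here to
// make it available at link time.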

// Explicit DoCall template function declarations.
#define EXPLICIT_DO_CALL_TEMPLATE_DECL(_is_range, _do_assignability_check)                      \
  template REQUIRES_SHARED(Locks::mutator_lock_)                                                \
  bool DoCall<_is_range, _do_assignability_check>(ArtMethod* method, Thread* self,              \
                                                  ShadowFrame& shadow_frame,                    \
                                                  const Instruction* inst, uint16_t inst_data,  \
                                                  JValue* result)
EXPLICIT_DO_CALL_TEMPLATE_DECL(false, false);
EXPLICIT_DO_CALL_TEMPLATE_DECL(false, true);
EXPLICIT_DO_CALL_TEMPLATE_DECL(true, false);
EXPLICIT_DO_CALL_TEMPLATE_DECL(true, true);
#undef EXPLICIT_DO_CALL_TEMPLATE_DECL

// Explicit DoInvokePolymorphic template function declarations.
#define EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(_is_range)           \
  template REQUIRES_SHARED(Locks::mutator_lock_)                          \
  bool DoInvokePolymorphic<_is_range>(                                    \
      Thread* self, ShadowFrame& shadow_frame, const Instruction* inst,   \
      uint16_t inst_data, JValue* result)
EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(false);
EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(true);
#undef EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL

// Explicit DoInvokeCustom template function declarations.
#define EXPLICIT_DO_INVOKE_CUSTOM_TEMPLATE_DECL(_is_range)                \
  template REQUIRES_SHARED(Locks::mutator_lock_)                          \
  bool DoInvokeCustom<_is_range>(                                         \
      Thread* self, ShadowFrame& shadow_frame, const Instruction* inst,   \
      uint16_t inst_data, JValue* result)
EXPLICIT_DO_INVOKE_CUSTOM_TEMPLATE_DECL(false);
EXPLICIT_DO_INVOKE_CUSTOM_TEMPLATE_DECL(true);
#undef EXPLICIT_DO_INVOKE_CUSTOM_TEMPLATE_DECL

// Explicit DoFilledNewArray template function declarations.
#define EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(_is_range_, _check, _transaction_active)        \
  template REQUIRES_SHARED(Locks::mutator_lock_)                                                   \
  bool DoFilledNewArray<_is_range_, _check, _transaction_active>(const Instruction* inst,          \
                                                                 const ShadowFrame& shadow_frame,  \
                                                                 Thread* self, JValue* result)
#define EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(_transaction_active)        \
  EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(false, false, _transaction_active);   \
  EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(false, true, _transaction_active);    \
  EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(true, false, _transaction_active);    \
  EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(true, true, _transaction_active)
EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(false);
EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(true);
#undef EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL
#undef EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL

}  // namespace interpreter
}  // namespace art