/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Perform some simple bytecode optimizations, chiefly "quickening" of
 * opcodes.
 */
#include "Dalvik.h"
#include "libdex/InstrUtils.h"
#include "Optimize.h"

#include <zlib.h>

#include <stdlib.h>

/*
 * Virtual/direct calls to "method" are replaced with an execute-inline
 * instruction with index "idx".
 */
struct InlineSub {
    Method* method;
    int     inlineIdx;
};


/* fwd */
static void optimizeMethod(Method* method, bool essentialOnly);
static void rewriteInstField(Method* method, u2* insns, Opcode quickOpc,
    Opcode volatileOpc);
static void rewriteJumboInstField(Method* method, u2* insns,
    Opcode volatileOpc);
static void rewriteStaticField(Method* method, u2* insns, Opcode volatileOpc);
static void rewriteJumboStaticField(Method* method, u2* insns,
    Opcode volatileOpc);
static void rewriteVirtualInvoke(Method* method, u2* insns, Opcode newOpc);
static bool rewriteInvokeObjectInit(Method* method, u2* insns);
static bool rewriteJumboInvokeObjectInit(Method* method, u2* insns);
static bool rewriteExecuteInline(Method* method, u2* insns,
    MethodType methodType);
static bool rewriteExecuteInlineRange(Method* method, u2* insns,
    MethodType methodType);
static void rewriteReturnVoid(Method* method, u2* insns);
static bool needsReturnBarrier(Method* method);


/*
 * Create a table of inline substitutions.  Sets gDvm.inlineSubs.
 *
 * TODO: this is currently just a linear array.  We will want to put this
 * into a hash table as the list size increases.
 */
bool dvmCreateInlineSubsTable()
{
    const InlineOperation* ops = dvmGetInlineOpsTable();
    const int count = dvmGetInlineOpsTableLength();
    InlineSub* table;
    int i, tableIndex;

    assert(gDvm.inlineSubs == NULL);

    /*
     * One slot per entry, plus an end-of-list marker.
     */
    table = (InlineSub*) calloc(count + 1, sizeof(InlineSub));
    if (table == NULL) {
        /* allocation failure is unexpected, but don't crash on it */
        LOGE("Unable to allocate inline subs table");
        return false;
    }

    tableIndex = 0;
    for (i = 0; i < count; i++) {
        Method* method = dvmFindInlinableMethod(ops[i].classDescriptor,
            ops[i].methodName, ops[i].methodSignature);
        if (method == NULL) {
            /*
             * Not expected.  We only use this for key methods in core
             * classes, so we should always be able to find them.
             */
            LOGE("Unable to find method for inlining: %s.%s:%s",
                ops[i].classDescriptor, ops[i].methodName,
                ops[i].methodSignature);
            return false;
        }

        table[tableIndex].method = method;
        table[tableIndex].inlineIdx = i;
        tableIndex++;
    }

    /* mark end of table */
    table[tableIndex].method = NULL;

    gDvm.inlineSubs = table;
    return true;
}

/*
 * Release inline sub data structure.
 */
void dvmFreeInlineSubsTable()
{
    free(gDvm.inlineSubs);
    gDvm.inlineSubs = NULL;
}

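/*
 * Illustrative sketch (not from the original comments): the table built
 * above is consumed by a sentinel-terminated linear scan, as in
 * rewriteExecuteInline() below:
 *
 *   for (const InlineSub* sub = gDvm.inlineSubs; sub->method != NULL; sub++) {
 *       if (sub->method == calledMethod)
 *           ...rewrite the call site using sub->inlineIdx...
 *   }
 */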

/*
 * Optimize the specified class.
 *
 * If "essentialOnly" is true, we only do essential optimizations.  For
 * example, accesses to volatile 64-bit fields must be replaced with
 * "-wide-volatile" instructions or the program could behave incorrectly.
 * (Skipping non-essential optimizations makes us a little bit faster, and
 * more importantly avoids dirtying DEX pages.)
 */
void dvmOptimizeClass(ClassObject* clazz, bool essentialOnly)
{
    int i;

    for (i = 0; i < clazz->directMethodCount; i++) {
        optimizeMethod(&clazz->directMethods[i], essentialOnly);
    }
    for (i = 0; i < clazz->virtualMethodCount; i++) {
        optimizeMethod(&clazz->virtualMethods[i], essentialOnly);
    }
}

/*
 * Optimize instructions in a method.
 *
 * This does a single pass through the code, examining each instruction.
 *
 * This is not expected to fail if the class was successfully verified.
 * The only significant failure modes on unverified code occur when an
 * "essential" update fails, but we can't generally identify those: if we
 * can't look up a field, we can't know if the field access was supposed
 * to be handled as volatile.
 *
 * Instead, we give it our best effort, and hope for the best.  For 100%
 * reliability, only optimize a class after verification succeeds.
 */
static void optimizeMethod(Method* method, bool essentialOnly)
{
    bool needRetBar, forSmp;
    u4 insnsSize;
    u2* insns;

    if (dvmIsNativeMethod(method) || dvmIsAbstractMethod(method))
        return;

    forSmp = gDvm.dexOptForSmp;
    needRetBar = needsReturnBarrier(method);

    insns = (u2*) method->insns;
    assert(insns != NULL);
    insnsSize = dvmGetMethodInsnsSize(method);

    while (insnsSize > 0) {
        Opcode opc, quickOpc, volatileOpc;
        size_t width;
        bool matched = true;

        opc = dexOpcodeFromCodeUnit(*insns);
        width = dexGetWidthFromInstruction(insns);
        volatileOpc = OP_NOP;

        /*
         * Each instruction may have:
         * - "volatile" replacement
         *   - may be essential or essential-on-SMP
         * - correctness replacement
         *   - may be essential or essential-on-SMP
         * - performance replacement
         *   - always non-essential
         *
         * Replacements are considered in the order shown, and the first
         * match is applied.  For example, iget-wide will convert to
         * iget-wide-volatile rather than iget-wide-quick if the target
         * field is volatile.
         */
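        /*
         * Worked example (illustrative): for the code unit 0x1052, i.e.
         * "iget v0, v1, field@....", dexOpcodeFromCodeUnit() above yields
         * OP_IGET and dexGetWidthFromInstruction() reports 2 code units,
         * so the switch below picks the instance-field rewrite and the
         * loop then advances "insns" by 2.
         */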

        /*
         * essential substitutions:
         *  {iget,iput,sget,sput}-wide[/jumbo] --> {op}-wide-volatile
         *  invoke-direct[/jumbo][/range] --> invoke-object-init/range
         *
         * essential-on-SMP substitutions:
         *  {iget,iput,sget,sput}-*[/jumbo] --> {op}-volatile
         *  return-void --> return-void-barrier
         *
         * non-essential substitutions:
         *  {iget,iput}-* --> {op}-quick
         *
         * TODO: might be time to merge this with the other two switches
         */
        switch (opc) {
        case OP_IGET:
        case OP_IGET_BOOLEAN:
        case OP_IGET_BYTE:
        case OP_IGET_CHAR:
        case OP_IGET_SHORT:
            quickOpc = OP_IGET_QUICK;
            if (forSmp)
                volatileOpc = OP_IGET_VOLATILE;
            goto rewrite_inst_field;
        case OP_IGET_WIDE:
            quickOpc = OP_IGET_WIDE_QUICK;
            volatileOpc = OP_IGET_WIDE_VOLATILE;
            goto rewrite_inst_field;
        case OP_IGET_OBJECT:
            quickOpc = OP_IGET_OBJECT_QUICK;
            if (forSmp)
                volatileOpc = OP_IGET_OBJECT_VOLATILE;
            goto rewrite_inst_field;
        case OP_IPUT:
        case OP_IPUT_BOOLEAN:
        case OP_IPUT_BYTE:
        case OP_IPUT_CHAR:
        case OP_IPUT_SHORT:
            quickOpc = OP_IPUT_QUICK;
            if (forSmp)
                volatileOpc = OP_IPUT_VOLATILE;
            goto rewrite_inst_field;
        case OP_IPUT_WIDE:
            quickOpc = OP_IPUT_WIDE_QUICK;
            volatileOpc = OP_IPUT_WIDE_VOLATILE;
            goto rewrite_inst_field;
        case OP_IPUT_OBJECT:
            quickOpc = OP_IPUT_OBJECT_QUICK;
            if (forSmp)
                volatileOpc = OP_IPUT_OBJECT_VOLATILE;
            /* fall through */
        rewrite_inst_field:
            if (essentialOnly)
                quickOpc = OP_NOP;      /* if essential-only, no "-quick" sub */
            if (quickOpc != OP_NOP || volatileOpc != OP_NOP)
                rewriteInstField(method, insns, quickOpc, volatileOpc);
            break;

        case OP_IGET_JUMBO:
        case OP_IGET_BOOLEAN_JUMBO:
        case OP_IGET_BYTE_JUMBO:
        case OP_IGET_CHAR_JUMBO:
        case OP_IGET_SHORT_JUMBO:
            if (forSmp)
                volatileOpc = OP_IGET_VOLATILE_JUMBO;
            goto rewrite_jumbo_inst_field;
        case OP_IGET_WIDE_JUMBO:
            volatileOpc = OP_IGET_WIDE_VOLATILE_JUMBO;
            goto rewrite_jumbo_inst_field;
        case OP_IGET_OBJECT_JUMBO:
            if (forSmp)
                volatileOpc = OP_IGET_OBJECT_VOLATILE_JUMBO;
            goto rewrite_jumbo_inst_field;
        case OP_IPUT_JUMBO:
        case OP_IPUT_BOOLEAN_JUMBO:
        case OP_IPUT_BYTE_JUMBO:
        case OP_IPUT_CHAR_JUMBO:
        case OP_IPUT_SHORT_JUMBO:
            if (forSmp)
                volatileOpc = OP_IPUT_VOLATILE_JUMBO;
            goto rewrite_jumbo_inst_field;
        case OP_IPUT_WIDE_JUMBO:
            volatileOpc = OP_IPUT_WIDE_VOLATILE_JUMBO;
            goto rewrite_jumbo_inst_field;
        case OP_IPUT_OBJECT_JUMBO:
            if (forSmp)
                volatileOpc = OP_IPUT_OBJECT_VOLATILE_JUMBO;
            /* fall through */
        rewrite_jumbo_inst_field:
            if (volatileOpc != OP_NOP)
                rewriteJumboInstField(method, insns, volatileOpc);
            break;

        case OP_SGET:
        case OP_SGET_BOOLEAN:
        case OP_SGET_BYTE:
        case OP_SGET_CHAR:
        case OP_SGET_SHORT:
            if (forSmp)
                volatileOpc = OP_SGET_VOLATILE;
            goto rewrite_static_field;
        case OP_SGET_WIDE:
            volatileOpc = OP_SGET_WIDE_VOLATILE;
            goto rewrite_static_field;
        case OP_SGET_OBJECT:
            if (forSmp)
                volatileOpc = OP_SGET_OBJECT_VOLATILE;
            goto rewrite_static_field;
        case OP_SPUT:
        case OP_SPUT_BOOLEAN:
        case OP_SPUT_BYTE:
        case OP_SPUT_CHAR:
        case OP_SPUT_SHORT:
            if (forSmp)
                volatileOpc = OP_SPUT_VOLATILE;
            goto rewrite_static_field;
        case OP_SPUT_WIDE:
            volatileOpc = OP_SPUT_WIDE_VOLATILE;
            goto rewrite_static_field;
        case OP_SPUT_OBJECT:
            if (forSmp)
                volatileOpc = OP_SPUT_OBJECT_VOLATILE;
            /* fall through */
        rewrite_static_field:
            if (volatileOpc != OP_NOP)
                rewriteStaticField(method, insns, volatileOpc);
            break;

        case OP_SGET_JUMBO:
        case OP_SGET_BOOLEAN_JUMBO:
        case OP_SGET_BYTE_JUMBO:
        case OP_SGET_CHAR_JUMBO:
        case OP_SGET_SHORT_JUMBO:
            if (forSmp)
                volatileOpc = OP_SGET_VOLATILE_JUMBO;
            goto rewrite_jumbo_static_field;
        case OP_SGET_WIDE_JUMBO:
            volatileOpc = OP_SGET_WIDE_VOLATILE_JUMBO;
            goto rewrite_jumbo_static_field;
        case OP_SGET_OBJECT_JUMBO:
            if (forSmp)
                volatileOpc = OP_SGET_OBJECT_VOLATILE_JUMBO;
            goto rewrite_jumbo_static_field;
        case OP_SPUT_JUMBO:
        case OP_SPUT_BOOLEAN_JUMBO:
        case OP_SPUT_BYTE_JUMBO:
        case OP_SPUT_CHAR_JUMBO:
        case OP_SPUT_SHORT_JUMBO:
            if (forSmp)
                volatileOpc = OP_SPUT_VOLATILE_JUMBO;
            goto rewrite_jumbo_static_field;
        case OP_SPUT_WIDE_JUMBO:
            volatileOpc = OP_SPUT_WIDE_VOLATILE_JUMBO;
            goto rewrite_jumbo_static_field;
        case OP_SPUT_OBJECT_JUMBO:
            if (forSmp)
                volatileOpc = OP_SPUT_OBJECT_VOLATILE_JUMBO;
            /* fall through */
        rewrite_jumbo_static_field:
            if (volatileOpc != OP_NOP)
                rewriteJumboStaticField(method, insns, volatileOpc);
            break;

        case OP_INVOKE_DIRECT:
        case OP_INVOKE_DIRECT_RANGE:
            if (!rewriteInvokeObjectInit(method, insns)) {
                /* may want to try execute-inline, below */
                matched = false;
            }
            break;
        case OP_INVOKE_DIRECT_JUMBO:
            rewriteJumboInvokeObjectInit(method, insns);
            break;
        case OP_RETURN_VOID:
            if (needRetBar)
                rewriteReturnVoid(method, insns);
            break;
        default:
            matched = false;
            break;
        }

        /*
         * non-essential substitutions:
         *  invoke-{virtual,direct,static}[/range] --> execute-inline
         *  invoke-{virtual,super}[/range] --> invoke-*-quick
         */
        if (!matched && !essentialOnly) {
            switch (opc) {
            case OP_INVOKE_VIRTUAL:
                if (!rewriteExecuteInline(method, insns, METHOD_VIRTUAL)) {
                    rewriteVirtualInvoke(method, insns,
                        OP_INVOKE_VIRTUAL_QUICK);
                }
                break;
            case OP_INVOKE_VIRTUAL_RANGE:
                if (!rewriteExecuteInlineRange(method, insns, METHOD_VIRTUAL)) {
                    rewriteVirtualInvoke(method, insns,
                        OP_INVOKE_VIRTUAL_QUICK_RANGE);
                }
                break;
            case OP_INVOKE_SUPER:
                rewriteVirtualInvoke(method, insns, OP_INVOKE_SUPER_QUICK);
                break;
            case OP_INVOKE_SUPER_RANGE:
                rewriteVirtualInvoke(method, insns, OP_INVOKE_SUPER_QUICK_RANGE);
                break;
            case OP_INVOKE_DIRECT:
                rewriteExecuteInline(method, insns, METHOD_DIRECT);
                break;
            case OP_INVOKE_DIRECT_RANGE:
                rewriteExecuteInlineRange(method, insns, METHOD_DIRECT);
                break;
            case OP_INVOKE_STATIC:
                rewriteExecuteInline(method, insns, METHOD_STATIC);
                break;
            case OP_INVOKE_STATIC_RANGE:
                rewriteExecuteInlineRange(method, insns, METHOD_STATIC);
                break;
            default:
                /* nothing to do for this instruction */
                ;
            }
        }

        assert(width > 0);
        assert(width <= insnsSize);
        assert(width == dexGetWidthFromInstruction(insns));

        insns += width;
        insnsSize -= width;
    }

    assert(insnsSize == 0);
}

/*
 * Update a 16-bit code unit in "meth".  The way in which the DEX data was
 * loaded determines how we go about the write.
 *
 * This will be operating on post-byte-swap DEX data, so values will
 * be in host order.
 */
void dvmUpdateCodeUnit(const Method* meth, u2* ptr, u2 newVal)
{
    DvmDex* pDvmDex = meth->clazz->pDvmDex;

    if (!pDvmDex->isMappedReadOnly) {
        /* in-memory DEX (dexopt or byte[]), alter the output directly */
        *ptr = newVal;
    } else {
        /* memory-mapped file, toggle the page read/write status */
        dvmDexChangeDex2(pDvmDex, ptr, newVal);
    }
}
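/*
 * Usage note (illustrative): every rewrite later in this file funnels its
 * stores through dvmUpdateCodeUnit(), either directly or via updateOpcode(),
 * so the rewrites behave the same whether the DEX was loaded as a writable
 * in-memory copy or as a read-only memory-mapped file.
 */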

/*
 * Update an instruction's opcode.
 *
 * If "opcode" is an 8-bit op, we just replace that portion.  If it's a
 * 16-bit op, we convert the opcode from "packed" form (e.g. 0x0108) to
 * bytecode form (e.g. 0x08ff).
 */
static inline void updateOpcode(const Method* meth, u2* ptr, Opcode opcode)
{
    if (opcode >= 256) {
        /* opcode low byte becomes high byte, low byte becomes 0xff */
        assert((ptr[0] & 0xff) == 0xff);
        dvmUpdateCodeUnit(meth, ptr, (u2) (opcode << 8) | 0x00ff);
    } else {
        /* 8-bit op, just replace the low byte */
        assert((ptr[0] & 0xff) != 0xff);
        dvmUpdateCodeUnit(meth, ptr, (ptr[0] & 0xff00) | (u2) opcode);
    }
}
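/*
 * Encoding example (illustrative): rewriting to the packed 16-bit opcode
 * 0x0108 stores ((u2) (0x0108 << 8)) | 0x00ff == 0x08ff in the first code
 * unit.  Rewriting the code unit 0x1052 to an 8-bit replacement opcode
 * 0xf2 keeps the operand byte and stores 0x10f2.
 */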

/*
 * If "referrer" and "resClass" don't come from the same DEX file, and
 * the DEX we're working on is not destined for the bootstrap class path,
 * tweak the class loader so package-access checks work correctly.
 *
 * Only do this if we're doing pre-verification or optimization.
 */
static void tweakLoader(ClassObject* referrer, ClassObject* resClass)
{
    if (!gDvm.optimizing)
        return;
    assert(referrer->classLoader == NULL);
    assert(resClass->classLoader == NULL);

    if (!gDvm.optimizingBootstrapClass) {
        /* class loader for an array class comes from element type */
        if (dvmIsArrayClass(resClass))
            resClass = resClass->elementClass;
        if (referrer->pDvmDex != resClass->pDvmDex)
            resClass->classLoader = (Object*) 0xdead3333;
    }
}

/*
 * Undo the effects of tweakLoader.
 */
static void untweakLoader(ClassObject* referrer, ClassObject* resClass)
{
    if (!gDvm.optimizing || gDvm.optimizingBootstrapClass)
        return;

    if (dvmIsArrayClass(resClass))
        resClass = resClass->elementClass;
    resClass->classLoader = NULL;
}


/*
 * Alternate version of dvmResolveClass for use with verification and
 * optimization.  Performs access checks on every resolve, and refuses
 * to acknowledge the existence of classes defined in more than one DEX
 * file.
 *
 * Exceptions caused by failures are cleared before returning.
 *
 * On failure, returns NULL, and sets *pFailure if pFailure is not NULL.
 */
ClassObject* dvmOptResolveClass(ClassObject* referrer, u4 classIdx,
    VerifyError* pFailure)
{
    DvmDex* pDvmDex = referrer->pDvmDex;
    ClassObject* resClass;

    /*
     * Check the table first.  If not there, do the lookup by name.
     */
    resClass = dvmDexGetResolvedClass(pDvmDex, classIdx);
    if (resClass == NULL) {
        const char* className = dexStringByTypeIdx(pDvmDex->pDexFile, classIdx);
        if (className[0] != '\0' && className[1] == '\0') {
            /* primitive type */
            resClass = dvmFindPrimitiveClass(className[0]);
        } else {
            resClass = dvmFindClassNoInit(className, referrer->classLoader);
        }
        if (resClass == NULL) {
            /* not found, exception should be raised */
            LOGV("DexOpt: class %d (%s) not found",
                classIdx,
                dexStringByTypeIdx(pDvmDex->pDexFile, classIdx));
            if (pFailure != NULL) {
                /* dig through the wrappers to find the original failure */
                Object* excep = dvmGetException(dvmThreadSelf());
                while (true) {
                    Object* cause = dvmGetExceptionCause(excep);
                    if (cause == NULL)
                        break;
                    excep = cause;
                }
                if (strcmp(excep->clazz->descriptor,
                    "Ljava/lang/IncompatibleClassChangeError;") == 0)
                {
                    *pFailure = VERIFY_ERROR_CLASS_CHANGE;
                } else {
                    *pFailure = VERIFY_ERROR_NO_CLASS;
                }
            }
            dvmClearOptException(dvmThreadSelf());
            return NULL;
        }

        /*
         * Add it to the resolved table so we're faster on the next lookup.
         */
        dvmDexSetResolvedClass(pDvmDex, classIdx, resClass);
    }

    /* multiple definitions? */
    if (IS_CLASS_FLAG_SET(resClass, CLASS_MULTIPLE_DEFS)) {
        LOGI("DexOpt: not resolving ambiguous class '%s'",
            resClass->descriptor);
        if (pFailure != NULL)
            *pFailure = VERIFY_ERROR_NO_CLASS;
        return NULL;
    }

    /* access allowed? */
    tweakLoader(referrer, resClass);
    bool allowed = dvmCheckClassAccess(referrer, resClass);
    untweakLoader(referrer, resClass);
    if (!allowed) {
        LOGW("DexOpt: resolve class illegal access: %s -> %s",
            referrer->descriptor, resClass->descriptor);
        if (pFailure != NULL)
            *pFailure = VERIFY_ERROR_ACCESS_CLASS;
        return NULL;
    }

    return resClass;
}
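/*
 * Descriptor examples (illustrative): a single-character type descriptor
 * such as "I" denotes a primitive (int here) and is handled by
 * dvmFindPrimitiveClass() above; longer descriptors such as
 * "Ljava/lang/String;" or "[I" go through dvmFindClassNoInit() with the
 * referrer's class loader.
 */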

/*
 * Alternate version of dvmResolveInstField().
 *
 * On failure, returns NULL, and sets *pFailure if pFailure is not NULL.
 */
InstField* dvmOptResolveInstField(ClassObject* referrer, u4 ifieldIdx,
    VerifyError* pFailure)
{
    DvmDex* pDvmDex = referrer->pDvmDex;
    InstField* resField;

    resField = (InstField*) dvmDexGetResolvedField(pDvmDex, ifieldIdx);
    if (resField == NULL) {
        const DexFieldId* pFieldId;
        ClassObject* resClass;

        pFieldId = dexGetFieldId(pDvmDex->pDexFile, ifieldIdx);

        /*
         * Find the field's class.
         */
        resClass = dvmOptResolveClass(referrer, pFieldId->classIdx, pFailure);
        if (resClass == NULL) {
            //dvmClearOptException(dvmThreadSelf());
            assert(!dvmCheckException(dvmThreadSelf()));
            if (pFailure != NULL) { assert(!VERIFY_OK(*pFailure)); }
            return NULL;
        }

        resField = (InstField*)dvmFindFieldHier(resClass,
            dexStringById(pDvmDex->pDexFile, pFieldId->nameIdx),
            dexStringByTypeIdx(pDvmDex->pDexFile, pFieldId->typeIdx));
        if (resField == NULL) {
            LOGD("DexOpt: couldn't find field %s.%s",
                resClass->descriptor,
                dexStringById(pDvmDex->pDexFile, pFieldId->nameIdx));
            if (pFailure != NULL)
                *pFailure = VERIFY_ERROR_NO_FIELD;
            return NULL;
        }
        if (dvmIsStaticField(resField)) {
            LOGD("DexOpt: wanted instance, got static for field %s.%s",
                resClass->descriptor,
                dexStringById(pDvmDex->pDexFile, pFieldId->nameIdx));
            if (pFailure != NULL)
                *pFailure = VERIFY_ERROR_CLASS_CHANGE;
            return NULL;
        }

        /*
         * Add it to the resolved table so we're faster on the next lookup.
         */
        dvmDexSetResolvedField(pDvmDex, ifieldIdx, (Field*) resField);
    }

    /* access allowed? */
    tweakLoader(referrer, resField->clazz);
    bool allowed = dvmCheckFieldAccess(referrer, (Field*)resField);
    untweakLoader(referrer, resField->clazz);
    if (!allowed) {
        LOGI("DexOpt: access denied from %s to field %s.%s",
            referrer->descriptor, resField->clazz->descriptor,
            resField->name);
        if (pFailure != NULL)
            *pFailure = VERIFY_ERROR_ACCESS_FIELD;
        return NULL;
    }

    return resField;
}

/*
 * Alternate version of dvmResolveStaticField().
 *
 * Does not force initialization of the resolved field's class.
 *
 * On failure, returns NULL, and sets *pFailure if pFailure is not NULL.
 */
StaticField* dvmOptResolveStaticField(ClassObject* referrer, u4 sfieldIdx,
    VerifyError* pFailure)
{
    DvmDex* pDvmDex = referrer->pDvmDex;
    StaticField* resField;

    resField = (StaticField*)dvmDexGetResolvedField(pDvmDex, sfieldIdx);
    if (resField == NULL) {
        const DexFieldId* pFieldId;
        ClassObject* resClass;

        pFieldId = dexGetFieldId(pDvmDex->pDexFile, sfieldIdx);

        /*
         * Find the field's class.
         */
        resClass = dvmOptResolveClass(referrer, pFieldId->classIdx, pFailure);
        if (resClass == NULL) {
            //dvmClearOptException(dvmThreadSelf());
            assert(!dvmCheckException(dvmThreadSelf()));
            if (pFailure != NULL) { assert(!VERIFY_OK(*pFailure)); }
            return NULL;
        }

        const char* fieldName =
            dexStringById(pDvmDex->pDexFile, pFieldId->nameIdx);

        resField = (StaticField*)dvmFindFieldHier(resClass, fieldName,
            dexStringByTypeIdx(pDvmDex->pDexFile, pFieldId->typeIdx));
        if (resField == NULL) {
            LOGD("DexOpt: couldn't find static field %s.%s",
                resClass->descriptor, fieldName);
            if (pFailure != NULL)
                *pFailure = VERIFY_ERROR_NO_FIELD;
            return NULL;
        }
        if (!dvmIsStaticField(resField)) {
            LOGD("DexOpt: wanted static, got instance for field %s.%s",
                resClass->descriptor, fieldName);
            if (pFailure != NULL)
                *pFailure = VERIFY_ERROR_CLASS_CHANGE;
            return NULL;
        }

        /*
         * Add it to the resolved table so we're faster on the next lookup.
         *
         * We can only do this if we're in "dexopt", because the presence
         * of a valid value in the resolution table implies that the class
         * containing the static field has been initialized.
         */
        if (gDvm.optimizing)
            dvmDexSetResolvedField(pDvmDex, sfieldIdx, (Field*) resField);
    }

    /* access allowed? */
    tweakLoader(referrer, resField->clazz);
    bool allowed = dvmCheckFieldAccess(referrer, (Field*)resField);
    untweakLoader(referrer, resField->clazz);
    if (!allowed) {
        LOGI("DexOpt: access denied from %s to field %s.%s",
            referrer->descriptor, resField->clazz->descriptor,
            resField->name);
        if (pFailure != NULL)
            *pFailure = VERIFY_ERROR_ACCESS_FIELD;
        return NULL;
    }

    return resField;
}

/*
 * Rewrite an iget/iput instruction if appropriate.  These all have the form:
 *   op vA, vB, field@CCCC
 *
 * Where vA holds the value, vB holds the object reference, and CCCC is
 * the field reference constant pool offset.  For a non-volatile field,
 * we want to replace the opcode with "quickOpc" and replace CCCC with
 * the byte offset from the start of the object.  For a volatile field,
 * we just want to replace the opcode with "volatileOpc".
 *
 * If "volatileOpc" is OP_NOP we don't check to see if it's a volatile
 * field.  If "quickOpc" is OP_NOP, and this is a non-volatile field,
 * we don't do anything.
 *
 * "method" is the referring method.
 */
static void rewriteInstField(Method* method, u2* insns, Opcode quickOpc,
    Opcode volatileOpc)
{
    ClassObject* clazz = method->clazz;
    u2 fieldIdx = insns[1];
    InstField* instField;

    instField = dvmOptResolveInstField(clazz, fieldIdx, NULL);
    if (instField == NULL) {
        LOGI("DexOpt: unable to optimize instance field ref "
             "0x%04x at 0x%02x in %s.%s",
            fieldIdx, (int) (insns - method->insns), clazz->descriptor,
            method->name);
        return;
    }

    if (volatileOpc != OP_NOP && dvmIsVolatileField(instField)) {
        updateOpcode(method, insns, volatileOpc);
        LOGV("DexOpt: rewrote ifield access %s.%s --> volatile",
            instField->clazz->descriptor, instField->name);
    } else if (quickOpc != OP_NOP && instField->byteOffset < 65536) {
        updateOpcode(method, insns, quickOpc);
        dvmUpdateCodeUnit(method, insns+1, (u2) instField->byteOffset);
        LOGV("DexOpt: rewrote ifield access %s.%s --> %d",
            instField->clazz->descriptor, instField->name,
            instField->byteOffset);
    } else {
        LOGV("DexOpt: no rewrite of ifield access %s.%s",
            instField->clazz->descriptor, instField->name);
    }

    return;
}
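/*
 * Example rewrite (illustrative): an "iget v0, v1, field@0003" whose
 * resolved field is non-volatile and sits at byte offset 8 becomes
 * "iget-quick v0, v1, [obj+8]": updateOpcode() swaps the opcode byte and
 * dvmUpdateCodeUnit() replaces the field index in insns[1] with the byte
 * offset.
 */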
/*
 * Rewrite a jumbo instance field access instruction if appropriate.  If
 * the target field is volatile, we replace the opcode with "volatileOpc".
 *
 * "method" is the referring method.
 */
static void rewriteJumboInstField(Method* method, u2* insns, Opcode volatileOpc)
{
    ClassObject* clazz = method->clazz;
    u4 fieldIdx = insns[1] | (u4) insns[2] << 16;
    InstField* instField;

    assert(volatileOpc != OP_NOP);

    instField = dvmOptResolveInstField(clazz, fieldIdx, NULL);
    if (instField == NULL) {
        LOGI("DexOpt: unable to optimize instance field ref "
             "0x%04x at 0x%02x in %s.%s",
            fieldIdx, (int) (insns - method->insns), clazz->descriptor,
            method->name);
        return;
    }

    if (dvmIsVolatileField(instField)) {
        updateOpcode(method, insns, volatileOpc);
        LOGV("DexOpt: rewrote jumbo ifield access %s.%s --> volatile",
            instField->clazz->descriptor, instField->name);
    } else {
        LOGV("DexOpt: no rewrite of jumbo ifield access %s.%s",
            instField->clazz->descriptor, instField->name);
    }
}

/*
 * Rewrite a static [jumbo] field access instruction if appropriate.  If
 * the target field is volatile, we replace the opcode with "volatileOpc".
 *
 * "method" is the referring method.
 */
static void rewriteStaticField0(Method* method, u2* insns, Opcode volatileOpc,
    u4 fieldIdx)
{
    ClassObject* clazz = method->clazz;
    StaticField* staticField;

    assert(volatileOpc != OP_NOP);

    staticField = dvmOptResolveStaticField(clazz, fieldIdx, NULL);
    if (staticField == NULL) {
        LOGI("DexOpt: unable to optimize static field ref "
             "0x%04x at 0x%02x in %s.%s",
            fieldIdx, (int) (insns - method->insns), clazz->descriptor,
            method->name);
        return;
    }

    if (dvmIsVolatileField(staticField)) {
        updateOpcode(method, insns, volatileOpc);
        LOGV("DexOpt: rewrote sfield access %s.%s --> volatile",
            staticField->clazz->descriptor, staticField->name);
    }
}

static void rewriteStaticField(Method* method, u2* insns, Opcode volatileOpc)
{
    u2 fieldIdx = insns[1];
    rewriteStaticField0(method, insns, volatileOpc, fieldIdx);
}
static void rewriteJumboStaticField(Method* method, u2* insns,
    Opcode volatileOpc)
{
    u4 fieldIdx = insns[1] | (u4) insns[2] << 16;
    rewriteStaticField0(method, insns, volatileOpc, fieldIdx);
}

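/*
 * Note (illustrative): unlike instance fields, sget/sput have no "-quick"
 * form here, so only the volatile substitution is ever applied.  The jumbo
 * variants differ only in how they fetch the field reference: the 32-bit
 * index is assembled from two code units as insns[1] | ((u4) insns[2] << 16).
 */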
/*
 * Alternate version of dvmResolveMethod().
 *
 * Doesn't throw exceptions, and checks access on every lookup.
 *
 * On failure, returns NULL, and sets *pFailure if pFailure is not NULL.
 */
Method* dvmOptResolveMethod(ClassObject* referrer, u4 methodIdx,
    MethodType methodType, VerifyError* pFailure)
{
    DvmDex* pDvmDex = referrer->pDvmDex;
    Method* resMethod;

    assert(methodType == METHOD_DIRECT ||
           methodType == METHOD_VIRTUAL ||
           methodType == METHOD_STATIC);

    LOGVV("--- resolving method %u (referrer=%s)", methodIdx,
        referrer->descriptor);

    resMethod = dvmDexGetResolvedMethod(pDvmDex, methodIdx);
    if (resMethod == NULL) {
        const DexMethodId* pMethodId;
        ClassObject* resClass;

        pMethodId = dexGetMethodId(pDvmDex->pDexFile, methodIdx);

        resClass = dvmOptResolveClass(referrer, pMethodId->classIdx, pFailure);
        if (resClass == NULL) {
            /*
             * Can't find the class that the method is a part of, or don't
             * have permission to access the class.
             */
            LOGV("DexOpt: can't find called method's class (?.%s)",
                dexStringById(pDvmDex->pDexFile, pMethodId->nameIdx));
            if (pFailure != NULL) { assert(!VERIFY_OK(*pFailure)); }
            return NULL;
        }
        if (dvmIsInterfaceClass(resClass)) {
            /* method is part of an interface; this is wrong method for that */
            LOGW("DexOpt: method is in an interface");
            if (pFailure != NULL)
                *pFailure = VERIFY_ERROR_GENERIC;
            return NULL;
        }

        /*
         * We need to chase up the class hierarchy to find methods defined
         * in super-classes.  (We only want to check the current class
         * if we're looking for a constructor.)
         */
        DexProto proto;
        dexProtoSetFromMethodId(&proto, pDvmDex->pDexFile, pMethodId);

        if (methodType == METHOD_DIRECT) {
            resMethod = dvmFindDirectMethod(resClass,
                dexStringById(pDvmDex->pDexFile, pMethodId->nameIdx), &proto);
        } else {
            /* METHOD_STATIC or METHOD_VIRTUAL */
            resMethod = dvmFindMethodHier(resClass,
                dexStringById(pDvmDex->pDexFile, pMethodId->nameIdx), &proto);
        }

        if (resMethod == NULL) {
            LOGV("DexOpt: couldn't find method '%s'",
                dexStringById(pDvmDex->pDexFile, pMethodId->nameIdx));
            if (pFailure != NULL)
                *pFailure = VERIFY_ERROR_NO_METHOD;
            return NULL;
        }
        if (methodType == METHOD_STATIC) {
            if (!dvmIsStaticMethod(resMethod)) {
                LOGD("DexOpt: wanted static, got instance for method %s.%s",
                    resClass->descriptor, resMethod->name);
                if (pFailure != NULL)
                    *pFailure = VERIFY_ERROR_CLASS_CHANGE;
                return NULL;
            }
        } else if (methodType == METHOD_VIRTUAL) {
            if (dvmIsStaticMethod(resMethod)) {
                LOGD("DexOpt: wanted instance, got static for method %s.%s",
                    resClass->descriptor, resMethod->name);
                if (pFailure != NULL)
                    *pFailure = VERIFY_ERROR_CLASS_CHANGE;
                return NULL;
            }
        }

        /* see if this is a pure-abstract method */
        if (dvmIsAbstractMethod(resMethod) && !dvmIsAbstractClass(resClass)) {
            LOGW("DexOpt: pure-abstract method '%s' in %s",
                dexStringById(pDvmDex->pDexFile, pMethodId->nameIdx),
                resClass->descriptor);
            if (pFailure != NULL)
                *pFailure = VERIFY_ERROR_GENERIC;
            return NULL;
        }

        /*
         * Add it to the resolved table so we're faster on the next lookup.
         *
         * We can only do this for static methods if we're in "dexopt",
         * because the presence of a valid value in the resolution table
         * implies that the class containing the static method has been
         * initialized.
         */
        if (methodType != METHOD_STATIC || gDvm.optimizing)
            dvmDexSetResolvedMethod(pDvmDex, methodIdx, resMethod);
    }

    LOGVV("--- found method %d (%s.%s)",
        methodIdx, resMethod->clazz->descriptor, resMethod->name);

    /* access allowed? */
    tweakLoader(referrer, resMethod->clazz);
    bool allowed = dvmCheckMethodAccess(referrer, resMethod);
    untweakLoader(referrer, resMethod->clazz);
    if (!allowed) {
        IF_LOGI() {
            char* desc = dexProtoCopyMethodDescriptor(&resMethod->prototype);
            LOGI("DexOpt: illegal method access (call %s.%s %s from %s)",
                resMethod->clazz->descriptor, resMethod->name, desc,
                referrer->descriptor);
            free(desc);
        }
        if (pFailure != NULL)
            *pFailure = VERIFY_ERROR_ACCESS_METHOD;
        return NULL;
    }

    return resMethod;
}

/*
 * Rewrite invoke-virtual, invoke-virtual/range, invoke-super, and
 * invoke-super/range if appropriate.  These all have the form:
 *   op vAA, meth@BBBB, reg stuff @CCCC
 *
 * We want to replace the method constant pool index BBBB with the
 * vtable index.
 */
static void rewriteVirtualInvoke(Method* method, u2* insns, Opcode newOpc)
{
    ClassObject* clazz = method->clazz;
    Method* baseMethod;
    u2 methodIdx = insns[1];

    baseMethod = dvmOptResolveMethod(clazz, methodIdx, METHOD_VIRTUAL, NULL);
    if (baseMethod == NULL) {
        LOGD("DexOpt: unable to optimize virt call 0x%04x at 0x%02x in %s.%s",
            methodIdx,
            (int) (insns - method->insns), clazz->descriptor,
            method->name);
        return;
    }

    assert((insns[0] & 0xff) == OP_INVOKE_VIRTUAL ||
           (insns[0] & 0xff) == OP_INVOKE_VIRTUAL_RANGE ||
           (insns[0] & 0xff) == OP_INVOKE_SUPER ||
           (insns[0] & 0xff) == OP_INVOKE_SUPER_RANGE);

    /*
     * Note: Method->methodIndex is a u2 and is range checked during the
     * initial load.
     */
    updateOpcode(method, insns, newOpc);
    dvmUpdateCodeUnit(method, insns+1, baseMethod->methodIndex);

    //LOGI("DexOpt: rewrote call to %s.%s --> %s.%s",
    //    method->clazz->descriptor, method->name,
    //    baseMethod->clazz->descriptor, baseMethod->name);

    return;
}

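/*
 * Example rewrite (illustrative): "invoke-virtual {v1, v2}, meth@0042"
 * whose resolved base method occupies vtable slot 0x0017 becomes
 * "invoke-virtual-quick {v1, v2}, vtable #0x0017" -- the opcode byte is
 * replaced and insns[1] now holds baseMethod->methodIndex instead of a
 * method_ids index.
 */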
/*
 * Rewrite invoke-direct[/range] if the target is Object.<init>.
 *
 * This is useful as an optimization, because otherwise every object
 * instantiation will cause us to call a method that does nothing.
 * It also allows us to inexpensively mark objects as finalizable at the
 * correct time.
 *
 * TODO: verifier should ensure Object.<init> contains only return-void,
 * and issue a warning if not.
 */
static bool rewriteInvokeObjectInit(Method* method, u2* insns)
{
    ClassObject* clazz = method->clazz;
    Method* calledMethod;
    u2 methodIdx = insns[1];

    calledMethod = dvmOptResolveMethod(clazz, methodIdx, METHOD_DIRECT, NULL);
    if (calledMethod == NULL) {
        LOGD("DexOpt: unable to opt direct call 0x%04x at 0x%02x in %s.%s",
            methodIdx, (int) (insns - method->insns),
            clazz->descriptor, method->name);
        return false;
    }

    if (calledMethod->clazz == gDvm.classJavaLangObject &&
        dvmCompareNameDescriptorAndMethod("<init>", "()V", calledMethod) == 0)
    {
        /*
         * Replace the instruction.  If the debugger is attached, the
         * interpreter will forward execution to the invoke-direct/range
         * handler.  If this was an invoke-direct/range instruction we can
         * just replace the opcode, but if it was an invoke-direct we
         * have to set the argument count (high 8 bits of first code unit)
         * to 1.
         */
        u1 origOp = insns[0] & 0xff;
        if (origOp == OP_INVOKE_DIRECT) {
            dvmUpdateCodeUnit(method, insns,
                OP_INVOKE_OBJECT_INIT_RANGE | 0x100);
        } else {
            assert(origOp == OP_INVOKE_DIRECT_RANGE);
            assert((insns[0] >> 8) == 1);
            updateOpcode(method, insns, OP_INVOKE_OBJECT_INIT_RANGE);
        }

        LOGVV("DexOpt: replaced Object.<init> in %s.%s",
            method->clazz->descriptor, method->name);
    }

    return true;
}

/*
 * Rewrite invoke-direct/jumbo if the target is Object.<init>.
 */
static bool rewriteJumboInvokeObjectInit(Method* method, u2* insns)
{
    ClassObject* clazz = method->clazz;
    Method* calledMethod;
    u4 methodIdx = insns[1] | (u4) insns[2] << 16;

    calledMethod = dvmOptResolveMethod(clazz, methodIdx, METHOD_DIRECT, NULL);
    if (calledMethod == NULL) {
        LOGD("DexOpt: unable to opt direct call 0x%04x at 0x%02x in %s.%s",
            methodIdx, (int) (insns - method->insns),
            clazz->descriptor, method->name);
        return false;
    }

    if (calledMethod->clazz == gDvm.classJavaLangObject &&
        dvmCompareNameDescriptorAndMethod("<init>", "()V", calledMethod) == 0)
    {
        assert(insns[0] == ((u2) (OP_INVOKE_DIRECT_JUMBO << 8) | 0xff));
        updateOpcode(method, insns, OP_INVOKE_OBJECT_INIT_JUMBO);

        LOGVV("DexOpt: replaced jumbo Object.<init> in %s.%s",
            method->clazz->descriptor, method->name);
    }

    return true;
}

/*
 * Resolve an interface method reference.
 *
 * No method access check here -- interface methods are always public.
 *
 * Returns NULL if the method was not found.  Does not throw an exception.
 */
Method* dvmOptResolveInterfaceMethod(ClassObject* referrer, u4 methodIdx)
{
    DvmDex* pDvmDex = referrer->pDvmDex;
    Method* resMethod;

    LOGVV("--- resolving interface method %d (referrer=%s)",
        methodIdx, referrer->descriptor);

    resMethod = dvmDexGetResolvedMethod(pDvmDex, methodIdx);
    if (resMethod == NULL) {
        const DexMethodId* pMethodId;
        ClassObject* resClass;

        pMethodId = dexGetMethodId(pDvmDex->pDexFile, methodIdx);

        resClass = dvmOptResolveClass(referrer, pMethodId->classIdx, NULL);
        if (resClass == NULL) {
            /* can't find the class that the method is a part of */
            dvmClearOptException(dvmThreadSelf());
            return NULL;
        }
        if (!dvmIsInterfaceClass(resClass)) {
            /* whoops */
            LOGI("Interface method not part of interface class");
            return NULL;
        }

        const char* methodName =
            dexStringById(pDvmDex->pDexFile, pMethodId->nameIdx);
        DexProto proto;
        dexProtoSetFromMethodId(&proto, pDvmDex->pDexFile, pMethodId);

        LOGVV("+++ looking for '%s' in resClass='%s'",
            methodName, resClass->descriptor);
        resMethod = dvmFindInterfaceMethodHier(resClass, methodName, &proto);
        if (resMethod == NULL) {
            return NULL;
        }

        /* we're expecting this to be abstract */
        if (!dvmIsAbstractMethod(resMethod)) {
            char* desc = dexProtoCopyMethodDescriptor(&resMethod->prototype);
            LOGW("Found non-abstract interface method %s.%s %s",
                resMethod->clazz->descriptor, resMethod->name, desc);
            free(desc);
            return NULL;
        }

        /*
         * Add it to the resolved table so we're faster on the next lookup.
         */
        dvmDexSetResolvedMethod(pDvmDex, methodIdx, resMethod);
    }

    LOGVV("--- found interface method %d (%s.%s)",
        methodIdx, resMethod->clazz->descriptor, resMethod->name);

    /* interface methods are always public; no need to check access */

    return resMethod;
}

/*
 * Replace invoke-virtual, invoke-direct, or invoke-static with an
 * execute-inline operation if appropriate.
 *
 * Returns "true" if we replace it.
 */
static bool rewriteExecuteInline(Method* method, u2* insns,
    MethodType methodType)
{
    const InlineSub* inlineSubs = gDvm.inlineSubs;
    ClassObject* clazz = method->clazz;
    Method* calledMethod;
    u2 methodIdx = insns[1];

    //return false;

    calledMethod = dvmOptResolveMethod(clazz, methodIdx, methodType, NULL);
    if (calledMethod == NULL) {
        LOGV("+++ DexOpt inline: can't find %d", methodIdx);
        return false;
    }

    while (inlineSubs->method != NULL) {
        /*
        if (extra) {
            LOGI("comparing %p vs %p %s.%s %s",
                inlineSubs->method, calledMethod,
                inlineSubs->method->clazz->descriptor,
                inlineSubs->method->name,
                inlineSubs->method->signature);
        }
        */
        if (inlineSubs->method == calledMethod) {
            assert((insns[0] & 0xff) == OP_INVOKE_DIRECT ||
                   (insns[0] & 0xff) == OP_INVOKE_STATIC ||
                   (insns[0] & 0xff) == OP_INVOKE_VIRTUAL);
            updateOpcode(method, insns, OP_EXECUTE_INLINE);
            dvmUpdateCodeUnit(method, insns+1, (u2) inlineSubs->inlineIdx);

            //LOGI("DexOpt: execute-inline %s.%s --> %s.%s",
            //    method->clazz->descriptor, method->name,
            //    calledMethod->clazz->descriptor, calledMethod->name);
            return true;
        }

        inlineSubs++;
    }

    return false;
}

/*
 * Replace invoke-virtual/range, invoke-direct/range, or invoke-static/range
 * with an execute-inline operation if appropriate.
 *
 * Returns "true" if we replace it.
 */
static bool rewriteExecuteInlineRange(Method* method, u2* insns,
    MethodType methodType)
{
    const InlineSub* inlineSubs = gDvm.inlineSubs;
    ClassObject* clazz = method->clazz;
    Method* calledMethod;
    u2 methodIdx = insns[1];

    calledMethod = dvmOptResolveMethod(clazz, methodIdx, methodType, NULL);
    if (calledMethod == NULL) {
        LOGV("+++ DexOpt inline/range: can't find %d", methodIdx);
        return false;
    }

    while (inlineSubs->method != NULL) {
        if (inlineSubs->method == calledMethod) {
            assert((insns[0] & 0xff) == OP_INVOKE_DIRECT_RANGE ||
                   (insns[0] & 0xff) == OP_INVOKE_STATIC_RANGE ||
                   (insns[0] & 0xff) == OP_INVOKE_VIRTUAL_RANGE);
            updateOpcode(method, insns, OP_EXECUTE_INLINE_RANGE);
            dvmUpdateCodeUnit(method, insns+1, (u2) inlineSubs->inlineIdx);

            //LOGI("DexOpt: execute-inline/range %s.%s --> %s.%s",
            //    method->clazz->descriptor, method->name,
            //    calledMethod->clazz->descriptor, calledMethod->name);
            return true;
        }

        inlineSubs++;
    }

    return false;
}

/*
 * Returns "true" if the return-void instructions in this method should
 * be converted to return-void-barrier.
 *
 * This is needed to satisfy a Java Memory Model requirement regarding
 * the construction of objects with final fields.  (This does not apply
 * to <clinit> or static fields, since appropriate barriers are guaranteed
 * by the class initialization process.)
 */
static bool needsReturnBarrier(Method* method)
{
    if (!gDvm.dexOptForSmp)
        return false;
    if (strcmp(method->name, "<init>") != 0)
        return false;

    /*
     * Check to see if the class is finalizable.  The loader sets a flag
     * if the class or one of its superclasses overrides finalize().
     */
    const ClassObject* clazz = method->clazz;
    if (IS_CLASS_FLAG_SET(clazz, CLASS_ISFINALIZABLE))
        return true;

    /*
     * Check to see if the class has any final fields.  If not, we don't
     * need to generate a barrier instruction.
     *
     * In theory, we only need to do this if the method actually modifies
     * a final field.  In practice, non-constructor methods are allowed
     * to modify final fields, and there are 3rd-party tools that rely on
     * this behavior.  (The compiler does not allow it, but the VM does.)
     *
     * If we alter the verifier to restrict final-field updates to
     * constructors, we can tighten this up as well.
     */
    int idx = clazz->ifieldCount;
    while (--idx >= 0) {
        if (dvmIsFinalField(&clazz->ifields[idx]))
            return true;
    }

    return false;
}

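/*
 * Illustrative background for the check above: for a constructor such as
 *
 *   Foo() { this.finalField = 42; }
 *
 * the Java Memory Model requires that any thread which later sees a
 * reference to the new Foo also sees 42 in finalField.  On SMP hardware
 * that means the stores made in <init> must be ordered before the object
 * reference is published, which is what return-void-barrier provides.
 */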
/*
 * Convert a return-void to a return-void-barrier.
 */
static void rewriteReturnVoid(Method* method, u2* insns)
{
    assert((insns[0] & 0xff) == OP_RETURN_VOID);
    updateOpcode(method, insns, OP_RETURN_VOID_BARRIER);
}