1 /* 2 * Copyright (C) 2009 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 #include "Dalvik.h" 18 #include "libdex/DexOpcodes.h" 19 #include "libdex/DexCatch.h" 20 #include "interp/Jit.h" 21 #include "CompilerInternals.h" 22 #include "Dataflow.h" 23 24 static inline bool contentIsInsn(const u2 *codePtr) { 25 u2 instr = *codePtr; 26 Opcode opcode = (Opcode)(instr & 0xff); 27 28 /* 29 * Since the low 8-bit in metadata may look like OP_NOP, we need to check 30 * both the low and whole sub-word to determine whether it is code or data. 31 */ 32 return (opcode != OP_NOP || instr == 0); 33 } 34 35 /* 36 * Parse an instruction, return the length of the instruction 37 */ 38 static inline int parseInsn(const u2 *codePtr, DecodedInstruction *decInsn, 39 bool printMe) 40 { 41 // Don't parse instruction data 42 if (!contentIsInsn(codePtr)) { 43 return 0; 44 } 45 46 u2 instr = *codePtr; 47 Opcode opcode = dexOpcodeFromCodeUnit(instr); 48 49 dexDecodeInstruction(codePtr, decInsn); 50 if (printMe) { 51 char *decodedString = dvmCompilerGetDalvikDisassembly(decInsn, NULL); 52 ALOGD("%p: %#06x %s", codePtr, opcode, decodedString); 53 } 54 return dexGetWidthFromOpcode(opcode); 55 } 56 57 #define UNKNOWN_TARGET 0xffffffff 58 59 /* 60 * Identify block-ending instructions and collect supplemental information 61 * regarding the following instructions. 
62 */ 63 static inline bool findBlockBoundary(const Method *caller, MIR *insn, 64 unsigned int curOffset, 65 unsigned int *target, bool *isInvoke, 66 const Method **callee) 67 { 68 switch (insn->dalvikInsn.opcode) { 69 /* Target is not compile-time constant */ 70 case OP_RETURN_VOID: 71 case OP_RETURN: 72 case OP_RETURN_WIDE: 73 case OP_RETURN_OBJECT: 74 case OP_THROW: 75 *target = UNKNOWN_TARGET; 76 break; 77 case OP_INVOKE_VIRTUAL: 78 case OP_INVOKE_VIRTUAL_RANGE: 79 case OP_INVOKE_INTERFACE: 80 case OP_INVOKE_INTERFACE_RANGE: 81 case OP_INVOKE_VIRTUAL_QUICK: 82 case OP_INVOKE_VIRTUAL_QUICK_RANGE: 83 *isInvoke = true; 84 break; 85 case OP_INVOKE_SUPER: 86 case OP_INVOKE_SUPER_RANGE: { 87 int mIndex = caller->clazz->pDvmDex-> 88 pResMethods[insn->dalvikInsn.vB]->methodIndex; 89 const Method *calleeMethod = 90 caller->clazz->super->vtable[mIndex]; 91 92 if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) { 93 *target = (unsigned int) calleeMethod->insns; 94 } 95 *isInvoke = true; 96 *callee = calleeMethod; 97 break; 98 } 99 case OP_INVOKE_STATIC: 100 case OP_INVOKE_STATIC_RANGE: { 101 const Method *calleeMethod = 102 caller->clazz->pDvmDex->pResMethods[insn->dalvikInsn.vB]; 103 104 if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) { 105 *target = (unsigned int) calleeMethod->insns; 106 } 107 *isInvoke = true; 108 *callee = calleeMethod; 109 break; 110 } 111 case OP_INVOKE_SUPER_QUICK: 112 case OP_INVOKE_SUPER_QUICK_RANGE: { 113 const Method *calleeMethod = 114 caller->clazz->super->vtable[insn->dalvikInsn.vB]; 115 116 if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) { 117 *target = (unsigned int) calleeMethod->insns; 118 } 119 *isInvoke = true; 120 *callee = calleeMethod; 121 break; 122 } 123 case OP_INVOKE_DIRECT: 124 case OP_INVOKE_DIRECT_RANGE: { 125 const Method *calleeMethod = 126 caller->clazz->pDvmDex->pResMethods[insn->dalvikInsn.vB]; 127 if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) { 128 *target = (unsigned int) calleeMethod->insns; 129 } 130 *isInvoke = true; 131 *callee = calleeMethod; 132 break; 133 } 134 case OP_GOTO: 135 case OP_GOTO_16: 136 case OP_GOTO_32: 137 *target = curOffset + (int) insn->dalvikInsn.vA; 138 break; 139 140 case OP_IF_EQ: 141 case OP_IF_NE: 142 case OP_IF_LT: 143 case OP_IF_GE: 144 case OP_IF_GT: 145 case OP_IF_LE: 146 *target = curOffset + (int) insn->dalvikInsn.vC; 147 break; 148 149 case OP_IF_EQZ: 150 case OP_IF_NEZ: 151 case OP_IF_LTZ: 152 case OP_IF_GEZ: 153 case OP_IF_GTZ: 154 case OP_IF_LEZ: 155 *target = curOffset + (int) insn->dalvikInsn.vB; 156 break; 157 158 default: 159 return false; 160 } 161 return true; 162 } 163 164 static inline bool isGoto(MIR *insn) 165 { 166 switch (insn->dalvikInsn.opcode) { 167 case OP_GOTO: 168 case OP_GOTO_16: 169 case OP_GOTO_32: 170 return true; 171 default: 172 return false; 173 } 174 } 175 176 /* 177 * Identify unconditional branch instructions 178 */ 179 static inline bool isUnconditionalBranch(MIR *insn) 180 { 181 switch (insn->dalvikInsn.opcode) { 182 case OP_RETURN_VOID: 183 case OP_RETURN: 184 case OP_RETURN_WIDE: 185 case OP_RETURN_OBJECT: 186 return true; 187 default: 188 return isGoto(insn); 189 } 190 } 191 192 /* 193 * dvmHashTableLookup() callback 194 */ 195 static int compareMethod(const CompilerMethodStats *m1, 196 const CompilerMethodStats *m2) 197 { 198 return (int) m1->method - (int) m2->method; 199 } 200 201 /* 202 * Analyze the body of the method to collect high-level information regarding 203 * inlining: 204 * - is empty method? 205 * - is getter/setter? 
206 * - can throw exception? 207 * 208 * Currently the inliner only handles getters and setters. When its capability 209 * becomes more sophisticated more information will be retrieved here. 210 */ 211 static int analyzeInlineTarget(DecodedInstruction *dalvikInsn, int attributes, 212 int offset) 213 { 214 int flags = dexGetFlagsFromOpcode(dalvikInsn->opcode); 215 int dalvikOpcode = dalvikInsn->opcode; 216 217 if (flags & kInstrInvoke) { 218 attributes &= ~METHOD_IS_LEAF; 219 } 220 221 if (!(flags & kInstrCanReturn)) { 222 if (!(dvmCompilerDataFlowAttributes[dalvikOpcode] & 223 DF_IS_GETTER)) { 224 attributes &= ~METHOD_IS_GETTER; 225 } 226 if (!(dvmCompilerDataFlowAttributes[dalvikOpcode] & 227 DF_IS_SETTER)) { 228 attributes &= ~METHOD_IS_SETTER; 229 } 230 } 231 232 /* 233 * The expected instruction sequence is setter will never return value and 234 * getter will also do. Clear the bits if the behavior is discovered 235 * otherwise. 236 */ 237 if (flags & kInstrCanReturn) { 238 if (dalvikOpcode == OP_RETURN_VOID) { 239 attributes &= ~METHOD_IS_GETTER; 240 } 241 else { 242 attributes &= ~METHOD_IS_SETTER; 243 } 244 } 245 246 if (flags & kInstrCanThrow) { 247 attributes &= ~METHOD_IS_THROW_FREE; 248 } 249 250 if (offset == 0 && dalvikOpcode == OP_RETURN_VOID) { 251 attributes |= METHOD_IS_EMPTY; 252 } 253 254 /* 255 * Check if this opcode is selected for single stepping. 256 * If so, don't inline the callee as there is no stack frame for the 257 * interpreter to single-step through the instruction. 258 */ 259 if (SINGLE_STEP_OP(dalvikOpcode)) { 260 attributes &= ~(METHOD_IS_GETTER | METHOD_IS_SETTER); 261 } 262 263 return attributes; 264 } 265 266 /* 267 * Analyze each method whose traces are ever compiled. Collect a variety of 268 * statistics like the ratio of exercised vs overall code and code bloat 269 * ratios. If isCallee is true, also analyze each instruction in more details 270 * to see if it is suitable for inlining. 271 */ 272 CompilerMethodStats *dvmCompilerAnalyzeMethodBody(const Method *method, 273 bool isCallee) 274 { 275 const DexCode *dexCode = dvmGetMethodCode(method); 276 const u2 *codePtr = dexCode->insns; 277 const u2 *codeEnd = dexCode->insns + dexCode->insnsSize; 278 int insnSize = 0; 279 int hashValue = dvmComputeUtf8Hash(method->name); 280 281 CompilerMethodStats dummyMethodEntry; // For hash table lookup 282 CompilerMethodStats *realMethodEntry; // For hash table storage 283 284 /* For lookup only */ 285 dummyMethodEntry.method = method; 286 realMethodEntry = (CompilerMethodStats *) 287 dvmHashTableLookup(gDvmJit.methodStatsTable, 288 hashValue, 289 &dummyMethodEntry, 290 (HashCompareFunc) compareMethod, 291 false); 292 293 /* This method has never been analyzed before - create an entry */ 294 if (realMethodEntry == NULL) { 295 realMethodEntry = 296 (CompilerMethodStats *) calloc(1, sizeof(CompilerMethodStats)); 297 realMethodEntry->method = method; 298 299 dvmHashTableLookup(gDvmJit.methodStatsTable, hashValue, 300 realMethodEntry, 301 (HashCompareFunc) compareMethod, 302 true); 303 } 304 305 /* This method is invoked as a callee and has been analyzed - just return */ 306 if ((isCallee == true) && (realMethodEntry->attributes & METHOD_IS_CALLEE)) 307 return realMethodEntry; 308 309 /* 310 * Similarly, return if this method has been compiled before as a hot 311 * method already. 
312 */ 313 if ((isCallee == false) && 314 (realMethodEntry->attributes & METHOD_IS_HOT)) 315 return realMethodEntry; 316 317 int attributes; 318 319 /* Method hasn't been analyzed for the desired purpose yet */ 320 if (isCallee) { 321 /* Aggressively set the attributes until proven otherwise */ 322 attributes = METHOD_IS_LEAF | METHOD_IS_THROW_FREE | METHOD_IS_CALLEE | 323 METHOD_IS_GETTER | METHOD_IS_SETTER; 324 } else { 325 attributes = METHOD_IS_HOT; 326 } 327 328 /* Count the number of instructions */ 329 while (codePtr < codeEnd) { 330 DecodedInstruction dalvikInsn; 331 int width = parseInsn(codePtr, &dalvikInsn, false); 332 333 /* Terminate when the data section is seen */ 334 if (width == 0) 335 break; 336 337 if (isCallee) { 338 attributes = analyzeInlineTarget(&dalvikInsn, attributes, insnSize); 339 } 340 341 insnSize += width; 342 codePtr += width; 343 } 344 345 /* 346 * Only handle simple getters/setters with one instruction followed by 347 * return 348 */ 349 if ((attributes & (METHOD_IS_GETTER | METHOD_IS_SETTER)) && 350 (insnSize != 3)) { 351 attributes &= ~(METHOD_IS_GETTER | METHOD_IS_SETTER); 352 } 353 354 realMethodEntry->dalvikSize = insnSize * 2; 355 realMethodEntry->attributes |= attributes; 356 357 #if 0 358 /* Uncomment the following to explore various callee patterns */ 359 if (attributes & METHOD_IS_THROW_FREE) { 360 ALOGE("%s%s is inlinable%s", method->clazz->descriptor, method->name, 361 (attributes & METHOD_IS_EMPTY) ? " empty" : ""); 362 } 363 364 if (attributes & METHOD_IS_LEAF) { 365 ALOGE("%s%s is leaf %d%s", method->clazz->descriptor, method->name, 366 insnSize, insnSize < 5 ? " (small)" : ""); 367 } 368 369 if (attributes & (METHOD_IS_GETTER | METHOD_IS_SETTER)) { 370 ALOGE("%s%s is %s", method->clazz->descriptor, method->name, 371 attributes & METHOD_IS_GETTER ? "getter": "setter"); 372 } 373 if (attributes == 374 (METHOD_IS_LEAF | METHOD_IS_THROW_FREE | METHOD_IS_CALLEE)) { 375 ALOGE("%s%s is inlinable non setter/getter", method->clazz->descriptor, 376 method->name); 377 } 378 #endif 379 380 return realMethodEntry; 381 } 382 383 /* 384 * Crawl the stack of the thread that requesed compilation to see if any of the 385 * ancestors are on the blacklist. 386 */ 387 static bool filterMethodByCallGraph(Thread *thread, const char *curMethodName) 388 { 389 /* Crawl the Dalvik stack frames and compare the method name*/ 390 StackSaveArea *ssaPtr = ((StackSaveArea *) thread->interpSave.curFrame) - 1; 391 while (ssaPtr != ((StackSaveArea *) NULL) - 1) { 392 const Method *method = ssaPtr->method; 393 if (method) { 394 int hashValue = dvmComputeUtf8Hash(method->name); 395 bool found = 396 dvmHashTableLookup(gDvmJit.methodTable, hashValue, 397 (char *) method->name, 398 (HashCompareFunc) strcmp, false) != 399 NULL; 400 if (found) { 401 ALOGD("Method %s (--> %s) found on the JIT %s list", 402 method->name, curMethodName, 403 gDvmJit.includeSelectedMethod ? "white" : "black"); 404 return true; 405 } 406 407 } 408 ssaPtr = ((StackSaveArea *) ssaPtr->prevFrame) - 1; 409 }; 410 return false; 411 } 412 413 /* 414 * Since we are including instructions from possibly a cold method into the 415 * current trace, we need to make sure that all the associated information 416 * with the callee is properly initialized. If not, we punt on this inline 417 * target. 418 * 419 * TODO: volatile instructions will be handled later. 
420 */ 421 bool dvmCompilerCanIncludeThisInstruction(const Method *method, 422 const DecodedInstruction *insn) 423 { 424 switch (insn->opcode) { 425 case OP_NEW_INSTANCE: 426 case OP_CHECK_CAST: { 427 ClassObject *classPtr = (ClassObject *)(void*) 428 (method->clazz->pDvmDex->pResClasses[insn->vB]); 429 430 /* Class hasn't been initialized yet */ 431 if (classPtr == NULL) { 432 return false; 433 } 434 return true; 435 } 436 case OP_SGET: 437 case OP_SGET_WIDE: 438 case OP_SGET_OBJECT: 439 case OP_SGET_BOOLEAN: 440 case OP_SGET_BYTE: 441 case OP_SGET_CHAR: 442 case OP_SGET_SHORT: 443 case OP_SPUT: 444 case OP_SPUT_WIDE: 445 case OP_SPUT_OBJECT: 446 case OP_SPUT_BOOLEAN: 447 case OP_SPUT_BYTE: 448 case OP_SPUT_CHAR: 449 case OP_SPUT_SHORT: { 450 void *fieldPtr = (void*) 451 (method->clazz->pDvmDex->pResFields[insn->vB]); 452 453 if (fieldPtr == NULL) { 454 return false; 455 } 456 return true; 457 } 458 case OP_INVOKE_SUPER: 459 case OP_INVOKE_SUPER_RANGE: { 460 int mIndex = method->clazz->pDvmDex-> 461 pResMethods[insn->vB]->methodIndex; 462 const Method *calleeMethod = method->clazz->super->vtable[mIndex]; 463 if (calleeMethod == NULL) { 464 return false; 465 } 466 return true; 467 } 468 case OP_INVOKE_SUPER_QUICK: 469 case OP_INVOKE_SUPER_QUICK_RANGE: { 470 const Method *calleeMethod = method->clazz->super->vtable[insn->vB]; 471 if (calleeMethod == NULL) { 472 return false; 473 } 474 return true; 475 } 476 case OP_INVOKE_STATIC: 477 case OP_INVOKE_STATIC_RANGE: 478 case OP_INVOKE_DIRECT: 479 case OP_INVOKE_DIRECT_RANGE: { 480 const Method *calleeMethod = 481 method->clazz->pDvmDex->pResMethods[insn->vB]; 482 if (calleeMethod == NULL) { 483 return false; 484 } 485 return true; 486 } 487 case OP_CONST_CLASS: { 488 void *classPtr = (void*) 489 (method->clazz->pDvmDex->pResClasses[insn->vB]); 490 491 if (classPtr == NULL) { 492 return false; 493 } 494 return true; 495 } 496 case OP_CONST_STRING_JUMBO: 497 case OP_CONST_STRING: { 498 void *strPtr = (void*) 499 (method->clazz->pDvmDex->pResStrings[insn->vB]); 500 501 if (strPtr == NULL) { 502 return false; 503 } 504 return true; 505 } 506 default: 507 return true; 508 } 509 } 510 511 /* Split an existing block from the specified code offset into two */ 512 static BasicBlock *splitBlock(CompilationUnit *cUnit, 513 unsigned int codeOffset, 514 BasicBlock *origBlock, 515 BasicBlock **immedPredBlockP) 516 { 517 MIR *insn = origBlock->firstMIRInsn; 518 while (insn) { 519 if (insn->offset == codeOffset) break; 520 insn = insn->next; 521 } 522 if (insn == NULL) { 523 ALOGE("Break split failed"); 524 dvmAbort(); 525 } 526 BasicBlock *bottomBlock = dvmCompilerNewBB(kDalvikByteCode, 527 cUnit->numBlocks++); 528 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bottomBlock); 529 530 bottomBlock->startOffset = codeOffset; 531 bottomBlock->firstMIRInsn = insn; 532 bottomBlock->lastMIRInsn = origBlock->lastMIRInsn; 533 534 /* Handle the taken path */ 535 bottomBlock->taken = origBlock->taken; 536 if (bottomBlock->taken) { 537 origBlock->taken = NULL; 538 dvmCompilerClearBit(bottomBlock->taken->predecessors, origBlock->id); 539 dvmCompilerSetBit(bottomBlock->taken->predecessors, bottomBlock->id); 540 } 541 542 /* Handle the fallthrough path */ 543 bottomBlock->needFallThroughBranch = origBlock->needFallThroughBranch; 544 bottomBlock->fallThrough = origBlock->fallThrough; 545 origBlock->fallThrough = bottomBlock; 546 origBlock->needFallThroughBranch = true; 547 dvmCompilerSetBit(bottomBlock->predecessors, origBlock->id); 548 if (bottomBlock->fallThrough) { 
549 dvmCompilerClearBit(bottomBlock->fallThrough->predecessors, 550 origBlock->id); 551 dvmCompilerSetBit(bottomBlock->fallThrough->predecessors, 552 bottomBlock->id); 553 } 554 555 /* Handle the successor list */ 556 if (origBlock->successorBlockList.blockListType != kNotUsed) { 557 bottomBlock->successorBlockList = origBlock->successorBlockList; 558 origBlock->successorBlockList.blockListType = kNotUsed; 559 GrowableListIterator iterator; 560 561 dvmGrowableListIteratorInit(&bottomBlock->successorBlockList.blocks, 562 &iterator); 563 while (true) { 564 SuccessorBlockInfo *successorBlockInfo = 565 (SuccessorBlockInfo *) dvmGrowableListIteratorNext(&iterator); 566 if (successorBlockInfo == NULL) break; 567 BasicBlock *bb = successorBlockInfo->block; 568 dvmCompilerClearBit(bb->predecessors, origBlock->id); 569 dvmCompilerSetBit(bb->predecessors, bottomBlock->id); 570 } 571 } 572 573 origBlock->lastMIRInsn = insn->prev; 574 575 insn->prev->next = NULL; 576 insn->prev = NULL; 577 578 /* 579 * Update the immediate predecessor block pointer so that outgoing edges 580 * can be applied to the proper block. 581 */ 582 if (immedPredBlockP) { 583 assert(*immedPredBlockP == origBlock); 584 *immedPredBlockP = bottomBlock; 585 } 586 return bottomBlock; 587 } 588 589 /* 590 * Given a code offset, find out the block that starts with it. If the offset 591 * is in the middle of an existing block, split it into two. If immedPredBlockP 592 * is non-null and is the block being split, update *immedPredBlockP to point 593 * to the bottom block so that outgoing edges can be setup properly (by the 594 * caller). 595 */ 596 static BasicBlock *findBlock(CompilationUnit *cUnit, 597 unsigned int codeOffset, 598 bool split, bool create, 599 BasicBlock **immedPredBlockP) 600 { 601 GrowableList *blockList = &cUnit->blockList; 602 BasicBlock *bb; 603 unsigned int i; 604 605 for (i = 0; i < blockList->numUsed; i++) { 606 bb = (BasicBlock *) blockList->elemList[i]; 607 if (bb->blockType != kDalvikByteCode) continue; 608 if (bb->startOffset == codeOffset) return bb; 609 /* Check if a branch jumps into the middle of an existing block */ 610 if ((split == true) && (codeOffset > bb->startOffset) && 611 (bb->lastMIRInsn != NULL) && 612 (codeOffset <= bb->lastMIRInsn->offset)) { 613 BasicBlock *newBB = splitBlock(cUnit, codeOffset, bb, 614 bb == *immedPredBlockP ? 615 immedPredBlockP : NULL); 616 return newBB; 617 } 618 } 619 if (create) { 620 bb = dvmCompilerNewBB(kDalvikByteCode, cUnit->numBlocks++); 621 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb); 622 bb->startOffset = codeOffset; 623 return bb; 624 } 625 return NULL; 626 } 627 628 /* Dump the CFG into a DOT graph */ 629 void dvmDumpCFG(CompilationUnit *cUnit, const char *dirPrefix) 630 { 631 const Method *method = cUnit->method; 632 FILE *file; 633 char *signature = dexProtoCopyMethodDescriptor(&method->prototype); 634 char startOffset[80]; 635 sprintf(startOffset, "_%x", cUnit->entryBlock->fallThrough->startOffset); 636 char *fileName = (char *) dvmCompilerNew( 637 strlen(dirPrefix) + 638 strlen(method->clazz->descriptor) + 639 strlen(method->name) + 640 strlen(signature) + 641 strlen(startOffset) + 642 strlen(".dot") + 1, true); 643 sprintf(fileName, "%s%s%s%s%s.dot", dirPrefix, 644 method->clazz->descriptor, method->name, signature, startOffset); 645 free(signature); 646 647 /* 648 * Convert the special characters into a filesystem- and shell-friendly 649 * format. 
650 */ 651 int i; 652 for (i = strlen(dirPrefix); fileName[i]; i++) { 653 if (fileName[i] == '/') { 654 fileName[i] = '_'; 655 } else if (fileName[i] == ';') { 656 fileName[i] = '#'; 657 } else if (fileName[i] == '$') { 658 fileName[i] = '+'; 659 } else if (fileName[i] == '(' || fileName[i] == ')') { 660 fileName[i] = '@'; 661 } else if (fileName[i] == '<' || fileName[i] == '>') { 662 fileName[i] = '='; 663 } 664 } 665 file = fopen(fileName, "w"); 666 if (file == NULL) { 667 return; 668 } 669 fprintf(file, "digraph G {\n"); 670 671 fprintf(file, " rankdir=TB\n"); 672 673 int numReachableBlocks = cUnit->numReachableBlocks; 674 int idx; 675 const GrowableList *blockList = &cUnit->blockList; 676 677 for (idx = 0; idx < numReachableBlocks; idx++) { 678 int blockIdx = cUnit->dfsOrder.elemList[idx]; 679 BasicBlock *bb = (BasicBlock *) dvmGrowableListGetElement(blockList, 680 blockIdx); 681 if (bb == NULL) break; 682 if (bb->blockType == kEntryBlock) { 683 fprintf(file, " entry [shape=Mdiamond];\n"); 684 } else if (bb->blockType == kExitBlock) { 685 fprintf(file, " exit [shape=Mdiamond];\n"); 686 } else if (bb->blockType == kDalvikByteCode) { 687 fprintf(file, " block%04x [shape=record,label = \"{ \\\n", 688 bb->startOffset); 689 const MIR *mir; 690 fprintf(file, " {block id %d\\l}%s\\\n", bb->id, 691 bb->firstMIRInsn ? " | " : " "); 692 for (mir = bb->firstMIRInsn; mir; mir = mir->next) { 693 fprintf(file, " {%04x %s\\l}%s\\\n", mir->offset, 694 mir->ssaRep ? 695 dvmCompilerFullDisassembler(cUnit, mir) : 696 dexGetOpcodeName(mir->dalvikInsn.opcode), 697 mir->next ? " | " : " "); 698 } 699 fprintf(file, " }\"];\n\n"); 700 } else if (bb->blockType == kExceptionHandling) { 701 char blockName[BLOCK_NAME_LEN]; 702 703 dvmGetBlockName(bb, blockName); 704 fprintf(file, " %s [shape=invhouse];\n", blockName); 705 } 706 707 char blockName1[BLOCK_NAME_LEN], blockName2[BLOCK_NAME_LEN]; 708 709 if (bb->taken) { 710 dvmGetBlockName(bb, blockName1); 711 dvmGetBlockName(bb->taken, blockName2); 712 fprintf(file, " %s:s -> %s:n [style=dotted]\n", 713 blockName1, blockName2); 714 } 715 if (bb->fallThrough) { 716 dvmGetBlockName(bb, blockName1); 717 dvmGetBlockName(bb->fallThrough, blockName2); 718 fprintf(file, " %s:s -> %s:n\n", blockName1, blockName2); 719 } 720 721 if (bb->successorBlockList.blockListType != kNotUsed) { 722 fprintf(file, " succ%04x [shape=%s,label = \"{ \\\n", 723 bb->startOffset, 724 (bb->successorBlockList.blockListType == kCatch) ? 725 "Mrecord" : "record"); 726 GrowableListIterator iterator; 727 dvmGrowableListIteratorInit(&bb->successorBlockList.blocks, 728 &iterator); 729 SuccessorBlockInfo *successorBlockInfo = 730 (SuccessorBlockInfo *) dvmGrowableListIteratorNext(&iterator); 731 732 int succId = 0; 733 while (true) { 734 if (successorBlockInfo == NULL) break; 735 736 BasicBlock *destBlock = successorBlockInfo->block; 737 SuccessorBlockInfo *nextSuccessorBlockInfo = 738 (SuccessorBlockInfo *) dvmGrowableListIteratorNext(&iterator); 739 740 fprintf(file, " {<f%d> %04x: %04x\\l}%s\\\n", 741 succId++, 742 successorBlockInfo->key, 743 destBlock->startOffset, 744 (nextSuccessorBlockInfo != NULL) ? 
" | " : " "); 745 746 successorBlockInfo = nextSuccessorBlockInfo; 747 } 748 fprintf(file, " }\"];\n\n"); 749 750 dvmGetBlockName(bb, blockName1); 751 fprintf(file, " %s:s -> succ%04x:n [style=dashed]\n", 752 blockName1, bb->startOffset); 753 754 if (bb->successorBlockList.blockListType == kPackedSwitch || 755 bb->successorBlockList.blockListType == kSparseSwitch) { 756 757 dvmGrowableListIteratorInit(&bb->successorBlockList.blocks, 758 &iterator); 759 760 succId = 0; 761 while (true) { 762 SuccessorBlockInfo *successorBlockInfo = 763 (SuccessorBlockInfo *) 764 dvmGrowableListIteratorNext(&iterator); 765 if (successorBlockInfo == NULL) break; 766 767 BasicBlock *destBlock = successorBlockInfo->block; 768 769 dvmGetBlockName(destBlock, blockName2); 770 fprintf(file, " succ%04x:f%d:e -> %s:n\n", 771 bb->startOffset, succId++, 772 blockName2); 773 } 774 } 775 } 776 fprintf(file, "\n"); 777 778 /* 779 * If we need to debug the dominator tree, uncomment the following code 780 */ 781 #if 1 782 dvmGetBlockName(bb, blockName1); 783 fprintf(file, " cfg%s [label=\"%s\", shape=none];\n", 784 blockName1, blockName1); 785 if (bb->iDom) { 786 dvmGetBlockName(bb->iDom, blockName2); 787 fprintf(file, " cfg%s:s -> cfg%s:n\n\n", 788 blockName2, blockName1); 789 } 790 #endif 791 } 792 fprintf(file, "}\n"); 793 fclose(file); 794 } 795 796 /* Verify if all the successor is connected with all the claimed predecessors */ 797 static bool verifyPredInfo(CompilationUnit *cUnit, BasicBlock *bb) 798 { 799 BitVectorIterator bvIterator; 800 801 dvmBitVectorIteratorInit(bb->predecessors, &bvIterator); 802 while (true) { 803 int blockIdx = dvmBitVectorIteratorNext(&bvIterator); 804 if (blockIdx == -1) break; 805 BasicBlock *predBB = (BasicBlock *) 806 dvmGrowableListGetElement(&cUnit->blockList, blockIdx); 807 bool found = false; 808 if (predBB->taken == bb) { 809 found = true; 810 } else if (predBB->fallThrough == bb) { 811 found = true; 812 } else if (predBB->successorBlockList.blockListType != kNotUsed) { 813 GrowableListIterator iterator; 814 dvmGrowableListIteratorInit(&predBB->successorBlockList.blocks, 815 &iterator); 816 while (true) { 817 SuccessorBlockInfo *successorBlockInfo = 818 (SuccessorBlockInfo *) 819 dvmGrowableListIteratorNext(&iterator); 820 if (successorBlockInfo == NULL) break; 821 BasicBlock *succBB = successorBlockInfo->block; 822 if (succBB == bb) { 823 found = true; 824 break; 825 } 826 } 827 } 828 if (found == false) { 829 char blockName1[BLOCK_NAME_LEN], blockName2[BLOCK_NAME_LEN]; 830 dvmGetBlockName(bb, blockName1); 831 dvmGetBlockName(predBB, blockName2); 832 dvmDumpCFG(cUnit, "/sdcard/cfg/"); 833 ALOGE("Successor %s not found from %s", 834 blockName1, blockName2); 835 dvmAbort(); 836 } 837 } 838 return true; 839 } 840 841 /* Identify code range in try blocks and set up the empty catch blocks */ 842 static void processTryCatchBlocks(CompilationUnit *cUnit) 843 { 844 const Method *meth = cUnit->method; 845 const DexCode *pCode = dvmGetMethodCode(meth); 846 int triesSize = pCode->triesSize; 847 int i; 848 int offset; 849 850 if (triesSize == 0) { 851 return; 852 } 853 854 const DexTry *pTries = dexGetTries(pCode); 855 BitVector *tryBlockAddr = cUnit->tryBlockAddr; 856 857 /* Mark all the insn offsets in Try blocks */ 858 for (i = 0; i < triesSize; i++) { 859 const DexTry* pTry = &pTries[i]; 860 /* all in 16-bit units */ 861 int startOffset = pTry->startAddr; 862 int endOffset = startOffset + pTry->insnCount; 863 864 for (offset = startOffset; offset < endOffset; offset++) { 865 
dvmCompilerSetBit(tryBlockAddr, offset); 866 } 867 } 868 869 /* Iterate over each of the handlers to enqueue the empty Catch blocks */ 870 offset = dexGetFirstHandlerOffset(pCode); 871 int handlersSize = dexGetHandlersSize(pCode); 872 873 for (i = 0; i < handlersSize; i++) { 874 DexCatchIterator iterator; 875 dexCatchIteratorInit(&iterator, pCode, offset); 876 877 for (;;) { 878 DexCatchHandler* handler = dexCatchIteratorNext(&iterator); 879 880 if (handler == NULL) { 881 break; 882 } 883 884 /* 885 * Create dummy catch blocks first. Since these are created before 886 * other blocks are processed, "split" is specified as false. 887 */ 888 findBlock(cUnit, handler->address, 889 /* split */ 890 false, 891 /* create */ 892 true, 893 /* immedPredBlockP */ 894 NULL); 895 } 896 897 offset = dexCatchIteratorGetEndOffset(&iterator, pCode); 898 } 899 } 900 901 /* Process instructions with the kInstrCanBranch flag */ 902 static void processCanBranch(CompilationUnit *cUnit, BasicBlock *curBlock, 903 MIR *insn, int curOffset, int width, int flags, 904 const u2* codePtr, const u2* codeEnd) 905 { 906 int target = curOffset; 907 switch (insn->dalvikInsn.opcode) { 908 case OP_GOTO: 909 case OP_GOTO_16: 910 case OP_GOTO_32: 911 target += (int) insn->dalvikInsn.vA; 912 break; 913 case OP_IF_EQ: 914 case OP_IF_NE: 915 case OP_IF_LT: 916 case OP_IF_GE: 917 case OP_IF_GT: 918 case OP_IF_LE: 919 target += (int) insn->dalvikInsn.vC; 920 break; 921 case OP_IF_EQZ: 922 case OP_IF_NEZ: 923 case OP_IF_LTZ: 924 case OP_IF_GEZ: 925 case OP_IF_GTZ: 926 case OP_IF_LEZ: 927 target += (int) insn->dalvikInsn.vB; 928 break; 929 default: 930 ALOGE("Unexpected opcode(%d) with kInstrCanBranch set", 931 insn->dalvikInsn.opcode); 932 dvmAbort(); 933 } 934 BasicBlock *takenBlock = findBlock(cUnit, target, 935 /* split */ 936 true, 937 /* create */ 938 true, 939 /* immedPredBlockP */ 940 &curBlock); 941 curBlock->taken = takenBlock; 942 dvmCompilerSetBit(takenBlock->predecessors, curBlock->id); 943 944 /* Always terminate the current block for conditional branches */ 945 if (flags & kInstrCanContinue) { 946 BasicBlock *fallthroughBlock = findBlock(cUnit, 947 curOffset + width, 948 /* 949 * If the method is processed 950 * in sequential order from the 951 * beginning, we don't need to 952 * specify split for continue 953 * blocks. However, this 954 * routine can be called by 955 * compileLoop, which starts 956 * parsing the method from an 957 * arbitrary address in the 958 * method body. 959 */ 960 true, 961 /* create */ 962 true, 963 /* immedPredBlockP */ 964 &curBlock); 965 curBlock->fallThrough = fallthroughBlock; 966 dvmCompilerSetBit(fallthroughBlock->predecessors, curBlock->id); 967 } else if (codePtr < codeEnd) { 968 /* Create a fallthrough block for real instructions (incl. 
OP_NOP) */ 969 if (contentIsInsn(codePtr)) { 970 findBlock(cUnit, curOffset + width, 971 /* split */ 972 false, 973 /* create */ 974 true, 975 /* immedPredBlockP */ 976 NULL); 977 } 978 } 979 } 980 981 /* Process instructions with the kInstrCanSwitch flag */ 982 static void processCanSwitch(CompilationUnit *cUnit, BasicBlock *curBlock, 983 MIR *insn, int curOffset, int width, int flags) 984 { 985 u2 *switchData= (u2 *) (cUnit->method->insns + curOffset + 986 insn->dalvikInsn.vB); 987 int size; 988 int *keyTable; 989 int *targetTable; 990 int i; 991 int firstKey; 992 993 /* 994 * Packed switch data format: 995 * ushort ident = 0x0100 magic value 996 * ushort size number of entries in the table 997 * int first_key first (and lowest) switch case value 998 * int targets[size] branch targets, relative to switch opcode 999 * 1000 * Total size is (4+size*2) 16-bit code units. 1001 */ 1002 if (insn->dalvikInsn.opcode == OP_PACKED_SWITCH) { 1003 assert(switchData[0] == kPackedSwitchSignature); 1004 size = switchData[1]; 1005 firstKey = switchData[2] | (switchData[3] << 16); 1006 targetTable = (int *) &switchData[4]; 1007 keyTable = NULL; // Make the compiler happy 1008 /* 1009 * Sparse switch data format: 1010 * ushort ident = 0x0200 magic value 1011 * ushort size number of entries in the table; > 0 1012 * int keys[size] keys, sorted low-to-high; 32-bit aligned 1013 * int targets[size] branch targets, relative to switch opcode 1014 * 1015 * Total size is (2+size*4) 16-bit code units. 1016 */ 1017 } else { 1018 assert(switchData[0] == kSparseSwitchSignature); 1019 size = switchData[1]; 1020 keyTable = (int *) &switchData[2]; 1021 targetTable = (int *) &switchData[2 + size*2]; 1022 firstKey = 0; // To make the compiler happy 1023 } 1024 1025 if (curBlock->successorBlockList.blockListType != kNotUsed) { 1026 ALOGE("Successor block list already in use: %d", 1027 curBlock->successorBlockList.blockListType); 1028 dvmAbort(); 1029 } 1030 curBlock->successorBlockList.blockListType = 1031 (insn->dalvikInsn.opcode == OP_PACKED_SWITCH) ? 1032 kPackedSwitch : kSparseSwitch; 1033 dvmInitGrowableList(&curBlock->successorBlockList.blocks, size); 1034 1035 for (i = 0; i < size; i++) { 1036 BasicBlock *caseBlock = findBlock(cUnit, curOffset + targetTable[i], 1037 /* split */ 1038 true, 1039 /* create */ 1040 true, 1041 /* immedPredBlockP */ 1042 &curBlock); 1043 SuccessorBlockInfo *successorBlockInfo = 1044 (SuccessorBlockInfo *) dvmCompilerNew(sizeof(SuccessorBlockInfo), 1045 false); 1046 successorBlockInfo->block = caseBlock; 1047 successorBlockInfo->key = (insn->dalvikInsn.opcode == OP_PACKED_SWITCH)? 
1048 firstKey + i : keyTable[i]; 1049 dvmInsertGrowableList(&curBlock->successorBlockList.blocks, 1050 (intptr_t) successorBlockInfo); 1051 dvmCompilerSetBit(caseBlock->predecessors, curBlock->id); 1052 } 1053 1054 /* Fall-through case */ 1055 BasicBlock *fallthroughBlock = findBlock(cUnit, 1056 curOffset + width, 1057 /* split */ 1058 false, 1059 /* create */ 1060 true, 1061 /* immedPredBlockP */ 1062 NULL); 1063 curBlock->fallThrough = fallthroughBlock; 1064 dvmCompilerSetBit(fallthroughBlock->predecessors, curBlock->id); 1065 } 1066 1067 /* Process instructions with the kInstrCanThrow flag */ 1068 static void processCanThrow(CompilationUnit *cUnit, BasicBlock *curBlock, 1069 MIR *insn, int curOffset, int width, int flags, 1070 BitVector *tryBlockAddr, const u2 *codePtr, 1071 const u2* codeEnd) 1072 { 1073 const Method *method = cUnit->method; 1074 const DexCode *dexCode = dvmGetMethodCode(method); 1075 1076 /* In try block */ 1077 if (dvmIsBitSet(tryBlockAddr, curOffset)) { 1078 DexCatchIterator iterator; 1079 1080 if (!dexFindCatchHandler(&iterator, dexCode, curOffset)) { 1081 ALOGE("Catch block not found in dexfile for insn %x in %s", 1082 curOffset, method->name); 1083 dvmAbort(); 1084 1085 } 1086 if (curBlock->successorBlockList.blockListType != kNotUsed) { 1087 ALOGE("Successor block list already in use: %d", 1088 curBlock->successorBlockList.blockListType); 1089 dvmAbort(); 1090 } 1091 curBlock->successorBlockList.blockListType = kCatch; 1092 dvmInitGrowableList(&curBlock->successorBlockList.blocks, 2); 1093 1094 for (;;) { 1095 DexCatchHandler* handler = dexCatchIteratorNext(&iterator); 1096 1097 if (handler == NULL) { 1098 break; 1099 } 1100 1101 BasicBlock *catchBlock = findBlock(cUnit, handler->address, 1102 /* split */ 1103 false, 1104 /* create */ 1105 false, 1106 /* immedPredBlockP */ 1107 NULL); 1108 1109 SuccessorBlockInfo *successorBlockInfo = 1110 (SuccessorBlockInfo *) dvmCompilerNew(sizeof(SuccessorBlockInfo), 1111 false); 1112 successorBlockInfo->block = catchBlock; 1113 successorBlockInfo->key = handler->typeIdx; 1114 dvmInsertGrowableList(&curBlock->successorBlockList.blocks, 1115 (intptr_t) successorBlockInfo); 1116 dvmCompilerSetBit(catchBlock->predecessors, curBlock->id); 1117 } 1118 } else { 1119 BasicBlock *ehBlock = dvmCompilerNewBB(kExceptionHandling, 1120 cUnit->numBlocks++); 1121 curBlock->taken = ehBlock; 1122 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) ehBlock); 1123 ehBlock->startOffset = curOffset; 1124 dvmCompilerSetBit(ehBlock->predecessors, curBlock->id); 1125 } 1126 1127 /* 1128 * Force the current block to terminate. 1129 * 1130 * Data may be present before codeEnd, so we need to parse it to know 1131 * whether it is code or data. 1132 */ 1133 if (codePtr < codeEnd) { 1134 /* Create a fallthrough block for real instructions (incl. OP_NOP) */ 1135 if (contentIsInsn(codePtr)) { 1136 BasicBlock *fallthroughBlock = findBlock(cUnit, 1137 curOffset + width, 1138 /* split */ 1139 false, 1140 /* create */ 1141 true, 1142 /* immedPredBlockP */ 1143 NULL); 1144 /* 1145 * OP_THROW and OP_THROW_VERIFICATION_ERROR are unconditional 1146 * branches. 1147 */ 1148 if (insn->dalvikInsn.opcode != OP_THROW_VERIFICATION_ERROR && 1149 insn->dalvikInsn.opcode != OP_THROW) { 1150 curBlock->fallThrough = fallthroughBlock; 1151 dvmCompilerSetBit(fallthroughBlock->predecessors, curBlock->id); 1152 } 1153 } 1154 } 1155 } 1156 1157 /* 1158 * Similar to dvmCompileTrace, but the entity processed here is the whole 1159 * method. 
1160 * 1161 * TODO: implementation will be revisited when the trace builder can provide 1162 * whole-method traces. 1163 */ 1164 bool dvmCompileMethod(const Method *method, JitTranslationInfo *info) 1165 { 1166 CompilationUnit cUnit; 1167 const DexCode *dexCode = dvmGetMethodCode(method); 1168 const u2 *codePtr = dexCode->insns; 1169 const u2 *codeEnd = dexCode->insns + dexCode->insnsSize; 1170 int numBlocks = 0; 1171 unsigned int curOffset = 0; 1172 1173 /* Method already compiled */ 1174 if (dvmJitGetMethodAddr(codePtr)) { 1175 info->codeAddress = NULL; 1176 return false; 1177 } 1178 1179 memset(&cUnit, 0, sizeof(cUnit)); 1180 cUnit.method = method; 1181 1182 cUnit.jitMode = kJitMethod; 1183 1184 /* Initialize the block list */ 1185 dvmInitGrowableList(&cUnit.blockList, 4); 1186 1187 /* 1188 * FIXME - PC reconstruction list won't be needed after the codegen routines 1189 * are enhanced to true method mode. 1190 */ 1191 /* Initialize the PC reconstruction list */ 1192 dvmInitGrowableList(&cUnit.pcReconstructionList, 8); 1193 1194 /* Allocate the bit-vector to track the beginning of basic blocks */ 1195 BitVector *tryBlockAddr = dvmCompilerAllocBitVector(dexCode->insnsSize, 1196 true /* expandable */); 1197 cUnit.tryBlockAddr = tryBlockAddr; 1198 1199 /* Create the default entry and exit blocks and enter them to the list */ 1200 BasicBlock *entryBlock = dvmCompilerNewBB(kEntryBlock, numBlocks++); 1201 BasicBlock *exitBlock = dvmCompilerNewBB(kExitBlock, numBlocks++); 1202 1203 cUnit.entryBlock = entryBlock; 1204 cUnit.exitBlock = exitBlock; 1205 1206 dvmInsertGrowableList(&cUnit.blockList, (intptr_t) entryBlock); 1207 dvmInsertGrowableList(&cUnit.blockList, (intptr_t) exitBlock); 1208 1209 /* Current block to record parsed instructions */ 1210 BasicBlock *curBlock = dvmCompilerNewBB(kDalvikByteCode, numBlocks++); 1211 curBlock->startOffset = 0; 1212 dvmInsertGrowableList(&cUnit.blockList, (intptr_t) curBlock); 1213 entryBlock->fallThrough = curBlock; 1214 dvmCompilerSetBit(curBlock->predecessors, entryBlock->id); 1215 1216 /* 1217 * Store back the number of blocks since new blocks may be created of 1218 * accessing cUnit. 1219 */ 1220 cUnit.numBlocks = numBlocks; 1221 1222 /* Identify code range in try blocks and set up the empty catch blocks */ 1223 processTryCatchBlocks(&cUnit); 1224 1225 /* Parse all instructions and put them into containing basic blocks */ 1226 while (codePtr < codeEnd) { 1227 MIR *insn = (MIR *) dvmCompilerNew(sizeof(MIR), true); 1228 insn->offset = curOffset; 1229 int width = parseInsn(codePtr, &insn->dalvikInsn, false); 1230 insn->width = width; 1231 1232 /* Terminate when the data section is seen */ 1233 if (width == 0) 1234 break; 1235 1236 dvmCompilerAppendMIR(curBlock, insn); 1237 1238 codePtr += width; 1239 int flags = dexGetFlagsFromOpcode(insn->dalvikInsn.opcode); 1240 1241 if (flags & kInstrCanBranch) { 1242 processCanBranch(&cUnit, curBlock, insn, curOffset, width, flags, 1243 codePtr, codeEnd); 1244 } else if (flags & kInstrCanReturn) { 1245 curBlock->fallThrough = exitBlock; 1246 dvmCompilerSetBit(exitBlock->predecessors, curBlock->id); 1247 /* 1248 * Terminate the current block if there are instructions 1249 * afterwards. 1250 */ 1251 if (codePtr < codeEnd) { 1252 /* 1253 * Create a fallthrough block for real instructions 1254 * (incl. OP_NOP). 
1255 */ 1256 if (contentIsInsn(codePtr)) { 1257 findBlock(&cUnit, curOffset + width, 1258 /* split */ 1259 false, 1260 /* create */ 1261 true, 1262 /* immedPredBlockP */ 1263 NULL); 1264 } 1265 } 1266 } else if (flags & kInstrCanThrow) { 1267 processCanThrow(&cUnit, curBlock, insn, curOffset, width, flags, 1268 tryBlockAddr, codePtr, codeEnd); 1269 } else if (flags & kInstrCanSwitch) { 1270 processCanSwitch(&cUnit, curBlock, insn, curOffset, width, flags); 1271 } 1272 curOffset += width; 1273 BasicBlock *nextBlock = findBlock(&cUnit, curOffset, 1274 /* split */ 1275 false, 1276 /* create */ 1277 false, 1278 /* immedPredBlockP */ 1279 NULL); 1280 if (nextBlock) { 1281 /* 1282 * The next instruction could be the target of a previously parsed 1283 * forward branch so a block is already created. If the current 1284 * instruction is not an unconditional branch, connect them through 1285 * the fall-through link. 1286 */ 1287 assert(curBlock->fallThrough == NULL || 1288 curBlock->fallThrough == nextBlock || 1289 curBlock->fallThrough == exitBlock); 1290 1291 if ((curBlock->fallThrough == NULL) && 1292 (flags & kInstrCanContinue)) { 1293 curBlock->fallThrough = nextBlock; 1294 dvmCompilerSetBit(nextBlock->predecessors, curBlock->id); 1295 } 1296 curBlock = nextBlock; 1297 } 1298 } 1299 1300 if (cUnit.printMe) { 1301 dvmCompilerDumpCompilationUnit(&cUnit); 1302 } 1303 1304 /* Adjust this value accordingly once inlining is performed */ 1305 cUnit.numDalvikRegisters = cUnit.method->registersSize; 1306 1307 /* Verify if all blocks are connected as claimed */ 1308 /* FIXME - to be disabled in the future */ 1309 dvmCompilerDataFlowAnalysisDispatcher(&cUnit, verifyPredInfo, 1310 kAllNodes, 1311 false /* isIterative */); 1312 1313 1314 /* Perform SSA transformation for the whole method */ 1315 dvmCompilerMethodSSATransformation(&cUnit); 1316 1317 dvmCompilerInitializeRegAlloc(&cUnit); // Needs to happen after SSA naming 1318 1319 /* Allocate Registers using simple local allocation scheme */ 1320 dvmCompilerLocalRegAlloc(&cUnit); 1321 1322 /* Convert MIR to LIR, etc. */ 1323 dvmCompilerMethodMIR2LIR(&cUnit); 1324 1325 // Debugging only 1326 //dvmDumpCFG(&cUnit, "/sdcard/cfg/"); 1327 1328 /* Method is not empty */ 1329 if (cUnit.firstLIRInsn) { 1330 /* Convert LIR into machine code. 
Loop for recoverable retries */ 1331 do { 1332 dvmCompilerAssembleLIR(&cUnit, info); 1333 cUnit.assemblerRetries++; 1334 if (cUnit.printMe && cUnit.assemblerStatus != kSuccess) 1335 ALOGD("Assembler abort #%d on %d",cUnit.assemblerRetries, 1336 cUnit.assemblerStatus); 1337 } while (cUnit.assemblerStatus == kRetryAll); 1338 1339 if (cUnit.printMe) { 1340 dvmCompilerCodegenDump(&cUnit); 1341 } 1342 1343 if (info->codeAddress) { 1344 dvmJitSetCodeAddr(dexCode->insns, info->codeAddress, 1345 info->instructionSet, true, 0); 1346 /* 1347 * Clear the codeAddress for the enclosing trace to reuse the info 1348 */ 1349 info->codeAddress = NULL; 1350 } 1351 } 1352 1353 return false; 1354 } 1355 1356 /* Extending the trace by crawling the code from curBlock */ 1357 static bool exhaustTrace(CompilationUnit *cUnit, BasicBlock *curBlock) 1358 { 1359 unsigned int curOffset = curBlock->startOffset; 1360 const u2 *codePtr = cUnit->method->insns + curOffset; 1361 1362 if (curBlock->visited == true) return false; 1363 1364 curBlock->visited = true; 1365 1366 if (curBlock->blockType == kEntryBlock || 1367 curBlock->blockType == kExitBlock) { 1368 return false; 1369 } 1370 1371 /* 1372 * Block has been parsed - check the taken/fallThrough in case it is a split 1373 * block. 1374 */ 1375 if (curBlock->firstMIRInsn != NULL) { 1376 bool changed = false; 1377 if (curBlock->taken) 1378 changed |= exhaustTrace(cUnit, curBlock->taken); 1379 if (curBlock->fallThrough) 1380 changed |= exhaustTrace(cUnit, curBlock->fallThrough); 1381 return changed; 1382 } 1383 while (true) { 1384 MIR *insn = (MIR *) dvmCompilerNew(sizeof(MIR), true); 1385 insn->offset = curOffset; 1386 int width = parseInsn(codePtr, &insn->dalvikInsn, false); 1387 insn->width = width; 1388 1389 /* Terminate when the data section is seen */ 1390 if (width == 0) 1391 break; 1392 1393 dvmCompilerAppendMIR(curBlock, insn); 1394 1395 codePtr += width; 1396 int flags = dexGetFlagsFromOpcode(insn->dalvikInsn.opcode); 1397 1398 /* Stop extending the trace after seeing these instructions */ 1399 if (flags & (kInstrCanReturn | kInstrCanSwitch | kInstrInvoke)) { 1400 curBlock->fallThrough = cUnit->exitBlock; 1401 dvmCompilerSetBit(cUnit->exitBlock->predecessors, curBlock->id); 1402 break; 1403 } else if (flags & kInstrCanBranch) { 1404 processCanBranch(cUnit, curBlock, insn, curOffset, width, flags, 1405 codePtr, NULL); 1406 if (curBlock->taken) { 1407 exhaustTrace(cUnit, curBlock->taken); 1408 } 1409 if (curBlock->fallThrough) { 1410 exhaustTrace(cUnit, curBlock->fallThrough); 1411 } 1412 break; 1413 } 1414 curOffset += width; 1415 BasicBlock *nextBlock = findBlock(cUnit, curOffset, 1416 /* split */ 1417 false, 1418 /* create */ 1419 false, 1420 /* immedPredBlockP */ 1421 NULL); 1422 if (nextBlock) { 1423 /* 1424 * The next instruction could be the target of a previously parsed 1425 * forward branch so a block is already created. If the current 1426 * instruction is not an unconditional branch, connect them through 1427 * the fall-through link. 
1428 */ 1429 assert(curBlock->fallThrough == NULL || 1430 curBlock->fallThrough == nextBlock || 1431 curBlock->fallThrough == cUnit->exitBlock); 1432 1433 if ((curBlock->fallThrough == NULL) && 1434 (flags & kInstrCanContinue)) { 1435 curBlock->needFallThroughBranch = true; 1436 curBlock->fallThrough = nextBlock; 1437 dvmCompilerSetBit(nextBlock->predecessors, curBlock->id); 1438 } 1439 /* Block has been visited - no more parsing needed */ 1440 if (nextBlock->visited == true) { 1441 return true; 1442 } 1443 curBlock = nextBlock; 1444 } 1445 } 1446 return true; 1447 } 1448 1449 /* Compile a loop */ 1450 static bool compileLoop(CompilationUnit *cUnit, unsigned int startOffset, 1451 JitTraceDescription *desc, int numMaxInsts, 1452 JitTranslationInfo *info, jmp_buf *bailPtr, 1453 int optHints) 1454 { 1455 int numBlocks = 0; 1456 unsigned int curOffset = startOffset; 1457 bool changed; 1458 BasicBlock *bb; 1459 #if defined(WITH_JIT_TUNING) 1460 CompilerMethodStats *methodStats; 1461 #endif 1462 1463 cUnit->jitMode = kJitLoop; 1464 1465 /* Initialize the block list */ 1466 dvmInitGrowableList(&cUnit->blockList, 4); 1467 1468 /* Initialize the PC reconstruction list */ 1469 dvmInitGrowableList(&cUnit->pcReconstructionList, 8); 1470 1471 /* Create the default entry and exit blocks and enter them to the list */ 1472 BasicBlock *entryBlock = dvmCompilerNewBB(kEntryBlock, numBlocks++); 1473 entryBlock->startOffset = curOffset; 1474 BasicBlock *exitBlock = dvmCompilerNewBB(kExitBlock, numBlocks++); 1475 1476 cUnit->entryBlock = entryBlock; 1477 cUnit->exitBlock = exitBlock; 1478 1479 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) entryBlock); 1480 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) exitBlock); 1481 1482 /* Current block to record parsed instructions */ 1483 BasicBlock *curBlock = dvmCompilerNewBB(kDalvikByteCode, numBlocks++); 1484 curBlock->startOffset = curOffset; 1485 1486 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) curBlock); 1487 entryBlock->fallThrough = curBlock; 1488 dvmCompilerSetBit(curBlock->predecessors, entryBlock->id); 1489 1490 /* 1491 * Store back the number of blocks since new blocks may be created of 1492 * accessing cUnit. 
1493 */ 1494 cUnit->numBlocks = numBlocks; 1495 1496 do { 1497 dvmCompilerDataFlowAnalysisDispatcher(cUnit, 1498 dvmCompilerClearVisitedFlag, 1499 kAllNodes, 1500 false /* isIterative */); 1501 changed = exhaustTrace(cUnit, curBlock); 1502 } while (changed); 1503 1504 /* Backward chaining block */ 1505 bb = dvmCompilerNewBB(kChainingCellBackwardBranch, cUnit->numBlocks++); 1506 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb); 1507 cUnit->backChainBlock = bb; 1508 1509 /* A special block to host PC reconstruction code */ 1510 bb = dvmCompilerNewBB(kPCReconstruction, cUnit->numBlocks++); 1511 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb); 1512 1513 /* And one final block that publishes the PC and raises the exception */ 1514 bb = dvmCompilerNewBB(kExceptionHandling, cUnit->numBlocks++); 1515 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb); 1516 cUnit->puntBlock = bb; 1517 1518 cUnit->numDalvikRegisters = cUnit->method->registersSize; 1519 1520 /* Verify if all blocks are connected as claimed */ 1521 /* FIXME - to be disabled in the future */ 1522 dvmCompilerDataFlowAnalysisDispatcher(cUnit, verifyPredInfo, 1523 kAllNodes, 1524 false /* isIterative */); 1525 1526 1527 /* Try to identify a loop */ 1528 if (!dvmCompilerBuildLoop(cUnit)) 1529 goto bail; 1530 1531 dvmCompilerLoopOpt(cUnit); 1532 1533 /* 1534 * Change the backward branch to the backward chaining cell after dataflow 1535 * analsys/optimizations are done. 1536 */ 1537 dvmCompilerInsertBackwardChaining(cUnit); 1538 1539 dvmCompilerInitializeRegAlloc(cUnit); 1540 1541 /* Allocate Registers using simple local allocation scheme */ 1542 dvmCompilerLocalRegAlloc(cUnit); 1543 1544 /* Convert MIR to LIR, etc. */ 1545 dvmCompilerMIR2LIR(cUnit); 1546 1547 /* Loop contains never executed blocks / heavy instructions */ 1548 if (cUnit->quitLoopMode) { 1549 if (cUnit->printMe || gDvmJit.receivedSIGUSR2) { 1550 ALOGD("Loop trace @ offset %04x aborted due to unresolved code info", 1551 cUnit->entryBlock->startOffset); 1552 } 1553 goto bail; 1554 } 1555 1556 /* Convert LIR into machine code. Loop for recoverable retries */ 1557 do { 1558 dvmCompilerAssembleLIR(cUnit, info); 1559 cUnit->assemblerRetries++; 1560 if (cUnit->printMe && cUnit->assemblerStatus != kSuccess) 1561 ALOGD("Assembler abort #%d on %d", cUnit->assemblerRetries, 1562 cUnit->assemblerStatus); 1563 } while (cUnit->assemblerStatus == kRetryAll); 1564 1565 /* Loop is too big - bail out */ 1566 if (cUnit->assemblerStatus == kRetryHalve) { 1567 goto bail; 1568 } 1569 1570 if (cUnit->printMe || gDvmJit.receivedSIGUSR2) { 1571 ALOGD("Loop trace @ offset %04x", cUnit->entryBlock->startOffset); 1572 dvmCompilerCodegenDump(cUnit); 1573 } 1574 1575 /* 1576 * If this trace uses class objects as constants, 1577 * dvmJitInstallClassObjectPointers will switch the thread state 1578 * to running and look up the class pointers using the descriptor/loader 1579 * tuple stored in the callsite info structure. We need to make this window 1580 * as short as possible since it is blocking GC. 1581 */ 1582 if (cUnit->hasClassLiterals && info->codeAddress) { 1583 dvmJitInstallClassObjectPointers(cUnit, (char *) info->codeAddress); 1584 } 1585 1586 /* 1587 * Since callsiteinfo is allocated from the arena, delay the reset until 1588 * class pointers are resolved. 
1589 */ 1590 dvmCompilerArenaReset(); 1591 1592 assert(cUnit->assemblerStatus == kSuccess); 1593 #if defined(WITH_JIT_TUNING) 1594 /* Locate the entry to store compilation statistics for this method */ 1595 methodStats = dvmCompilerAnalyzeMethodBody(desc->method, false); 1596 methodStats->nativeSize += cUnit->totalSize; 1597 #endif 1598 return info->codeAddress != NULL; 1599 1600 bail: 1601 /* Retry the original trace with JIT_OPT_NO_LOOP disabled */ 1602 dvmCompilerArenaReset(); 1603 return dvmCompileTrace(desc, numMaxInsts, info, bailPtr, 1604 optHints | JIT_OPT_NO_LOOP); 1605 } 1606 1607 /* 1608 * Main entry point to start trace compilation. Basic blocks are constructed 1609 * first and they will be passed to the codegen routines to convert Dalvik 1610 * bytecode into machine code. 1611 */ 1612 bool dvmCompileTrace(JitTraceDescription *desc, int numMaxInsts, 1613 JitTranslationInfo *info, jmp_buf *bailPtr, 1614 int optHints) 1615 { 1616 const DexCode *dexCode = dvmGetMethodCode(desc->method); 1617 const JitTraceRun* currRun = &desc->trace[0]; 1618 unsigned int curOffset = currRun->info.frag.startOffset; 1619 unsigned int startOffset = curOffset; 1620 unsigned int numInsts = currRun->info.frag.numInsts; 1621 const u2 *codePtr = dexCode->insns + curOffset; 1622 int traceSize = 0; // # of half-words 1623 const u2 *startCodePtr = codePtr; 1624 BasicBlock *curBB, *entryCodeBB; 1625 int numBlocks = 0; 1626 static int compilationId; 1627 CompilationUnit cUnit; 1628 GrowableList *blockList; 1629 #if defined(WITH_JIT_TUNING) 1630 CompilerMethodStats *methodStats; 1631 #endif 1632 1633 /* If we've already compiled this trace, just return success */ 1634 if (dvmJitGetTraceAddr(startCodePtr) && !info->discardResult) { 1635 /* 1636 * Make sure the codeAddress is NULL so that it won't clobber the 1637 * existing entry. 
1638 */ 1639 info->codeAddress = NULL; 1640 return true; 1641 } 1642 1643 /* If the work order is stale, discard it */ 1644 if (info->cacheVersion != gDvmJit.cacheVersion) { 1645 return false; 1646 } 1647 1648 compilationId++; 1649 memset(&cUnit, 0, sizeof(CompilationUnit)); 1650 1651 #if defined(WITH_JIT_TUNING) 1652 /* Locate the entry to store compilation statistics for this method */ 1653 methodStats = dvmCompilerAnalyzeMethodBody(desc->method, false); 1654 #endif 1655 1656 /* Set the recover buffer pointer */ 1657 cUnit.bailPtr = bailPtr; 1658 1659 /* Initialize the printMe flag */ 1660 cUnit.printMe = gDvmJit.printMe; 1661 1662 /* Setup the method */ 1663 cUnit.method = desc->method; 1664 1665 /* Store the trace descriptor and set the initial mode */ 1666 cUnit.traceDesc = desc; 1667 cUnit.jitMode = kJitTrace; 1668 1669 /* Initialize the PC reconstruction list */ 1670 dvmInitGrowableList(&cUnit.pcReconstructionList, 8); 1671 1672 /* Initialize the basic block list */ 1673 blockList = &cUnit.blockList; 1674 dvmInitGrowableList(blockList, 8); 1675 1676 /* Identify traces that we don't want to compile */ 1677 if (gDvmJit.methodTable) { 1678 int len = strlen(desc->method->clazz->descriptor) + 1679 strlen(desc->method->name) + 1; 1680 char *fullSignature = (char *)dvmCompilerNew(len, true); 1681 strcpy(fullSignature, desc->method->clazz->descriptor); 1682 strcat(fullSignature, desc->method->name); 1683 1684 int hashValue = dvmComputeUtf8Hash(fullSignature); 1685 1686 /* 1687 * Doing three levels of screening to see whether we want to skip 1688 * compiling this method 1689 */ 1690 1691 /* First, check the full "class;method" signature */ 1692 bool methodFound = 1693 dvmHashTableLookup(gDvmJit.methodTable, hashValue, 1694 fullSignature, (HashCompareFunc) strcmp, 1695 false) != 1696 NULL; 1697 1698 /* Full signature not found - check the enclosing class */ 1699 if (methodFound == false) { 1700 int hashValue = dvmComputeUtf8Hash(desc->method->clazz->descriptor); 1701 methodFound = 1702 dvmHashTableLookup(gDvmJit.methodTable, hashValue, 1703 (char *) desc->method->clazz->descriptor, 1704 (HashCompareFunc) strcmp, false) != 1705 NULL; 1706 /* Enclosing class not found - check the method name */ 1707 if (methodFound == false) { 1708 int hashValue = dvmComputeUtf8Hash(desc->method->name); 1709 methodFound = 1710 dvmHashTableLookup(gDvmJit.methodTable, hashValue, 1711 (char *) desc->method->name, 1712 (HashCompareFunc) strcmp, false) != 1713 NULL; 1714 1715 /* 1716 * Debug by call-graph is enabled. Check if the debug list 1717 * covers any methods on the VM stack. 1718 */ 1719 if (methodFound == false && gDvmJit.checkCallGraph == true) { 1720 methodFound = 1721 filterMethodByCallGraph(info->requestingThread, 1722 desc->method->name); 1723 } 1724 } 1725 } 1726 1727 /* 1728 * Under the following conditions, the trace will be *conservatively* 1729 * compiled by only containing single-step instructions to and from the 1730 * interpreter. 1731 * 1) If includeSelectedMethod == false, the method matches the full or 1732 * partial signature stored in the hash table. 1733 * 1734 * 2) If includeSelectedMethod == true, the method does not match the 1735 * full and partial signature stored in the hash table. 
1736 */ 1737 if (gDvmJit.includeSelectedMethod != methodFound) { 1738 cUnit.allSingleStep = true; 1739 } else { 1740 /* Compile the trace as normal */ 1741 1742 /* Print the method we cherry picked */ 1743 if (gDvmJit.includeSelectedMethod == true) { 1744 cUnit.printMe = true; 1745 } 1746 } 1747 } 1748 1749 /* Allocate the entry block */ 1750 curBB = dvmCompilerNewBB(kEntryBlock, numBlocks++); 1751 dvmInsertGrowableList(blockList, (intptr_t) curBB); 1752 curBB->startOffset = curOffset; 1753 1754 entryCodeBB = dvmCompilerNewBB(kDalvikByteCode, numBlocks++); 1755 dvmInsertGrowableList(blockList, (intptr_t) entryCodeBB); 1756 entryCodeBB->startOffset = curOffset; 1757 curBB->fallThrough = entryCodeBB; 1758 curBB = entryCodeBB; 1759 1760 if (cUnit.printMe) { 1761 ALOGD("--------\nCompiler: Building trace for %s, offset %#x", 1762 desc->method->name, curOffset); 1763 } 1764 1765 /* 1766 * Analyze the trace descriptor and include up to the maximal number 1767 * of Dalvik instructions into the IR. 1768 */ 1769 while (1) { 1770 MIR *insn; 1771 int width; 1772 insn = (MIR *)dvmCompilerNew(sizeof(MIR), true); 1773 insn->offset = curOffset; 1774 width = parseInsn(codePtr, &insn->dalvikInsn, cUnit.printMe); 1775 1776 /* The trace should never incude instruction data */ 1777 assert(width); 1778 insn->width = width; 1779 traceSize += width; 1780 dvmCompilerAppendMIR(curBB, insn); 1781 cUnit.numInsts++; 1782 1783 int flags = dexGetFlagsFromOpcode(insn->dalvikInsn.opcode); 1784 1785 if (flags & kInstrInvoke) { 1786 const Method *calleeMethod = (const Method *) 1787 currRun[JIT_TRACE_CUR_METHOD].info.meta; 1788 assert(numInsts == 1); 1789 CallsiteInfo *callsiteInfo = 1790 (CallsiteInfo *)dvmCompilerNew(sizeof(CallsiteInfo), true); 1791 callsiteInfo->classDescriptor = (const char *) 1792 currRun[JIT_TRACE_CLASS_DESC].info.meta; 1793 callsiteInfo->classLoader = (Object *) 1794 currRun[JIT_TRACE_CLASS_LOADER].info.meta; 1795 callsiteInfo->method = calleeMethod; 1796 insn->meta.callsiteInfo = callsiteInfo; 1797 } 1798 1799 /* Instruction limit reached - terminate the trace here */ 1800 if (cUnit.numInsts >= numMaxInsts) { 1801 break; 1802 } 1803 if (--numInsts == 0) { 1804 if (currRun->info.frag.runEnd) { 1805 break; 1806 } else { 1807 /* Advance to the next trace description (ie non-meta info) */ 1808 do { 1809 currRun++; 1810 } while (!currRun->isCode); 1811 1812 /* Dummy end-of-run marker seen */ 1813 if (currRun->info.frag.numInsts == 0) { 1814 break; 1815 } 1816 1817 curBB = dvmCompilerNewBB(kDalvikByteCode, numBlocks++); 1818 dvmInsertGrowableList(blockList, (intptr_t) curBB); 1819 curOffset = currRun->info.frag.startOffset; 1820 numInsts = currRun->info.frag.numInsts; 1821 curBB->startOffset = curOffset; 1822 codePtr = dexCode->insns + curOffset; 1823 } 1824 } else { 1825 curOffset += width; 1826 codePtr += width; 1827 } 1828 } 1829 1830 #if defined(WITH_JIT_TUNING) 1831 /* Convert # of half-word to bytes */ 1832 methodStats->compiledDalvikSize += traceSize * 2; 1833 #endif 1834 1835 /* 1836 * Now scan basic blocks containing real code to connect the 1837 * taken/fallthrough links. Also create chaining cells for code not included 1838 * in the trace. 
    size_t blockId;
    for (blockId = 0; blockId < blockList->numUsed; blockId++) {
        curBB = (BasicBlock *) dvmGrowableListGetElement(blockList, blockId);
        MIR *lastInsn = curBB->lastMIRInsn;
        /* Skip empty blocks */
        if (lastInsn == NULL) {
            continue;
        }
        curOffset = lastInsn->offset;
        unsigned int targetOffset = curOffset;
        unsigned int fallThroughOffset = curOffset + lastInsn->width;
        bool isInvoke = false;
        const Method *callee = NULL;

        findBlockBoundary(desc->method, curBB->lastMIRInsn, curOffset,
                          &targetOffset, &isInvoke, &callee);

        /* Link the taken and fallthrough blocks */
        BasicBlock *searchBB;

        int flags = dexGetFlagsFromOpcode(lastInsn->dalvikInsn.opcode);

        if (flags & kInstrInvoke) {
            cUnit.hasInvoke = true;
        }

        /* Backward branch seen */
        if (isInvoke == false &&
            (flags & kInstrCanBranch) != 0 &&
            targetOffset < curOffset &&
            (optHints & JIT_OPT_NO_LOOP) == 0) {
            dvmCompilerArenaReset();
            return compileLoop(&cUnit, startOffset, desc, numMaxInsts,
                               info, bailPtr, optHints);
        }

        /* No backward branch in the trace - start searching the next BB */
        size_t searchBlockId;
        for (searchBlockId = blockId+1; searchBlockId < blockList->numUsed;
             searchBlockId++) {
            searchBB = (BasicBlock *) dvmGrowableListGetElement(blockList,
                                                                searchBlockId);
            if (targetOffset == searchBB->startOffset) {
                curBB->taken = searchBB;
                dvmCompilerSetBit(searchBB->predecessors, curBB->id);
            }
            if (fallThroughOffset == searchBB->startOffset) {
                curBB->fallThrough = searchBB;
                dvmCompilerSetBit(searchBB->predecessors, curBB->id);

                /*
                 * The fallthrough block of an invoke instruction needs to be
                 * aligned to a 4-byte boundary (the alignment instruction is
                 * inserted later).
                 */
                if (flags & kInstrInvoke) {
                    searchBB->isFallThroughFromInvoke = true;
                }
            }
        }

        /*
         * Some blocks are ended by non-control-flow-change instructions,
         * currently only due to the trace length constraint. In this case we
         * need to generate an explicit branch at the end of the block to jump
         * to the chaining cell.
         */
        curBB->needFallThroughBranch =
            ((flags & (kInstrCanBranch | kInstrCanSwitch | kInstrCanReturn |
                       kInstrInvoke)) == 0);
        if (lastInsn->dalvikInsn.opcode == OP_PACKED_SWITCH ||
            lastInsn->dalvikInsn.opcode == OP_SPARSE_SWITCH) {
            int i;
            const u2 *switchData = desc->method->insns + lastInsn->offset +
                lastInsn->dalvikInsn.vB;
            int size = switchData[1];
            int maxChains = MIN(size, MAX_CHAINED_SWITCH_CASES);

            /*
             * Generate the landing pad for cases whose ranks are higher than
             * MAX_CHAINED_SWITCH_CASES. The code will re-enter the interpreter
             * through the NoChain point.
             */
            if (maxChains != size) {
                cUnit.switchOverflowPad =
                    desc->method->insns + lastInsn->offset;
            }

            s4 *targets = (s4 *) (switchData + 2 +
                    (lastInsn->dalvikInsn.opcode == OP_PACKED_SWITCH ?
                     2 : size * 2));
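            /*
             * Layout note: switchData points at the switch payload, where
             * [0] is the ident and [1] is the case count. A packed-switch
             * payload stores a single 32-bit first_key (2 code units) before
             * its 32-bit targets, while a sparse-switch payload stores
             * "size" 32-bit keys (size * 2 code units) first - hence the
             * "2 : size * 2" adjustment above when locating the target table.
             */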
            /* One chaining cell for each of the first MAX_CHAINED_SWITCH_CASES cases */
            for (i = 0; i < maxChains; i++) {
                BasicBlock *caseChain = dvmCompilerNewBB(kChainingCellNormal,
                                                         numBlocks++);
                dvmInsertGrowableList(blockList, (intptr_t) caseChain);
                caseChain->startOffset = lastInsn->offset + targets[i];
            }

            /* One more chaining cell for the default case */
            BasicBlock *caseChain = dvmCompilerNewBB(kChainingCellNormal,
                                                     numBlocks++);
            dvmInsertGrowableList(blockList, (intptr_t) caseChain);
            caseChain->startOffset = lastInsn->offset + lastInsn->width;
        /* Fallthrough block not included in the trace */
        } else if (!isUnconditionalBranch(lastInsn) &&
                   curBB->fallThrough == NULL) {
            BasicBlock *fallThroughBB;
            /*
             * If the chaining cell is after an invoke or an instruction that
             * cannot change the control flow, request a hot chaining cell.
             */
            if (isInvoke || curBB->needFallThroughBranch) {
                fallThroughBB = dvmCompilerNewBB(kChainingCellHot, numBlocks++);
            } else {
                fallThroughBB = dvmCompilerNewBB(kChainingCellNormal,
                                                 numBlocks++);
            }
            dvmInsertGrowableList(blockList, (intptr_t) fallThroughBB);
            fallThroughBB->startOffset = fallThroughOffset;
            curBB->fallThrough = fallThroughBB;
            dvmCompilerSetBit(fallThroughBB->predecessors, curBB->id);
        }
        /* Target block not included in the trace */
        if (curBB->taken == NULL &&
            (isGoto(lastInsn) || isInvoke ||
             (targetOffset != UNKNOWN_TARGET && targetOffset != curOffset))) {
            BasicBlock *newBB = NULL;
            if (isInvoke) {
                /* Monomorphic callee */
                if (callee) {
                    /* JNI call doesn't need a chaining cell */
                    if (!dvmIsNativeMethod(callee)) {
                        newBB = dvmCompilerNewBB(kChainingCellInvokeSingleton,
                                                 numBlocks++);
                        newBB->startOffset = 0;
                        newBB->containingMethod = callee;
                    }
                /* Will resolve at runtime */
                } else {
                    newBB = dvmCompilerNewBB(kChainingCellInvokePredicted,
                                             numBlocks++);
                    newBB->startOffset = 0;
                }
            /* For unconditional branches, request a hot chaining cell */
            } else {
#if !defined(WITH_SELF_VERIFICATION)
                newBB = dvmCompilerNewBB(dexIsGoto(flags) ?
                                         kChainingCellHot :
                                         kChainingCellNormal,
                                         numBlocks++);
                newBB->startOffset = targetOffset;
#else
                /* Handle branches that branch back into the block */
                if (targetOffset >= curBB->firstMIRInsn->offset &&
                    targetOffset <= curBB->lastMIRInsn->offset) {
                    newBB = dvmCompilerNewBB(kChainingCellBackwardBranch,
                                             numBlocks++);
                } else {
                    newBB = dvmCompilerNewBB(dexIsGoto(flags) ?
                                             kChainingCellHot :
                                             kChainingCellNormal,
                                             numBlocks++);
                }
                newBB->startOffset = targetOffset;
#endif
            }
            if (newBB) {
                curBB->taken = newBB;
                dvmCompilerSetBit(newBB->predecessors, curBB->id);
                dvmInsertGrowableList(blockList, (intptr_t) newBB);
            }
        }
    }

    /* Now create a special block to host PC reconstruction code */
    curBB = dvmCompilerNewBB(kPCReconstruction, numBlocks++);
    dvmInsertGrowableList(blockList, (intptr_t) curBB);

    /* And one final block that publishes the PC and raises the exception */
    curBB = dvmCompilerNewBB(kExceptionHandling, numBlocks++);
    dvmInsertGrowableList(blockList, (intptr_t) curBB);
    cUnit.puntBlock = curBB;

    if (cUnit.printMe) {
        char* signature =
            dexProtoCopyMethodDescriptor(&desc->method->prototype);
        ALOGD("TRACEINFO (%d): 0x%08x %s%s.%s %#x %d of %d, %d blocks",
              compilationId,
              (intptr_t) desc->method->insns,
              desc->method->clazz->descriptor,
              desc->method->name,
              signature,
              desc->trace[0].info.frag.startOffset,
              traceSize,
              dexCode->insnsSize,
              numBlocks);
        free(signature);
    }

    cUnit.numBlocks = numBlocks;

    /* Set the instruction set to use (NOTE: later components may change it) */
    cUnit.instructionSet = dvmCompilerInstructionSet();

    /* Inline transformation @ the MIR level */
    if (cUnit.hasInvoke && !(gDvmJit.disableOpt & (1 << kMethodInlining))) {
        dvmCompilerInlineMIR(&cUnit, info);
    }

    cUnit.numDalvikRegisters = cUnit.method->registersSize;

    /* Preparation for SSA conversion */
    dvmInitializeSSAConversion(&cUnit);

    dvmCompilerNonLoopAnalysis(&cUnit);

    dvmCompilerInitializeRegAlloc(&cUnit);  // Needs to happen after SSA naming

    if (cUnit.printMe) {
        dvmCompilerDumpCompilationUnit(&cUnit);
    }

    /* Allocate registers using a simple local allocation scheme */
    dvmCompilerLocalRegAlloc(&cUnit);

    /* Convert MIR to LIR, etc. */
    dvmCompilerMIR2LIR(&cUnit);

    /* Convert LIR into machine code. Loop for recoverable retries */
    do {
        dvmCompilerAssembleLIR(&cUnit, info);
        cUnit.assemblerRetries++;
        if (cUnit.printMe && cUnit.assemblerStatus != kSuccess)
            ALOGD("Assembler abort #%d on %d", cUnit.assemblerRetries,
                  cUnit.assemblerStatus);
    } while (cUnit.assemblerStatus == kRetryAll);

    if (cUnit.printMe) {
        ALOGD("Trace Dalvik PC: %p", startCodePtr);
        dvmCompilerCodegenDump(&cUnit);
        ALOGD("End %s%s, %d Dalvik instructions",
              desc->method->clazz->descriptor, desc->method->name,
              cUnit.numInsts);
    }

    if (cUnit.assemblerStatus == kRetryHalve) {
        /* Reset the compiler resource pool before retrying */
        dvmCompilerArenaReset();

        /* Halve the instruction count and start over from the top */
        return dvmCompileTrace(desc, cUnit.numInsts / 2, info, bailPtr,
                               optHints);
    }

    /*
     * If this trace uses class objects as constants,
     * dvmJitInstallClassObjectPointers will switch the thread state
     * to running and look up the class pointers using the descriptor/loader
     * tuple stored in the callsite info structure. We need to make this
     * window as short as possible since it is blocking GC.
     */
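    /*
     * Hypothetical example: if a callsite recorded above carried the
     * descriptor "Ljava/lang/String;" plus its class loader, the installer
     * below looks that pair up (with the thread briefly in the running
     * state) and writes the resolved ClassObject pointer into the emitted
     * translation.
     */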
    if (cUnit.hasClassLiterals && info->codeAddress) {
        dvmJitInstallClassObjectPointers(&cUnit, (char *) info->codeAddress);
    }

    /*
     * Since callsiteInfo is allocated from the arena, delay the reset until
     * the class pointers are resolved.
     */
    dvmCompilerArenaReset();

    assert(cUnit.assemblerStatus == kSuccess);
#if defined(WITH_JIT_TUNING)
    methodStats->nativeSize += cUnit.totalSize;
#endif

    return info->codeAddress != NULL;
}
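/*
 * Rough usage sketch (an approximation, not lifted from the actual caller):
 * the compiler worker thread hands each queued trace request to this entry
 * point with its maximum trace length, and the kRetryHalve path above
 * re-invokes it with half the instruction budget when the assembled trace
 * does not fit:
 *
 *     bool ok = dvmCompileTrace(desc, numMaxInsts, &translationInfo,
 *                               bailPtr, 0);
 *
 * where desc, numMaxInsts, translationInfo, and bailPtr come from the
 * compiler work order.
 */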