2 * Copyright (C) 2009 The Android Open Source Project
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
8 * http://www.apache.org/licenses/LICENSE-2.0
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
18 #include "libdex/DexOpcodes.h"
19 #include "libdex/DexCatch.h"
20 #include "interp/Jit.h"
21 #include "CompilerInternals.h"
/*
 * Return true if the code unit at codePtr is a real Dalvik instruction
 * rather than inline data (switch tables / fill-array payloads embedded
 * in the insns stream).
 * NOTE(review): this listing elides some original lines — the load of
 * 'instr' from *codePtr is not visible here; confirm against the full file.
 */
24 static inline bool contentIsInsn(const u2 *codePtr) {
26 Opcode opcode = (Opcode)(instr & 0xff);
29 * Since the low 8-bit in metadata may look like OP_NOP, we need to check
30 * both the low and whole sub-word to determine whether it is code or data.
/* A true OP_NOP has the entire 16-bit code unit equal to 0. */
32 return (opcode != OP_NOP || instr == 0);
36 * Parse an instruction, return the length of the instruction
/*
 * Decodes the code unit(s) at codePtr into *decInsn and returns the
 * instruction width in 16-bit code units.  Data payloads are not parsed
 * (contentIsInsn() guards against them).
 * NOTE(review): elided lines include the printMe guard around the
 * disassembly logging and the early-return for data — verify in full file.
 */
38 static inline int parseInsn(const u2 *codePtr, DecodedInstruction *decInsn,
41 // Don't parse instruction data
42 if (!contentIsInsn(codePtr)) {
47 Opcode opcode = dexOpcodeFromCodeUnit(instr);
49 dexDecodeInstruction(codePtr, decInsn);
51 char *decodedString = dvmCompilerGetDalvikDisassembly(decInsn, NULL);
52 LOGD("%p: %#06x %s", codePtr, opcode, decodedString);
54 return dexGetWidthFromOpcode(opcode);
/* Sentinel for branch targets that cannot be resolved at compile time. */
57 #define UNKNOWN_TARGET 0xffffffff
60 * Identify block-ending instructions and collect supplemental information
61 * regarding the following instructions.
/*
 * For the instruction in 'insn' at 'curOffset', fill in:
 *   *target  - branch/invoke target offset (UNKNOWN_TARGET if unknown)
 *   *isInvoke - presumably set true for invoke opcodes (the assignments
 *               are elided in this listing — TODO confirm)
 *   *callee  - resolved callee Method for statically-resolvable invokes
 * Returns whether the instruction ends a basic block (return value paths
 * are elided in this listing).
 */
63 static inline bool findBlockBoundary(const Method *caller, MIR *insn,
64 unsigned int curOffset,
65 unsigned int *target, bool *isInvoke,
66 const Method **callee)
68 switch (insn->dalvikInsn.opcode) {
69 /* Target is not compile-time constant */
73 case OP_RETURN_OBJECT:
75 *target = UNKNOWN_TARGET;
/* Virtual/interface dispatch: receiver type unknown at compile time. */
77 case OP_INVOKE_VIRTUAL:
78 case OP_INVOKE_VIRTUAL_RANGE:
79 case OP_INVOKE_VIRTUAL_JUMBO:
80 case OP_INVOKE_INTERFACE:
81 case OP_INVOKE_INTERFACE_RANGE:
82 case OP_INVOKE_INTERFACE_JUMBO:
83 case OP_INVOKE_VIRTUAL_QUICK:
84 case OP_INVOKE_VIRTUAL_QUICK_RANGE:
/* invoke-super: callee resolved via the superclass vtable. */
88 case OP_INVOKE_SUPER_RANGE:
89 case OP_INVOKE_SUPER_JUMBO: {
90 int mIndex = caller->clazz->pDvmDex->
91 pResMethods[insn->dalvikInsn.vB]->methodIndex;
92 const Method *calleeMethod =
93 caller->clazz->super->vtable[mIndex];
/* Only record a target for resolved, non-native callees. */
95 if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) {
96 *target = (unsigned int) calleeMethod->insns;
99 *callee = calleeMethod;
102 case OP_INVOKE_STATIC:
103 case OP_INVOKE_STATIC_RANGE:
104 case OP_INVOKE_STATIC_JUMBO: {
105 const Method *calleeMethod =
106 caller->clazz->pDvmDex->pResMethods[insn->dalvikInsn.vB];
108 if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) {
109 *target = (unsigned int) calleeMethod->insns;
112 *callee = calleeMethod;
/* quickened invoke-super: vB is a direct vtable index. */
115 case OP_INVOKE_SUPER_QUICK:
116 case OP_INVOKE_SUPER_QUICK_RANGE: {
117 const Method *calleeMethod =
118 caller->clazz->super->vtable[insn->dalvikInsn.vB];
120 if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) {
121 *target = (unsigned int) calleeMethod->insns;
124 *callee = calleeMethod;
127 case OP_INVOKE_DIRECT:
128 case OP_INVOKE_DIRECT_RANGE:
129 case OP_INVOKE_DIRECT_JUMBO: {
130 const Method *calleeMethod =
131 caller->clazz->pDvmDex->pResMethods[insn->dalvikInsn.vB];
132 if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) {
133 *target = (unsigned int) calleeMethod->insns;
136 *callee = calleeMethod;
/*
 * Relative branch targets: which of vA/vB/vC holds the offset depends on
 * the goto/if format (the case labels are elided in this listing).
 */
142 *target = curOffset + (int) insn->dalvikInsn.vA;
151 *target = curOffset + (int) insn->dalvikInsn.vC;
160 *target = curOffset + (int) insn->dalvikInsn.vB;
/*
 * Return whether the instruction is an unconditional goto (body of the
 * switch is elided in this listing — presumably OP_GOTO variants).
 */
169 static inline bool isGoto(MIR *insn)
171 switch (insn->dalvikInsn.opcode) {
182 * Identify unconditional branch instructions
/*
 * True for instructions that never fall through (returns and gotos);
 * most case labels are elided in this listing.
 */
184 static inline bool isUnconditionalBranch(MIR *insn)
186 switch (insn->dalvikInsn.opcode) {
190 case OP_RETURN_OBJECT:
198 * dvmHashTableLookup() callback
/*
 * Compare two method-stats entries by their Method pointer; returns 0
 * when they refer to the same method (hash-table equality).
 * NOTE(review): pointer-difference-as-int truncates on 64-bit — harmless
 * here only because equality (zero) is what the lookup tests.
 */
200 static int compareMethod(const CompilerMethodStats *m1,
201 const CompilerMethodStats *m2)
203 return (int) m1->method - (int) m2->method;
207 * Analyze the body of the method to collect high-level information regarding
210 * - is getter/setter?
211 * - can throw exception?
213 * Currently the inliner only handles getters and setters. When its capability
214 * becomes more sophisticated more information will be retrieved here.
/*
 * Examines one decoded instruction and clears/sets METHOD_IS_* attribute
 * bits accordingly; returns the updated attribute mask.  'offset' is the
 * instruction's position (0 == first instruction of the method).
 */
216 static int analyzeInlineTarget(DecodedInstruction *dalvikInsn, int attributes,
219 int flags = dexGetFlagsFromOpcode(dalvikInsn->opcode);
220 int dalvikOpcode = dalvikInsn->opcode;
/* Any invoke means the method is not a leaf. */
222 if (flags & kInstrInvoke) {
223 attributes &= ~METHOD_IS_LEAF;
/* Non-return instructions must match the getter/setter dataflow pattern. */
226 if (!(flags & kInstrCanReturn)) {
227 if (!(dvmCompilerDataFlowAttributes[dalvikOpcode] &
229 attributes &= ~METHOD_IS_GETTER;
231 if (!(dvmCompilerDataFlowAttributes[dalvikOpcode] &
233 attributes &= ~METHOD_IS_SETTER;
238 * The expected instruction sequence is setter will never return value and
239 * getter will also do. Clear the bits if the behavior is discovered
242 if (flags & kInstrCanReturn) {
243 if (dalvikOpcode == OP_RETURN_VOID) {
244 attributes &= ~METHOD_IS_GETTER;
247 attributes &= ~METHOD_IS_SETTER;
251 if (flags & kInstrCanThrow) {
252 attributes &= ~METHOD_IS_THROW_FREE;
/* A lone return-void as the first instruction marks an empty method. */
255 if (offset == 0 && dalvikOpcode == OP_RETURN_VOID) {
256 attributes |= METHOD_IS_EMPTY;
260 * Check if this opcode is selected for single stepping.
261 * If so, don't inline the callee as there is no stack frame for the
262 * interpreter to single-step through the instruction.
264 if (SINGLE_STEP_OP(dalvikOpcode)) {
265 attributes &= ~(METHOD_IS_GETTER | METHOD_IS_SETTER);
272 * Analyze each method whose traces are ever compiled. Collect a variety of
273 * statistics like the ratio of exercised vs overall code and code bloat
274 * ratios. If isCallee is true, also analyze each instruction in more details
275 * to see if it is suitable for inlining.
/*
 * Returns the (possibly newly created) CompilerMethodStats entry for
 * 'method' from gDvmJit.methodStatsTable.  Results are cached: repeated
 * calls for an already-analyzed callee/hot method return early.
 * NOTE(review): several lines are elided in this listing (e.g. the
 * isCallee parameter declaration and parts of the instruction loop).
 */
277 CompilerMethodStats *dvmCompilerAnalyzeMethodBody(const Method *method,
280 const DexCode *dexCode = dvmGetMethodCode(method);
281 const u2 *codePtr = dexCode->insns;
282 const u2 *codeEnd = dexCode->insns + dexCode->insnsSize;
284 int hashValue = dvmComputeUtf8Hash(method->name);
286 CompilerMethodStats dummyMethodEntry; // For hash table lookup
287 CompilerMethodStats *realMethodEntry; // For hash table storage
289 /* For lookup only */
290 dummyMethodEntry.method = method;
291 realMethodEntry = (CompilerMethodStats *)
292 dvmHashTableLookup(gDvmJit.methodStatsTable,
295 (HashCompareFunc) compareMethod,
298 /* This method has never been analyzed before - create an entry */
299 if (realMethodEntry == NULL) {
301 (CompilerMethodStats *) calloc(1, sizeof(CompilerMethodStats));
302 realMethodEntry->method = method;
304 dvmHashTableLookup(gDvmJit.methodStatsTable, hashValue,
306 (HashCompareFunc) compareMethod,
310 /* This method is invoked as a callee and has been analyzed - just return */
311 if ((isCallee == true) && (realMethodEntry->attributes & METHOD_IS_CALLEE))
312 return realMethodEntry;
315 * Similarly, return if this method has been compiled before as a hot
318 if ((isCallee == false) &&
319 (realMethodEntry->attributes & METHOD_IS_HOT))
320 return realMethodEntry;
324 /* Method hasn't been analyzed for the desired purpose yet */
326 /* Aggressively set the attributes until proven otherwise */
327 attributes = METHOD_IS_LEAF | METHOD_IS_THROW_FREE | METHOD_IS_CALLEE |
328 METHOD_IS_GETTER | METHOD_IS_SETTER;
330 attributes = METHOD_IS_HOT;
333 /* Count the number of instructions */
334 while (codePtr < codeEnd) {
335 DecodedInstruction dalvikInsn;
336 int width = parseInsn(codePtr, &dalvikInsn, false);
338 /* Terminate when the data section is seen */
343 attributes = analyzeInlineTarget(&dalvikInsn, attributes, insnSize);
351 * Only handle simple getters/setters with one instruction followed by
354 if ((attributes & (METHOD_IS_GETTER | METHOD_IS_SETTER)) &&
356 attributes &= ~(METHOD_IS_GETTER | METHOD_IS_SETTER);
/* dalvikSize is in bytes; insnSize counts 16-bit code units. */
359 realMethodEntry->dalvikSize = insnSize * 2;
360 realMethodEntry->attributes |= attributes;
363 /* Uncomment the following to explore various callee patterns */
364 if (attributes & METHOD_IS_THROW_FREE) {
365 LOGE("%s%s is inlinable%s", method->clazz->descriptor, method->name,
366 (attributes & METHOD_IS_EMPTY) ? " empty" : "");
369 if (attributes & METHOD_IS_LEAF) {
370 LOGE("%s%s is leaf %d%s", method->clazz->descriptor, method->name,
371 insnSize, insnSize < 5 ? " (small)" : "");
374 if (attributes & (METHOD_IS_GETTER | METHOD_IS_SETTER)) {
375 LOGE("%s%s is %s", method->clazz->descriptor, method->name,
376 attributes & METHOD_IS_GETTER ? "getter": "setter");
379 (METHOD_IS_LEAF | METHOD_IS_THROW_FREE | METHOD_IS_CALLEE)) {
380 LOGE("%s%s is inlinable non setter/getter", method->clazz->descriptor,
385 return realMethodEntry;
389 * Crawl the stack of the thread that requesed compilation to see if any of the
390 * ancestors are on the blacklist.
/*
 * Walks the Dalvik call stack of 'thread' and returns whether any frame's
 * method name appears in gDvmJit.methodTable (white/black list); the
 * return statements themselves are elided in this listing.
 */
392 static bool filterMethodByCallGraph(Thread *thread, const char *curMethodName)
394 /* Crawl the Dalvik stack frames and compare the method name*/
395 StackSaveArea *ssaPtr = ((StackSaveArea *) thread->interpSave.curFrame) - 1;
/* The sentinel for the bottom of the stack is a NULL frame pointer,
 * hence the ((StackSaveArea *) NULL) - 1 comparison. */
396 while (ssaPtr != ((StackSaveArea *) NULL) - 1) {
397 const Method *method = ssaPtr->method;
399 int hashValue = dvmComputeUtf8Hash(method->name);
401 dvmHashTableLookup(gDvmJit.methodTable, hashValue,
402 (char *) method->name,
403 (HashCompareFunc) strcmp, false) !=
406 LOGD("Method %s (--> %s) found on the JIT %s list",
407 method->name, curMethodName,
408 gDvmJit.includeSelectedMethod ? "white" : "black");
413 ssaPtr = ((StackSaveArea *) ssaPtr->prevFrame) - 1;
419 * Since we are including instructions from possibly a cold method into the
420 * current trace, we need to make sure that all the associated information
421 * with the callee is properly initialized. If not, we punt on this inline
424 * TODO: volatile instructions will be handled later.
/*
 * Returns false when the instruction references a class/field/method/string
 * that has not yet been resolved in the callee's DexFile (so inlining it
 * would require resolution at run time); true otherwise.
 */
426 bool dvmCompilerCanIncludeThisInstruction(const Method *method,
427 const DecodedInstruction *insn)
429 switch (insn->opcode) {
430 case OP_NEW_INSTANCE:
431 case OP_NEW_INSTANCE_JUMBO:
433 case OP_CHECK_CAST_JUMBO: {
434 ClassObject *classPtr = (ClassObject *)(void*)
435 (method->clazz->pDvmDex->pResClasses[insn->vB]);
437 /* Class hasn't been initialized yet */
438 if (classPtr == NULL) {
/* Static field accesses: need the field resolved. */
446 case OP_SGET_WIDE_JUMBO:
448 case OP_SGET_OBJECT_JUMBO:
449 case OP_SGET_BOOLEAN:
450 case OP_SGET_BOOLEAN_JUMBO:
452 case OP_SGET_BYTE_JUMBO:
454 case OP_SGET_CHAR_JUMBO:
456 case OP_SGET_SHORT_JUMBO:
460 case OP_SPUT_WIDE_JUMBO:
462 case OP_SPUT_OBJECT_JUMBO:
463 case OP_SPUT_BOOLEAN:
464 case OP_SPUT_BOOLEAN_JUMBO:
466 case OP_SPUT_BYTE_JUMBO:
468 case OP_SPUT_CHAR_JUMBO:
470 case OP_SPUT_SHORT_JUMBO: {
471 void *fieldPtr = (void*)
472 (method->clazz->pDvmDex->pResFields[insn->vB]);
474 if (fieldPtr == NULL) {
479 case OP_INVOKE_SUPER:
480 case OP_INVOKE_SUPER_RANGE:
481 case OP_INVOKE_SUPER_JUMBO: {
482 int mIndex = method->clazz->pDvmDex->
483 pResMethods[insn->vB]->methodIndex;
484 const Method *calleeMethod = method->clazz->super->vtable[mIndex];
485 if (calleeMethod == NULL) {
490 case OP_INVOKE_SUPER_QUICK:
491 case OP_INVOKE_SUPER_QUICK_RANGE: {
492 const Method *calleeMethod = method->clazz->super->vtable[insn->vB];
493 if (calleeMethod == NULL) {
498 case OP_INVOKE_STATIC:
499 case OP_INVOKE_STATIC_RANGE:
500 case OP_INVOKE_STATIC_JUMBO:
501 case OP_INVOKE_DIRECT:
502 case OP_INVOKE_DIRECT_RANGE:
503 case OP_INVOKE_DIRECT_JUMBO: {
504 const Method *calleeMethod =
505 method->clazz->pDvmDex->pResMethods[insn->vB];
506 if (calleeMethod == NULL) {
512 case OP_CONST_CLASS_JUMBO: {
513 void *classPtr = (void*)
514 (method->clazz->pDvmDex->pResClasses[insn->vB]);
516 if (classPtr == NULL) {
521 case OP_CONST_STRING_JUMBO:
522 case OP_CONST_STRING: {
523 void *strPtr = (void*)
524 (method->clazz->pDvmDex->pResStrings[insn->vB]);
526 if (strPtr == NULL) {
536 /* Split an existing block from the specified code offset into two */
/*
 * Creates a new "bottom" block starting at codeOffset, moves the tail of
 * origBlock's MIR chain into it, and re-wires taken/fallthrough/successor
 * edges and predecessor bitvectors.  Returns the new bottom block
 * (the return statement is elided in this listing).
 */
537 static BasicBlock *splitBlock(CompilationUnit *cUnit,
538 unsigned int codeOffset,
539 BasicBlock *origBlock)
/* Scan origBlock's MIRs for the instruction at the split offset. */
541 MIR *insn = origBlock->firstMIRInsn;
543 if (insn->offset == codeOffset) break;
547 LOGE("Break split failed");
550 BasicBlock *bottomBlock = dvmCompilerNewBB(kDalvikByteCode,
552 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bottomBlock);
554 bottomBlock->startOffset = codeOffset;
555 bottomBlock->firstMIRInsn = insn;
556 bottomBlock->lastMIRInsn = origBlock->lastMIRInsn;
558 /* Handle the taken path */
559 bottomBlock->taken = origBlock->taken;
560 if (bottomBlock->taken) {
561 origBlock->taken = NULL;
562 dvmCompilerClearBit(bottomBlock->taken->predecessors, origBlock->id);
563 dvmCompilerSetBit(bottomBlock->taken->predecessors, bottomBlock->id);
566 /* Handle the fallthrough path */
567 bottomBlock->needFallThroughBranch = origBlock->needFallThroughBranch;
568 bottomBlock->fallThrough = origBlock->fallThrough;
569 origBlock->fallThrough = bottomBlock;
570 origBlock->needFallThroughBranch = true;
571 dvmCompilerSetBit(bottomBlock->predecessors, origBlock->id);
572 if (bottomBlock->fallThrough) {
573 dvmCompilerClearBit(bottomBlock->fallThrough->predecessors,
575 dvmCompilerSetBit(bottomBlock->fallThrough->predecessors,
579 /* Handle the successor list */
580 if (origBlock->successorBlockList.blockListType != kNotUsed) {
581 bottomBlock->successorBlockList = origBlock->successorBlockList;
582 origBlock->successorBlockList.blockListType = kNotUsed;
583 GrowableListIterator iterator;
585 dvmGrowableListIteratorInit(&bottomBlock->successorBlockList.blocks,
588 SuccessorBlockInfo *successorBlockInfo =
589 (SuccessorBlockInfo *) dvmGrowableListIteratorNext(&iterator);
590 if (successorBlockInfo == NULL) break;
591 BasicBlock *bb = successorBlockInfo->block;
592 dvmCompilerClearBit(bb->predecessors, origBlock->id);
593 dvmCompilerSetBit(bb->predecessors, bottomBlock->id);
/* Truncate origBlock's MIR chain just before the split point. */
597 origBlock->lastMIRInsn = insn->prev;
599 insn->prev->next = NULL;
605 * Given a code offset, find out the block that starts with it. If the offset
606 * is in the middle of an existing block, split it into two.
/*
 * Returns the Dalvik-bytecode block starting at codeOffset; optionally
 * splits an existing block ('split') or creates a new one ('create').
 * NOTE(review): return paths for the split/create==false cases are elided
 * in this listing.
 */
608 static BasicBlock *findBlock(CompilationUnit *cUnit,
609 unsigned int codeOffset,
610 bool split, bool create)
612 GrowableList *blockList = &cUnit->blockList;
616 for (i = 0; i < blockList->numUsed; i++) {
617 bb = (BasicBlock *) blockList->elemList[i];
/* Only Dalvik bytecode blocks are addressable by code offset. */
618 if (bb->blockType != kDalvikByteCode) continue;
619 if (bb->startOffset == codeOffset) return bb;
620 /* Check if a branch jumps into the middle of an existing block */
621 if ((split == true) && (codeOffset > bb->startOffset) &&
622 (bb->lastMIRInsn != NULL) &&
623 (codeOffset <= bb->lastMIRInsn->offset)) {
624 BasicBlock *newBB = splitBlock(cUnit, codeOffset, bb);
/* Not found: create a fresh block when requested. */
629 bb = dvmCompilerNewBB(kDalvikByteCode, cUnit->numBlocks++);
630 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb);
631 bb->startOffset = codeOffset;
637 /* Dump the CFG into a DOT graph */
/*
 * Writes the compilation unit's control-flow graph to a Graphviz .dot
 * file under dirPrefix, named after the method signature and the entry
 * offset.  Debug-only; output is consumed by graphviz tooling.
 * NOTE(review): the error path when fopen() fails is elided in this
 * listing — confirm the NULL check in the full file.
 */
638 void dvmDumpCFG(CompilationUnit *cUnit, const char *dirPrefix)
640 const Method *method = cUnit->method;
642 char *signature = dexProtoCopyMethodDescriptor(&method->prototype);
643 char startOffset[80];
644 sprintf(startOffset, "_%x", cUnit->entryBlock->fallThrough->startOffset);
645 char *fileName = (char *) dvmCompilerNew(
647 strlen(method->clazz->descriptor) +
648 strlen(method->name) +
650 strlen(startOffset) +
651 strlen(".dot") + 1, true);
652 sprintf(fileName, "%s%s%s%s%s.dot", dirPrefix,
653 method->clazz->descriptor, method->name, signature, startOffset);
657 * Convert the special characters into a filesystem- and shell-friendly
/* Only rewrite characters after the directory prefix. */
661 for (i = strlen(dirPrefix); fileName[i]; i++) {
662 if (fileName[i] == '/') {
664 } else if (fileName[i] == ';') {
666 } else if (fileName[i] == '$') {
668 } else if (fileName[i] == '(' || fileName[i] == ')') {
670 } else if (fileName[i] == '<' || fileName[i] == '>') {
674 file = fopen(fileName, "w");
678 fprintf(file, "digraph G {\n");
680 fprintf(file, " rankdir=TB\n");
682 int numReachableBlocks = cUnit->numReachableBlocks;
684 const GrowableList *blockList = &cUnit->blockList;
/* Emit one DOT node per reachable block, in DFS order. */
686 for (idx = 0; idx < numReachableBlocks; idx++) {
687 int blockIdx = cUnit->dfsOrder.elemList[idx];
688 BasicBlock *bb = (BasicBlock *) dvmGrowableListGetElement(blockList,
690 if (bb == NULL) break;
691 if (bb->blockType == kEntryBlock) {
692 fprintf(file, " entry [shape=Mdiamond];\n");
693 } else if (bb->blockType == kExitBlock) {
694 fprintf(file, " exit [shape=Mdiamond];\n");
695 } else if (bb->blockType == kDalvikByteCode) {
696 fprintf(file, " block%04x [shape=record,label = \"{ \\\n",
699 fprintf(file, " {block id %d\\l}%s\\\n", bb->id,
700 bb->firstMIRInsn ? " | " : " ");
701 for (mir = bb->firstMIRInsn; mir; mir = mir->next) {
702 fprintf(file, " {%04x %s\\l}%s\\\n", mir->offset,
704 dvmCompilerFullDisassembler(cUnit, mir) :
705 dexGetOpcodeName(mir->dalvikInsn.opcode),
706 mir->next ? " | " : " ");
708 fprintf(file, " }\"];\n\n");
709 } else if (bb->blockType == kExceptionHandling) {
710 char blockName[BLOCK_NAME_LEN];
712 dvmGetBlockName(bb, blockName);
713 fprintf(file, " %s [shape=invhouse];\n", blockName);
/* Edges: dotted for taken branches, solid for fallthrough. */
716 char blockName1[BLOCK_NAME_LEN], blockName2[BLOCK_NAME_LEN];
719 dvmGetBlockName(bb, blockName1);
720 dvmGetBlockName(bb->taken, blockName2);
721 fprintf(file, " %s:s -> %s:n [style=dotted]\n",
722 blockName1, blockName2);
724 if (bb->fallThrough) {
725 dvmGetBlockName(bb, blockName1);
726 dvmGetBlockName(bb->fallThrough, blockName2);
727 fprintf(file, " %s:s -> %s:n\n", blockName1, blockName2);
/* Switch/catch successor lists get their own record node. */
730 if (bb->successorBlockList.blockListType != kNotUsed) {
731 fprintf(file, " succ%04x [shape=%s,label = \"{ \\\n",
733 (bb->successorBlockList.blockListType == kCatch) ?
734 "Mrecord" : "record");
735 GrowableListIterator iterator;
736 dvmGrowableListIteratorInit(&bb->successorBlockList.blocks,
738 SuccessorBlockInfo *successorBlockInfo =
739 (SuccessorBlockInfo *) dvmGrowableListIteratorNext(&iterator);
743 if (successorBlockInfo == NULL) break;
745 BasicBlock *destBlock = successorBlockInfo->block;
746 SuccessorBlockInfo *nextSuccessorBlockInfo =
747 (SuccessorBlockInfo *) dvmGrowableListIteratorNext(&iterator);
749 fprintf(file, " {<f%d> %04x: %04x\\l}%s\\\n",
751 successorBlockInfo->key,
752 destBlock->startOffset,
753 (nextSuccessorBlockInfo != NULL) ? " | " : " ");
755 successorBlockInfo = nextSuccessorBlockInfo;
757 fprintf(file, " }\"];\n\n");
759 dvmGetBlockName(bb, blockName1);
760 fprintf(file, " %s:s -> succ%04x:n [style=dashed]\n",
761 blockName1, bb->startOffset);
763 if (bb->successorBlockList.blockListType == kPackedSwitch ||
764 bb->successorBlockList.blockListType == kSparseSwitch) {
766 dvmGrowableListIteratorInit(&bb->successorBlockList.blocks,
771 SuccessorBlockInfo *successorBlockInfo =
772 (SuccessorBlockInfo *)
773 dvmGrowableListIteratorNext(&iterator);
774 if (successorBlockInfo == NULL) break;
776 BasicBlock *destBlock = successorBlockInfo->block;
778 dvmGetBlockName(destBlock, blockName2);
779 fprintf(file, " succ%04x:f%d:e -> %s:n\n",
780 bb->startOffset, succId++,
788 * If we need to debug the dominator tree, uncomment the following code
791 dvmGetBlockName(bb, blockName1);
792 fprintf(file, " cfg%s [label=\"%s\", shape=none];\n",
793 blockName1, blockName1);
795 dvmGetBlockName(bb->iDom, blockName2);
796 fprintf(file, " cfg%s:s -> cfg%s:n\n\n",
797 blockName2, blockName1);
801 fprintf(file, "}\n");
805 /* Verify if all the successor is connected with all the claimed predecessors */
/*
 * Sanity check (debug): for each predecessor bit claimed by 'bb', verify
 * the predecessor actually links to bb via taken, fallthrough, or its
 * successor list; dumps the CFG and logs an error on mismatch.
 */
806 static bool verifyPredInfo(CompilationUnit *cUnit, BasicBlock *bb)
808 BitVectorIterator bvIterator;
810 dvmBitVectorIteratorInit(bb->predecessors, &bvIterator);
812 int blockIdx = dvmBitVectorIteratorNext(&bvIterator);
813 if (blockIdx == -1) break;
814 BasicBlock *predBB = (BasicBlock *)
815 dvmGrowableListGetElement(&cUnit->blockList, blockIdx);
817 if (predBB->taken == bb) {
819 } else if (predBB->fallThrough == bb) {
821 } else if (predBB->successorBlockList.blockListType != kNotUsed) {
822 GrowableListIterator iterator;
823 dvmGrowableListIteratorInit(&predBB->successorBlockList.blocks,
826 SuccessorBlockInfo *successorBlockInfo =
827 (SuccessorBlockInfo *)
828 dvmGrowableListIteratorNext(&iterator);
829 if (successorBlockInfo == NULL) break;
830 BasicBlock *succBB = successorBlockInfo->block;
837 if (found == false) {
838 char blockName1[BLOCK_NAME_LEN], blockName2[BLOCK_NAME_LEN];
839 dvmGetBlockName(bb, blockName1);
840 dvmGetBlockName(predBB, blockName2);
841 dvmDumpCFG(cUnit, "/sdcard/cfg/");
842 LOGE("Successor %s not found from %s",
843 blockName1, blockName2);
850 /* Identify code range in try blocks and set up the empty catch blocks */
/*
 * Marks every code-unit offset covered by a try block in
 * cUnit->tryBlockAddr, then pre-creates an (empty) basic block for each
 * catch handler address so later parsing can target them.
 */
851 static void processTryCatchBlocks(CompilationUnit *cUnit)
853 const Method *meth = cUnit->method;
854 const DexCode *pCode = dvmGetMethodCode(meth);
855 int triesSize = pCode->triesSize;
/* No try blocks: nothing to do. */
859 if (triesSize == 0) {
863 const DexTry *pTries = dexGetTries(pCode);
864 BitVector *tryBlockAddr = cUnit->tryBlockAddr;
866 /* Mark all the insn offsets in Try blocks */
867 for (i = 0; i < triesSize; i++) {
868 const DexTry* pTry = &pTries[i];
869 /* all in 16-bit units */
870 int startOffset = pTry->startAddr;
871 int endOffset = startOffset + pTry->insnCount;
873 for (offset = startOffset; offset < endOffset; offset++) {
874 dvmCompilerSetBit(tryBlockAddr, offset);
878 /* Iterate over each of the handlers to enqueue the empty Catch blocks */
879 offset = dexGetFirstHandlerOffset(pCode);
880 int handlersSize = dexGetHandlersSize(pCode);
882 for (i = 0; i < handlersSize; i++) {
883 DexCatchIterator iterator;
884 dexCatchIteratorInit(&iterator, pCode, offset);
887 DexCatchHandler* handler = dexCatchIteratorNext(&iterator);
889 if (handler == NULL) {
894 * Create dummy catch blocks first. Since these are created before
895 * other blocks are processed, "split" is specified as false.
897 findBlock(cUnit, handler->address,
904 offset = dexCatchIteratorGetEndOffset(&iterator, pCode);
908 /* Process instructions with the kInstrCanBranch flag */
/*
 * Resolves the branch target of a goto/if instruction, wires curBlock's
 * taken edge to the target block, and (for conditional branches) creates
 * or finds the fallthrough block at curOffset + width.
 */
909 static void processCanBranch(CompilationUnit *cUnit, BasicBlock *curBlock,
910 MIR *insn, int curOffset, int width, int flags,
911 const u2* codePtr, const u2* codeEnd)
/* Which register field holds the relative offset depends on the format
 * (goto: vA, if-test: vC, goto/16,32: vB); case labels are elided here. */
913 int target = curOffset;
914 switch (insn->dalvikInsn.opcode) {
918 target += (int) insn->dalvikInsn.vA;
926 target += (int) insn->dalvikInsn.vC;
934 target += (int) insn->dalvikInsn.vB;
937 LOGE("Unexpected opcode(%d) with kInstrCanBranch set",
938 insn->dalvikInsn.opcode);
941 BasicBlock *takenBlock = findBlock(cUnit, target,
946 curBlock->taken = takenBlock;
947 dvmCompilerSetBit(takenBlock->predecessors, curBlock->id);
949 /* Always terminate the current block for conditional branches */
950 if (flags & kInstrCanContinue) {
951 BasicBlock *fallthroughBlock = findBlock(cUnit,
954 * If the method is processed
955 * in sequential order from the
956 * beginning, we don't need to
957 * specify split for continue
958 * blocks. However, this
959 * routine can be called by
960 * compileLoop, which starts
961 * parsing the method from an
962 * arbitrary address in the
968 curBlock->fallThrough = fallthroughBlock;
969 dvmCompilerSetBit(fallthroughBlock->predecessors, curBlock->id);
970 } else if (codePtr < codeEnd) {
971 /* Create a fallthrough block for real instructions (incl. OP_NOP) */
972 if (contentIsInsn(codePtr)) {
973 findBlock(cUnit, curOffset + width,
982 /* Process instructions with the kInstrCanSwitch flag */
/*
 * Decodes the packed/sparse switch payload referenced by the instruction,
 * creates a successor-block entry per case target, and wires the
 * fallthrough edge for the default case.
 */
983 static void processCanSwitch(CompilationUnit *cUnit, BasicBlock *curBlock,
984 MIR *insn, int curOffset, int width, int flags)
/* vB is the relative offset from the switch opcode to its data payload. */
986 u2 *switchData= (u2 *) (cUnit->method->insns + curOffset +
987 insn->dalvikInsn.vB);
995 * Packed switch data format:
996 * ushort ident = 0x0100 magic value
997 * ushort size number of entries in the table
998 * int first_key first (and lowest) switch case value
999 * int targets[size] branch targets, relative to switch opcode
1001 * Total size is (4+size*2) 16-bit code units.
1003 if (insn->dalvikInsn.opcode == OP_PACKED_SWITCH) {
1004 assert(switchData[0] == kPackedSwitchSignature);
1005 size = switchData[1];
1006 firstKey = switchData[2] | (switchData[3] << 16);
1007 targetTable = (int *) &switchData[4];
1008 keyTable = NULL; // Make the compiler happy
1010 * Sparse switch data format:
1011 * ushort ident = 0x0200 magic value
1012 * ushort size number of entries in the table; > 0
1013 * int keys[size] keys, sorted low-to-high; 32-bit aligned
1014 * int targets[size] branch targets, relative to switch opcode
1016 * Total size is (2+size*4) 16-bit code units.
1019 assert(switchData[0] == kSparseSwitchSignature);
1020 size = switchData[1];
1021 keyTable = (int *) &switchData[2];
1022 targetTable = (int *) &switchData[2 + size*2];
1023 firstKey = 0; // To make the compiler happy
1026 if (curBlock->successorBlockList.blockListType != kNotUsed) {
1027 LOGE("Successor block list already in use: %d",
1028 curBlock->successorBlockList.blockListType);
1031 curBlock->successorBlockList.blockListType =
1032 (insn->dalvikInsn.opcode == OP_PACKED_SWITCH) ?
1033 kPackedSwitch : kSparseSwitch;
1034 dvmInitGrowableList(&curBlock->successorBlockList.blocks, size);
/* One successor entry per case; key is derived from firstKey (packed)
 * or read from the key table (sparse). */
1036 for (i = 0; i < size; i++) {
1037 BasicBlock *caseBlock = findBlock(cUnit, curOffset + targetTable[i],
1042 SuccessorBlockInfo *successorBlockInfo =
1043 (SuccessorBlockInfo *) dvmCompilerNew(sizeof(SuccessorBlockInfo),
1045 successorBlockInfo->block = caseBlock;
1046 successorBlockInfo->key = (insn->dalvikInsn.opcode == OP_PACKED_SWITCH)?
1047 firstKey + i : keyTable[i];
1048 dvmInsertGrowableList(&curBlock->successorBlockList.blocks,
1049 (intptr_t) successorBlockInfo);
1050 dvmCompilerSetBit(caseBlock->predecessors, curBlock->id);
1053 /* Fall-through case */
1054 BasicBlock *fallthroughBlock = findBlock(cUnit,
1060 curBlock->fallThrough = fallthroughBlock;
1061 dvmCompilerSetBit(fallthroughBlock->predecessors, curBlock->id);
1064 /* Process instructions with the kInstrCanThrow flag */
/*
 * For a throwing instruction inside a try range, attaches the matching
 * catch handlers as a kCatch successor list; always attaches an
 * exception-handling block on the taken edge, and terminates the current
 * block (fallthrough only for non-throw opcodes).
 */
1065 static void processCanThrow(CompilationUnit *cUnit, BasicBlock *curBlock,
1066 MIR *insn, int curOffset, int width, int flags,
1067 BitVector *tryBlockAddr, const u2 *codePtr,
1070 const Method *method = cUnit->method;
1071 const DexCode *dexCode = dvmGetMethodCode(method);
/* In a try block? Then enumerate its catch handlers. */
1074 if (dvmIsBitSet(tryBlockAddr, curOffset)) {
1075 DexCatchIterator iterator;
1077 if (!dexFindCatchHandler(&iterator, dexCode, curOffset)) {
1078 LOGE("Catch block not found in dexfile for insn %x in %s",
1079 curOffset, method->name);
1083 if (curBlock->successorBlockList.blockListType != kNotUsed) {
1084 LOGE("Successor block list already in use: %d",
1085 curBlock->successorBlockList.blockListType);
1088 curBlock->successorBlockList.blockListType = kCatch;
1089 dvmInitGrowableList(&curBlock->successorBlockList.blocks, 2);
1092 DexCatchHandler* handler = dexCatchIteratorNext(&iterator);
1094 if (handler == NULL) {
1098 BasicBlock *catchBlock = findBlock(cUnit, handler->address,
1104 SuccessorBlockInfo *successorBlockInfo =
1105 (SuccessorBlockInfo *) dvmCompilerNew(sizeof(SuccessorBlockInfo),
1107 successorBlockInfo->block = catchBlock;
1108 successorBlockInfo->key = handler->typeIdx;
1109 dvmInsertGrowableList(&curBlock->successorBlockList.blocks,
1110 (intptr_t) successorBlockInfo);
1111 dvmCompilerSetBit(catchBlock->predecessors, curBlock->id);
/* Synthetic exception-handling block on the taken edge. */
1114 BasicBlock *ehBlock = dvmCompilerNewBB(kExceptionHandling,
1115 cUnit->numBlocks++);
1116 curBlock->taken = ehBlock;
1117 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) ehBlock);
1118 ehBlock->startOffset = curOffset;
1119 dvmCompilerSetBit(ehBlock->predecessors, curBlock->id);
1123 * Force the current block to terminate.
1125 * Data may be present before codeEnd, so we need to parse it to know
1126 * whether it is code or data.
1128 if (codePtr < codeEnd) {
1129 /* Create a fallthrough block for real instructions (incl. OP_NOP) */
1130 if (contentIsInsn(codePtr)) {
1131 BasicBlock *fallthroughBlock = findBlock(cUnit,
1138 * OP_THROW and OP_THROW_VERIFICATION_ERROR are unconditional
1141 if (insn->dalvikInsn.opcode != OP_THROW_VERIFICATION_ERROR &&
1142 insn->dalvikInsn.opcode != OP_THROW) {
1143 curBlock->fallThrough = fallthroughBlock;
1144 dvmCompilerSetBit(fallthroughBlock->predecessors, curBlock->id);
1151 * Similar to dvmCompileTrace, but the entity processed here is the whole
1154 * TODO: implementation will be revisited when the trace builder can provide
1155 * whole-method traces.
/*
 * Whole-method JIT compilation pipeline:
 *   1. build entry/exit blocks and parse every instruction into basic
 *      blocks (delegating branch/switch/throw handling);
 *   2. verify predecessor info, run SSA transformation, register
 *      allocation and MIR->LIR lowering;
 *   3. assemble LIR to machine code, retrying while the assembler
 *      reports kRetryAll, then publish the code address via
 *      dvmJitSetCodeAddr.
 * Returns early if the method was already compiled (codePtr registered).
 * NOTE(review): the final return statements are elided in this listing.
 */
1157 bool dvmCompileMethod(const Method *method, JitTranslationInfo *info)
1159 CompilationUnit cUnit;
1160 const DexCode *dexCode = dvmGetMethodCode(method);
1161 const u2 *codePtr = dexCode->insns;
1162 const u2 *codeEnd = dexCode->insns + dexCode->insnsSize;
1164 unsigned int curOffset = 0;
1166 /* Method already compiled */
1167 if (dvmJitGetMethodAddr(codePtr)) {
1168 info->codeAddress = NULL;
1172 memset(&cUnit, 0, sizeof(cUnit));
1173 cUnit.method = method;
1175 cUnit.jitMode = kJitMethod;
1177 /* Initialize the block list */
1178 dvmInitGrowableList(&cUnit.blockList, 4);
1181 * FIXME - PC reconstruction list won't be needed after the codegen routines
1182 * are enhanced to true method mode.
1184 /* Initialize the PC reconstruction list */
1185 dvmInitGrowableList(&cUnit.pcReconstructionList, 8);
1187 /* Allocate the bit-vector to track the beginning of basic blocks */
1188 BitVector *tryBlockAddr = dvmCompilerAllocBitVector(dexCode->insnsSize,
1189 true /* expandable */);
1190 cUnit.tryBlockAddr = tryBlockAddr;
1192 /* Create the default entry and exit blocks and enter them to the list */
1193 BasicBlock *entryBlock = dvmCompilerNewBB(kEntryBlock, numBlocks++);
1194 BasicBlock *exitBlock = dvmCompilerNewBB(kExitBlock, numBlocks++);
1196 cUnit.entryBlock = entryBlock;
1197 cUnit.exitBlock = exitBlock;
1199 dvmInsertGrowableList(&cUnit.blockList, (intptr_t) entryBlock);
1200 dvmInsertGrowableList(&cUnit.blockList, (intptr_t) exitBlock);
1202 /* Current block to record parsed instructions */
1203 BasicBlock *curBlock = dvmCompilerNewBB(kDalvikByteCode, numBlocks++);
1204 curBlock->startOffset = 0;
1205 dvmInsertGrowableList(&cUnit.blockList, (intptr_t) curBlock);
1206 entryBlock->fallThrough = curBlock;
1207 dvmCompilerSetBit(curBlock->predecessors, entryBlock->id);
1210 * Store back the number of blocks since new blocks may be created of
1213 cUnit.numBlocks = numBlocks;
1215 /* Identify code range in try blocks and set up the empty catch blocks */
1216 processTryCatchBlocks(&cUnit);
1218 /* Parse all instructions and put them into containing basic blocks */
1219 while (codePtr < codeEnd) {
1220 MIR *insn = (MIR *) dvmCompilerNew(sizeof(MIR), true);
1221 insn->offset = curOffset;
1222 int width = parseInsn(codePtr, &insn->dalvikInsn, false);
1223 insn->width = width;
1225 /* Terminate when the data section is seen */
1229 dvmCompilerAppendMIR(curBlock, insn);
1232 int flags = dexGetFlagsFromOpcode(insn->dalvikInsn.opcode);
/* Dispatch on instruction class to build CFG edges. */
1234 if (flags & kInstrCanBranch) {
1235 processCanBranch(&cUnit, curBlock, insn, curOffset, width, flags,
1237 } else if (flags & kInstrCanReturn) {
1238 curBlock->fallThrough = exitBlock;
1239 dvmCompilerSetBit(exitBlock->predecessors, curBlock->id);
1241 * Terminate the current block if there are instructions
1244 if (codePtr < codeEnd) {
1246 * Create a fallthrough block for real instructions
1249 if (contentIsInsn(codePtr)) {
1250 findBlock(&cUnit, curOffset + width,
1257 } else if (flags & kInstrCanThrow) {
1258 processCanThrow(&cUnit, curBlock, insn, curOffset, width, flags,
1259 tryBlockAddr, codePtr, codeEnd);
1260 } else if (flags & kInstrCanSwitch) {
1261 processCanSwitch(&cUnit, curBlock, insn, curOffset, width, flags);
1264 BasicBlock *nextBlock = findBlock(&cUnit, curOffset,
1271 * The next instruction could be the target of a previously parsed
1272 * forward branch so a block is already created. If the current
1273 * instruction is not an unconditional branch, connect them through
1274 * the fall-through link.
1276 assert(curBlock->fallThrough == NULL ||
1277 curBlock->fallThrough == nextBlock ||
1278 curBlock->fallThrough == exitBlock);
1280 if ((curBlock->fallThrough == NULL) &&
1281 (flags & kInstrCanContinue) {
1282 curBlock->fallThrough = nextBlock;
1283 dvmCompilerSetBit(nextBlock->predecessors, curBlock->id);
1285 curBlock = nextBlock;
1289 if (cUnit.printMe) {
1290 dvmCompilerDumpCompilationUnit(&cUnit);
1293 /* Adjust this value accordingly once inlining is performed */
1294 cUnit.numDalvikRegisters = cUnit.method->registersSize;
1296 /* Verify if all blocks are connected as claimed */
1297 /* FIXME - to be disabled in the future */
1298 dvmCompilerDataFlowAnalysisDispatcher(&cUnit, verifyPredInfo,
1300 false /* isIterative */);
1303 /* Perform SSA transformation for the whole method */
1304 dvmCompilerMethodSSATransformation(&cUnit);
1306 dvmCompilerInitializeRegAlloc(&cUnit); // Needs to happen after SSA naming
1308 /* Allocate Registers using simple local allocation scheme */
1309 dvmCompilerLocalRegAlloc(&cUnit);
1311 /* Convert MIR to LIR, etc. */
1312 dvmCompilerMethodMIR2LIR(&cUnit);
1315 //dvmDumpCFG(&cUnit, "/sdcard/cfg/");
1317 /* Method is not empty */
1318 if (cUnit.firstLIRInsn) {
1319 /* Convert LIR into machine code. Loop for recoverable retries */
1321 dvmCompilerAssembleLIR(&cUnit, info);
1322 cUnit.assemblerRetries++;
1323 if (cUnit.printMe && cUnit.assemblerStatus != kSuccess)
1324 LOGD("Assembler abort #%d on %d",cUnit.assemblerRetries,
1325 cUnit.assemblerStatus);
1326 } while (cUnit.assemblerStatus == kRetryAll);
1328 if (cUnit.printMe) {
1329 dvmCompilerCodegenDump(&cUnit);
1332 if (info->codeAddress) {
1333 dvmJitSetCodeAddr(dexCode->insns, info->codeAddress,
1334 info->instructionSet, true, 0);
1336 * Clear the codeAddress for the enclosing trace to reuse the info
1338 info->codeAddress = NULL;
1345 /* Extending the trace by crawling the code from curBlock */
/*
 * Depth-first crawl starting at curBlock: decode Dalvik instructions into
 * MIRs, append them to their basic blocks, and recursively follow the
 * taken/fallThrough edges to extend the trace.  Returns false immediately
 * when curBlock was already visited; otherwise reports whether the walk
 * changed the block graph.
 * NOTE(review): this excerpt is missing interior lines (brace balance is
 * off), so the comments below describe only the statements that are
 * visible here.
 */
1346 static bool exhaustTrace(CompilationUnit *cUnit, BasicBlock *curBlock)
1348     unsigned int curOffset = curBlock->startOffset;
1349     const u2 *codePtr = cUnit->method->insns + curOffset;
    /* Each block is crawled at most once */
1351     if (curBlock->visited == true) return false;
1353     curBlock->visited = true;
    /* Entry/exit pseudo-blocks hold no Dalvik code - nothing to parse */
1355     if (curBlock->blockType == kEntryBlock ||
1356         curBlock->blockType == kExitBlock) {
1361      * Block has been parsed - check the taken/fallThrough in case it is a split
1364     if (curBlock->firstMIRInsn != NULL) {
1365           bool changed = false;
1366           if (curBlock->taken)
1367               changed |= exhaustTrace(cUnit, curBlock->taken);
1368           if (curBlock->fallThrough)
1369               changed |= exhaustTrace(cUnit, curBlock->fallThrough);
    /* Allocate a zero-initialized MIR and decode the instruction at curOffset */
1373         MIR *insn = (MIR *) dvmCompilerNew(sizeof(MIR), true);
1374         insn->offset = curOffset;
1375         int width = parseInsn(codePtr, &insn->dalvikInsn, false);
1376         insn->width = width;
1378         /* Terminate when the data section is seen */
1382         dvmCompilerAppendMIR(curBlock, insn);
1385         int flags = dexGetFlagsFromOpcode(insn->dalvikInsn.opcode);
1387         /* Stop extending the trace after seeing these instructions */
1388         if (flags & (kInstrCanReturn | kInstrCanSwitch | kInstrInvoke)) {
    /* Route control to the exit block and record the predecessor edge */
1389             curBlock->fallThrough = cUnit->exitBlock;
1390             dvmCompilerSetBit(cUnit->exitBlock->predecessors, curBlock->id);
1392         } else if (flags & kInstrCanBranch) {
    /* Branch: processCanBranch creates/links the targets, then recurse */
1393             processCanBranch(cUnit, curBlock, insn, curOffset, width, flags,
1395             if (curBlock->taken) {
1396                 exhaustTrace(cUnit, curBlock->taken);
1398             if (curBlock->fallThrough) {
1399                 exhaustTrace(cUnit, curBlock->fallThrough);
1404             BasicBlock *nextBlock = findBlock(cUnit, curOffset,
1411              * The next instruction could be the target of a previously parsed
1412              * forward branch so a block is already created. If the current
1413              * instruction is not an unconditional branch, connect them through
1414              * the fall-through link.
1416             assert(curBlock->fallThrough == NULL ||
1417                    curBlock->fallThrough == nextBlock ||
1418                    curBlock->fallThrough == cUnit->exitBlock);
1420             if ((curBlock->fallThrough == NULL) &&
1421                 (flags & kInstrCanContinue)) {
    /* Flag that codegen must emit an explicit jump to reach nextBlock */
1422                 curBlock->needFallThroughBranch = true;
1423                 curBlock->fallThrough = nextBlock;
1424                 dvmCompilerSetBit(nextBlock->predecessors, curBlock->id);
1426             /* Block has been visited - no more parsing needed */
1427             if (nextBlock->visited == true) {
1430             curBlock = nextBlock;
1436 /* Compile a loop */
/*
 * Compile a loop trace starting at startOffset.  Builds the CFG by
 * exhaustively crawling the code from the loop head (exhaustTrace), attaches
 * the backward-chaining / PC-reconstruction / exception-handling blocks,
 * runs loop formation and loop optimizations, then lowers and assembles the
 * result.  Returns whether a code address was produced; on the visible
 * fallback path it re-compiles the same trace as a straight-line trace with
 * JIT_OPT_NO_LOOP set.
 * NOTE(review): this excerpt is missing interior lines (unbalanced
 * braces/labels); comments describe only the visible statements.
 */
1437 static bool compileLoop(CompilationUnit *cUnit, unsigned int startOffset,
1438                         JitTraceDescription *desc, int numMaxInsts,
1439                         JitTranslationInfo *info, jmp_buf *bailPtr,
1443     unsigned int curOffset = startOffset;
1446 #if defined(WITH_JIT_TUNING)
1447     CompilerMethodStats *methodStats;
1450     cUnit->jitMode = kJitLoop;
1452     /* Initialize the block list */
1453     dvmInitGrowableList(&cUnit->blockList, 4);
1455     /* Initialize the PC reconstruction list */
1456     dvmInitGrowableList(&cUnit->pcReconstructionList, 8);
1458     /* Create the default entry and exit blocks and enter them to the list */
1459     BasicBlock *entryBlock = dvmCompilerNewBB(kEntryBlock, numBlocks++);
1460     entryBlock->startOffset = curOffset;
1461     BasicBlock *exitBlock = dvmCompilerNewBB(kExitBlock, numBlocks++);
1463     cUnit->entryBlock = entryBlock;
1464     cUnit->exitBlock = exitBlock;
1466     dvmInsertGrowableList(&cUnit->blockList, (intptr_t) entryBlock);
1467     dvmInsertGrowableList(&cUnit->blockList, (intptr_t) exitBlock);
1469     /* Current block to record parsed instructions */
1470     BasicBlock *curBlock = dvmCompilerNewBB(kDalvikByteCode, numBlocks++);
1471     curBlock->startOffset = curOffset;
1473     dvmInsertGrowableList(&cUnit->blockList, (intptr_t) curBlock);
1474     entryBlock->fallThrough = curBlock;
1475     dvmCompilerSetBit(curBlock->predecessors, entryBlock->id);
1478      * Store back the number of blocks since new blocks may be created of
1481     cUnit->numBlocks = numBlocks;
    /* Clear the visited flag on every block, then crawl from the loop head */
1484     dvmCompilerDataFlowAnalysisDispatcher(cUnit,
1485                                           dvmCompilerClearVisitedFlag,
1487                                           false /* isIterative */);
1488     changed = exhaustTrace(cUnit, curBlock);
1491     /* Backward chaining block */
1492     bb = dvmCompilerNewBB(kChainingCellBackwardBranch, cUnit->numBlocks++);
1493     dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb);
1494     cUnit->backChainBlock = bb;
1496     /* A special block to host PC reconstruction code */
1497     bb = dvmCompilerNewBB(kPCReconstruction, cUnit->numBlocks++);
1498     dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb);
1500     /* And one final block that publishes the PC and raises the exception */
1501     bb = dvmCompilerNewBB(kExceptionHandling, cUnit->numBlocks++);
1502     dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb);
1503     cUnit->puntBlock = bb;
1505     cUnit->numDalvikRegisters = cUnit->method->registersSize;
1507     /* Verify if all blocks are connected as claimed */
1508     /* FIXME - to be disabled in the future */
1509     dvmCompilerDataFlowAnalysisDispatcher(cUnit, verifyPredInfo,
1511                                           false /* isIterative */);
1514     /* Try to identify a loop */
1515     if (!dvmCompilerBuildLoop(cUnit))
1518     dvmCompilerLoopOpt(cUnit);
1521      * Change the backward branch to the backward chaining cell after dataflow
1522      * analysis/optimizations are done.
1524     dvmCompilerInsertBackwardChaining(cUnit);
1526     dvmCompilerInitializeRegAlloc(cUnit);
1528     /* Allocate Registers using simple local allocation scheme */
1529     dvmCompilerLocalRegAlloc(cUnit);
1531     /* Convert MIR to LIR, etc. */
1532     dvmCompilerMIR2LIR(cUnit);
1534     /* Loop contains never executed blocks / heavy instructions */
1535     if (cUnit->quitLoopMode) {
1536         if (cUnit->printMe || gDvmJit.receivedSIGUSR2) {
1537             LOGD("Loop trace @ offset %04x aborted due to unresolved code info",
1538                  cUnit->entryBlock->startOffset);
1543     /* Convert LIR into machine code. Loop for recoverable retries */
1545         dvmCompilerAssembleLIR(cUnit, info);
1546         cUnit->assemblerRetries++;
1547         if (cUnit->printMe && cUnit->assemblerStatus != kSuccess)
1548             LOGD("Assembler abort #%d on %d", cUnit->assemblerRetries,
1549                  cUnit->assemblerStatus);
1550     } while (cUnit->assemblerStatus == kRetryAll);
1552     /* Loop is too big - bail out */
1553     if (cUnit->assemblerStatus == kRetryHalve) {
1557     if (cUnit->printMe || gDvmJit.receivedSIGUSR2) {
1558         LOGD("Loop trace @ offset %04x", cUnit->entryBlock->startOffset);
1559         dvmCompilerCodegenDump(cUnit);
1563      * If this trace uses class objects as constants,
1564      * dvmJitInstallClassObjectPointers will switch the thread state
1565      * to running and look up the class pointers using the descriptor/loader
1566      * tuple stored in the callsite info structure. We need to make this window
1567      * as short as possible since it is blocking GC.
1569     if (cUnit->hasClassLiterals && info->codeAddress) {
1570         dvmJitInstallClassObjectPointers(cUnit, (char *) info->codeAddress);
1574      * Since callsiteinfo is allocated from the arena, delay the reset until
1575      * class pointers are resolved.
1577     dvmCompilerArenaReset();
1579     assert(cUnit->assemblerStatus == kSuccess);
1580 #if defined(WITH_JIT_TUNING)
1581     /* Locate the entry to store compilation statistics for this method */
1582     methodStats = dvmCompilerAnalyzeMethodBody(desc->method, false);
1583     methodStats->nativeSize += cUnit->totalSize;
1585     return info->codeAddress != NULL;
    /* Retry the original trace as a non-loop trace: JIT_OPT_NO_LOOP is set */
1589     dvmCompilerArenaReset();
1590     return dvmCompileTrace(desc, numMaxInsts, info, bailPtr,
1591                            optHints | JIT_OPT_NO_LOOP);
1595 * Main entry point to start trace compilation. Basic blocks are constructed
1596 * first and they will be passed to the codegen routines to convert Dalvik
1597 * bytecode into machine code.
1599 bool dvmCompileTrace(JitTraceDescription *desc, int numMaxInsts,
1600 JitTranslationInfo *info, jmp_buf *bailPtr,
1603 const DexCode *dexCode = dvmGetMethodCode(desc->method);
1604 const JitTraceRun* currRun = &desc->trace[0];
1605 unsigned int curOffset = currRun->info.frag.startOffset;
1606 unsigned int startOffset = curOffset;
1607 unsigned int numInsts = currRun->info.frag.numInsts;
1608 const u2 *codePtr = dexCode->insns + curOffset;
1609 int traceSize = 0; // # of half-words
1610 const u2 *startCodePtr = codePtr;
1611 BasicBlock *curBB, *entryCodeBB;
1613 static int compilationId;
1614 CompilationUnit cUnit;
1615 GrowableList *blockList;
1616 #if defined(WITH_JIT_TUNING)
1617 CompilerMethodStats *methodStats;
1620 /* If we've already compiled this trace, just return success */
1621 if (dvmJitGetTraceAddr(startCodePtr) && !info->discardResult) {
1623 * Make sure the codeAddress is NULL so that it won't clobber the
1626 info->codeAddress = NULL;
1630 /* If the work order is stale, discard it */
1631 if (info->cacheVersion != gDvmJit.cacheVersion) {
1636 memset(&cUnit, 0, sizeof(CompilationUnit));
1638 #if defined(WITH_JIT_TUNING)
1639 /* Locate the entry to store compilation statistics for this method */
1640 methodStats = dvmCompilerAnalyzeMethodBody(desc->method, false);
1643 /* Set the recover buffer pointer */
1644 cUnit.bailPtr = bailPtr;
1646 /* Initialize the printMe flag */
1647 cUnit.printMe = gDvmJit.printMe;
1649 /* Setup the method */
1650 cUnit.method = desc->method;
1652 /* Store the trace descriptor and set the initial mode */
1653 cUnit.traceDesc = desc;
1654 cUnit.jitMode = kJitTrace;
1656 /* Initialize the PC reconstruction list */
1657 dvmInitGrowableList(&cUnit.pcReconstructionList, 8);
1659 /* Initialize the basic block list */
1660 blockList = &cUnit.blockList;
1661 dvmInitGrowableList(blockList, 8);
1663 /* Identify traces that we don't want to compile */
1664 if (gDvmJit.methodTable) {
1665 int len = strlen(desc->method->clazz->descriptor) +
1666 strlen(desc->method->name) + 1;
1667 char *fullSignature = (char *)dvmCompilerNew(len, true);
1668 strcpy(fullSignature, desc->method->clazz->descriptor);
1669 strcat(fullSignature, desc->method->name);
1671 int hashValue = dvmComputeUtf8Hash(fullSignature);
1674 * Doing three levels of screening to see whether we want to skip
1675 * compiling this method
1678 /* First, check the full "class;method" signature */
1680 dvmHashTableLookup(gDvmJit.methodTable, hashValue,
1681 fullSignature, (HashCompareFunc) strcmp,
1685 /* Full signature not found - check the enclosing class */
1686 if (methodFound == false) {
1687 int hashValue = dvmComputeUtf8Hash(desc->method->clazz->descriptor);
1689 dvmHashTableLookup(gDvmJit.methodTable, hashValue,
1690 (char *) desc->method->clazz->descriptor,
1691 (HashCompareFunc) strcmp, false) !=
1693 /* Enclosing class not found - check the method name */
1694 if (methodFound == false) {
1695 int hashValue = dvmComputeUtf8Hash(desc->method->name);
1697 dvmHashTableLookup(gDvmJit.methodTable, hashValue,
1698 (char *) desc->method->name,
1699 (HashCompareFunc) strcmp, false) !=
1703 * Debug by call-graph is enabled. Check if the debug list
1704 * covers any methods on the VM stack.
1706 if (methodFound == false && gDvmJit.checkCallGraph == true) {
1708 filterMethodByCallGraph(info->requestingThread,
1709 desc->method->name);
1715 * Under the following conditions, the trace will be *conservatively*
1716 * compiled by only containing single-step instructions to and from the
1718 * 1) If includeSelectedMethod == false, the method matches the full or
1719 * partial signature stored in the hash table.
1721 * 2) If includeSelectedMethod == true, the method does not match the
1722 * full and partial signature stored in the hash table.
1724 if (gDvmJit.includeSelectedMethod != methodFound) {
1725 cUnit.allSingleStep = true;
1727 /* Compile the trace as normal */
1729 /* Print the method we cherry picked */
1730 if (gDvmJit.includeSelectedMethod == true) {
1731 cUnit.printMe = true;
1736 /* Allocate the entry block */
1737 curBB = dvmCompilerNewBB(kEntryBlock, numBlocks++);
1738 dvmInsertGrowableList(blockList, (intptr_t) curBB);
1739 curBB->startOffset = curOffset;
1741 entryCodeBB = dvmCompilerNewBB(kDalvikByteCode, numBlocks++);
1742 dvmInsertGrowableList(blockList, (intptr_t) entryCodeBB);
1743 entryCodeBB->startOffset = curOffset;
1744 curBB->fallThrough = entryCodeBB;
1745 curBB = entryCodeBB;
1747 if (cUnit.printMe) {
1748 LOGD("--------\nCompiler: Building trace for %s, offset 0x%x",
1749 desc->method->name, curOffset);
1753 * Analyze the trace descriptor and include up to the maximal number
1754 * of Dalvik instructions into the IR.
1759 insn = (MIR *)dvmCompilerNew(sizeof(MIR), true);
1760 insn->offset = curOffset;
1761 width = parseInsn(codePtr, &insn->dalvikInsn, cUnit.printMe);
1763 /* The trace should never incude instruction data */
1765 insn->width = width;
1767 dvmCompilerAppendMIR(curBB, insn);
1770 int flags = dexGetFlagsFromOpcode(insn->dalvikInsn.opcode);
1772 if (flags & kInstrInvoke) {
1773 const Method *calleeMethod = (const Method *)
1774 currRun[JIT_TRACE_CUR_METHOD].info.meta;
1775 assert(numInsts == 1);
1776 CallsiteInfo *callsiteInfo =
1777 (CallsiteInfo *)dvmCompilerNew(sizeof(CallsiteInfo), true);
1778 callsiteInfo->classDescriptor = (const char *)
1779 currRun[JIT_TRACE_CLASS_DESC].info.meta;
1780 callsiteInfo->classLoader = (Object *)
1781 currRun[JIT_TRACE_CLASS_LOADER].info.meta;
1782 callsiteInfo->method = calleeMethod;
1783 insn->meta.callsiteInfo = callsiteInfo;
1786 /* Instruction limit reached - terminate the trace here */
1787 if (cUnit.numInsts >= numMaxInsts) {
1790 if (--numInsts == 0) {
1791 if (currRun->info.frag.runEnd) {
1794 /* Advance to the next trace description (ie non-meta info) */
1797 } while (!currRun->isCode);
1799 /* Dummy end-of-run marker seen */
1800 if (currRun->info.frag.numInsts == 0) {
1804 curBB = dvmCompilerNewBB(kDalvikByteCode, numBlocks++);
1805 dvmInsertGrowableList(blockList, (intptr_t) curBB);
1806 curOffset = currRun->info.frag.startOffset;
1807 numInsts = currRun->info.frag.numInsts;
1808 curBB->startOffset = curOffset;
1809 codePtr = dexCode->insns + curOffset;
1817 #if defined(WITH_JIT_TUNING)
1818 /* Convert # of half-word to bytes */
1819 methodStats->compiledDalvikSize += traceSize * 2;
1823 * Now scan basic blocks containing real code to connect the
1824 * taken/fallthrough links. Also create chaining cells for code not included
1828 for (blockId = 0; blockId < blockList->numUsed; blockId++) {
1829 curBB = (BasicBlock *) dvmGrowableListGetElement(blockList, blockId);
1830 MIR *lastInsn = curBB->lastMIRInsn;
1831 /* Skip empty blocks */
1832 if (lastInsn == NULL) {
1835 curOffset = lastInsn->offset;
1836 unsigned int targetOffset = curOffset;
1837 unsigned int fallThroughOffset = curOffset + lastInsn->width;
1838 bool isInvoke = false;
1839 const Method *callee = NULL;
1841 findBlockBoundary(desc->method, curBB->lastMIRInsn, curOffset,
1842 &targetOffset, &isInvoke, &callee);
1844 /* Link the taken and fallthrough blocks */
1845 BasicBlock *searchBB;
1847 int flags = dexGetFlagsFromOpcode(lastInsn->dalvikInsn.opcode);
1849 if (flags & kInstrInvoke) {
1850 cUnit.hasInvoke = true;
1853 /* Backward branch seen */
1854 if (isInvoke == false &&
1855 (flags & kInstrCanBranch) != 0 &&
1856 targetOffset < curOffset &&
1857 (optHints & JIT_OPT_NO_LOOP) == 0) {
1858 dvmCompilerArenaReset();
1859 return compileLoop(&cUnit, startOffset, desc, numMaxInsts,
1860 info, bailPtr, optHints);
1863 /* No backward branch in the trace - start searching the next BB */
1864 size_t searchBlockId;
1865 for (searchBlockId = blockId+1; searchBlockId < blockList->numUsed;
1867 searchBB = (BasicBlock *) dvmGrowableListGetElement(blockList,
1869 if (targetOffset == searchBB->startOffset) {
1870 curBB->taken = searchBB;
1871 dvmCompilerSetBit(searchBB->predecessors, curBB->id);
1873 if (fallThroughOffset == searchBB->startOffset) {
1874 curBB->fallThrough = searchBB;
1875 dvmCompilerSetBit(searchBB->predecessors, curBB->id);
1878 * Fallthrough block of an invoke instruction needs to be
1879 * aligned to 4-byte boundary (alignment instruction to be
1882 if (flags & kInstrInvoke) {
1883 searchBB->isFallThroughFromInvoke = true;
1889 * Some blocks are ended by non-control-flow-change instructions,
1890 * currently only due to trace length constraint. In this case we need
1891 * to generate an explicit branch at the end of the block to jump to
1892 * the chaining cell.
1894 curBB->needFallThroughBranch =
1895 ((flags & (kInstrCanBranch | kInstrCanSwitch | kInstrCanReturn |
1896 kInstrInvoke)) == 0);
1897 if (lastInsn->dalvikInsn.opcode == OP_PACKED_SWITCH ||
1898 lastInsn->dalvikInsn.opcode == OP_SPARSE_SWITCH) {
1900 const u2 *switchData = desc->method->insns + lastInsn->offset +
1901 lastInsn->dalvikInsn.vB;
1902 int size = switchData[1];
1903 int maxChains = MIN(size, MAX_CHAINED_SWITCH_CASES);
1906 * Generate the landing pad for cases whose ranks are higher than
1907 * MAX_CHAINED_SWITCH_CASES. The code will re-enter the interpreter
1908 * through the NoChain point.
1910 if (maxChains != size) {
1911 cUnit.switchOverflowPad =
1912 desc->method->insns + lastInsn->offset;
1915 s4 *targets = (s4 *) (switchData + 2 +
1916 (lastInsn->dalvikInsn.opcode == OP_PACKED_SWITCH ?
1919 /* One chaining cell for the first MAX_CHAINED_SWITCH_CASES cases */
1920 for (i = 0; i < maxChains; i++) {
1921 BasicBlock *caseChain = dvmCompilerNewBB(kChainingCellNormal,
1923 dvmInsertGrowableList(blockList, (intptr_t) caseChain);
1924 caseChain->startOffset = lastInsn->offset + targets[i];
1927 /* One more chaining cell for the default case */
1928 BasicBlock *caseChain = dvmCompilerNewBB(kChainingCellNormal,
1930 dvmInsertGrowableList(blockList, (intptr_t) caseChain);
1931 caseChain->startOffset = lastInsn->offset + lastInsn->width;
1932 /* Fallthrough block not included in the trace */
1933 } else if (!isUnconditionalBranch(lastInsn) &&
1934 curBB->fallThrough == NULL) {
1935 BasicBlock *fallThroughBB;
1937 * If the chaining cell is after an invoke or
1938 * instruction that cannot change the control flow, request a hot
1941 if (isInvoke || curBB->needFallThroughBranch) {
1942 fallThroughBB = dvmCompilerNewBB(kChainingCellHot, numBlocks++);
1944 fallThroughBB = dvmCompilerNewBB(kChainingCellNormal,
1947 dvmInsertGrowableList(blockList, (intptr_t) fallThroughBB);
1948 fallThroughBB->startOffset = fallThroughOffset;
1949 curBB->fallThrough = fallThroughBB;
1950 dvmCompilerSetBit(fallThroughBB->predecessors, curBB->id);
1952 /* Target block not included in the trace */
1953 if (curBB->taken == NULL &&
1954 (isGoto(lastInsn) || isInvoke ||
1955 (targetOffset != UNKNOWN_TARGET && targetOffset != curOffset))) {
1956 BasicBlock *newBB = NULL;
1958 /* Monomorphic callee */
1960 /* JNI call doesn't need a chaining cell */
1961 if (!dvmIsNativeMethod(callee)) {
1962 newBB = dvmCompilerNewBB(kChainingCellInvokeSingleton,
1964 newBB->startOffset = 0;
1965 newBB->containingMethod = callee;
1967 /* Will resolve at runtime */
1969 newBB = dvmCompilerNewBB(kChainingCellInvokePredicted,
1971 newBB->startOffset = 0;
1973 /* For unconditional branches, request a hot chaining cell */
1975 #if !defined(WITH_SELF_VERIFICATION)
1976 newBB = dvmCompilerNewBB(dexIsGoto(flags) ?
1978 kChainingCellNormal,
1980 newBB->startOffset = targetOffset;
1982 /* Handle branches that branch back into the block */
1983 if (targetOffset >= curBB->firstMIRInsn->offset &&
1984 targetOffset <= curBB->lastMIRInsn->offset) {
1985 newBB = dvmCompilerNewBB(kChainingCellBackwardBranch,
1988 newBB = dvmCompilerNewBB(dexIsGoto(flags) ?
1990 kChainingCellNormal,
1993 newBB->startOffset = targetOffset;
1997 curBB->taken = newBB;
1998 dvmCompilerSetBit(newBB->predecessors, curBB->id);
1999 dvmInsertGrowableList(blockList, (intptr_t) newBB);
2004 /* Now create a special block to host PC reconstruction code */
2005 curBB = dvmCompilerNewBB(kPCReconstruction, numBlocks++);
2006 dvmInsertGrowableList(blockList, (intptr_t) curBB);
2008 /* And one final block that publishes the PC and raise the exception */
2009 curBB = dvmCompilerNewBB(kExceptionHandling, numBlocks++);
2010 dvmInsertGrowableList(blockList, (intptr_t) curBB);
2011 cUnit.puntBlock = curBB;
2013 if (cUnit.printMe) {
2015 dexProtoCopyMethodDescriptor(&desc->method->prototype);
2016 LOGD("TRACEINFO (%d): 0x%08x %s%s.%s 0x%x %d of %d, %d blocks",
2018 (intptr_t) desc->method->insns,
2019 desc->method->clazz->descriptor,
2022 desc->trace[0].info.frag.startOffset,
2029 cUnit.numBlocks = numBlocks;
2031 /* Set the instruction set to use (NOTE: later components may change it) */
2032 cUnit.instructionSet = dvmCompilerInstructionSet();
2034 /* Inline transformation @ the MIR level */
2035 if (cUnit.hasInvoke && !(gDvmJit.disableOpt & (1 << kMethodInlining))) {
2036 dvmCompilerInlineMIR(&cUnit, info);
2039 cUnit.numDalvikRegisters = cUnit.method->registersSize;
2041 /* Preparation for SSA conversion */
2042 dvmInitializeSSAConversion(&cUnit);
2044 dvmCompilerNonLoopAnalysis(&cUnit);
2046 dvmCompilerInitializeRegAlloc(&cUnit); // Needs to happen after SSA naming
2048 if (cUnit.printMe) {
2049 dvmCompilerDumpCompilationUnit(&cUnit);
2052 /* Allocate Registers using simple local allocation scheme */
2053 dvmCompilerLocalRegAlloc(&cUnit);
2055 /* Convert MIR to LIR, etc. */
2056 dvmCompilerMIR2LIR(&cUnit);
2058 /* Convert LIR into machine code. Loop for recoverable retries */
2060 dvmCompilerAssembleLIR(&cUnit, info);
2061 cUnit.assemblerRetries++;
2062 if (cUnit.printMe && cUnit.assemblerStatus != kSuccess)
2063 LOGD("Assembler abort #%d on %d",cUnit.assemblerRetries,
2064 cUnit.assemblerStatus);
2065 } while (cUnit.assemblerStatus == kRetryAll);
2067 if (cUnit.printMe) {
2068 LOGD("Trace Dalvik PC: %p", startCodePtr);
2069 dvmCompilerCodegenDump(&cUnit);
2070 LOGD("End %s%s, %d Dalvik instructions",
2071 desc->method->clazz->descriptor, desc->method->name,
2075 if (cUnit.assemblerStatus == kRetryHalve) {
2076 /* Reset the compiler resource pool before retry */
2077 dvmCompilerArenaReset();
2079 /* Halve the instruction count and start from the top */
2080 return dvmCompileTrace(desc, cUnit.numInsts / 2, info, bailPtr,
2085 * If this trace uses class objects as constants,
2086 * dvmJitInstallClassObjectPointers will switch the thread state
2087 * to running and look up the class pointers using the descriptor/loader
2088 * tuple stored in the callsite info structure. We need to make this window
2089 * as short as possible since it is blocking GC.
2091 if (cUnit.hasClassLiterals && info->codeAddress) {
2092 dvmJitInstallClassObjectPointers(&cUnit, (char *) info->codeAddress);
2096 * Since callsiteinfo is allocated from the arena, delay the reset until
2097 * class pointers are resolved.
2099 dvmCompilerArenaReset();
2101 assert(cUnit.assemblerStatus == kSuccess);
2102 #if defined(WITH_JIT_TUNING)
2103 methodStats->nativeSize += cUnit.totalSize;
2106 return info->codeAddress != NULL;