2 * Copyright (C) 2009 The Android Open Source Project
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
8 * http://www.apache.org/licenses/LICENSE-2.0
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
18 #include "libdex/DexOpcodes.h"
19 #include "libdex/DexCatch.h"
20 #include "interp/Jit.h"
21 #include "CompilerInternals.h"
/*
 * Return true if the 16-bit code unit at codePtr is a real Dalvik
 * instruction rather than embedded data (switch / fill-array payloads
 * live inline in the insns stream and can look like OP_NOP in the low
 * byte). NOTE(review): the load of 'instr' from *codePtr is elided in
 * this listing -- presumably "u2 instr = *codePtr;" precedes the cast.
 */
24 static inline bool contentIsInsn(const u2 *codePtr) {
26 Opcode opcode = (Opcode)(instr & 0xff);
29 * Since the low 8-bit in metadata may look like OP_NOP, we need to check
30 * both the low and whole sub-word to determine whether it is code or data.
/* A true OP_NOP has the whole unit == 0; any other unit whose low byte
 * is OP_NOP is a data-payload marker, so it is rejected here. */
32 return (opcode != OP_NOP || instr == 0);
36 * Parse an instruction, return the length of the instruction
/*
 * Decodes the instruction at codePtr into *decInsn and returns its width
 * in 16-bit code units (via dexGetWidthFromOpcode). Returns early when
 * codePtr points at inline data -- NOTE(review): the early-return value
 * is elided in this listing; presumably 0, so callers can detect the
 * start of the data section. The third parameter (elided name,
 * presumably printMe/verbose) gates the disassembly LOGD below.
 */
38 static inline int parseInsn(const u2 *codePtr, DecodedInstruction *decInsn,
41 // Don't parse instruction data
42 if (!contentIsInsn(codePtr)) {
47 Opcode opcode = dexOpcodeFromCodeUnit(instr);
49 dexDecodeInstruction(codePtr, decInsn);
/* Debug-only disassembly of the decoded instruction. */
51 char *decodedString = dvmCompilerGetDalvikDisassembly(decInsn, NULL);
52 LOGD("%p: %#06x %s\n", codePtr, opcode, decodedString);
54 return dexGetWidthFromOpcode(opcode);
57 #define UNKNOWN_TARGET 0xffffffff
60 * Identify block-ending instructions and collect supplemental information
61 * regarding the following instructions.
/*
 * For block-ending instructions, report where control goes next:
 *   *target   - branch/invoke destination, or UNKNOWN_TARGET when it is
 *               not a compile-time constant (returns, unresolved callees).
 *   *isInvoke - set for invoke opcodes (the stores are elided in this
 *               listing -- presumably set true in each invoke case).
 *   *callee   - resolved callee Method for direct/static/super invokes
 *               when it is non-native; left untouched otherwise.
 * Return value semantics are elided here; presumably "true" when the
 * instruction ends a basic block -- TODO confirm against full source.
 */
63 static inline bool findBlockBoundary(const Method *caller, MIR *insn,
64 unsigned int curOffset,
65 unsigned int *target, bool *isInvoke,
66 const Method **callee)
68 switch (insn->dalvikInsn.opcode) {
69 /* Target is not compile-time constant */
73 case OP_RETURN_OBJECT:
75 *target = UNKNOWN_TARGET;
77 case OP_INVOKE_VIRTUAL:
78 case OP_INVOKE_VIRTUAL_RANGE:
79 case OP_INVOKE_VIRTUAL_JUMBO:
80 case OP_INVOKE_INTERFACE:
81 case OP_INVOKE_INTERFACE_RANGE:
82 case OP_INVOKE_INTERFACE_JUMBO:
83 case OP_INVOKE_VIRTUAL_QUICK:
84 case OP_INVOKE_VIRTUAL_QUICK_RANGE:
88 case OP_INVOKE_SUPER_RANGE:
89 case OP_INVOKE_SUPER_JUMBO: {
/* NOTE(review): pResMethods[vB] is dereferenced for methodIndex with no
 * NULL check; presumably resolution is guaranteed by the time the JIT
 * sees this invoke -- confirm against the interpreter's resolve path. */
90 int mIndex = caller->clazz->pDvmDex->
91 pResMethods[insn->dalvikInsn.vB]->methodIndex;
92 const Method *calleeMethod =
93 caller->clazz->super->vtable[mIndex];
/* Only record a concrete target for resolved, non-native callees. */
95 if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) {
96 *target = (unsigned int) calleeMethod->insns;
99 *callee = calleeMethod;
102 case OP_INVOKE_STATIC:
103 case OP_INVOKE_STATIC_RANGE:
104 case OP_INVOKE_STATIC_JUMBO: {
105 const Method *calleeMethod =
106 caller->clazz->pDvmDex->pResMethods[insn->dalvikInsn.vB];
108 if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) {
109 *target = (unsigned int) calleeMethod->insns;
112 *callee = calleeMethod;
115 case OP_INVOKE_SUPER_QUICK:
116 case OP_INVOKE_SUPER_QUICK_RANGE: {
/* Quick form: vB is already a vtable index into the superclass. */
117 const Method *calleeMethod =
118 caller->clazz->super->vtable[insn->dalvikInsn.vB];
120 if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) {
121 *target = (unsigned int) calleeMethod->insns;
124 *callee = calleeMethod;
127 case OP_INVOKE_DIRECT:
128 case OP_INVOKE_DIRECT_RANGE:
129 case OP_INVOKE_DIRECT_JUMBO: {
130 const Method *calleeMethod =
131 caller->clazz->pDvmDex->pResMethods[insn->dalvikInsn.vB];
132 if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) {
133 *target = (unsigned int) calleeMethod->insns;
136 *callee = calleeMethod;
/* Relative branches: the offset lives in vA/vB/vC depending on the
 * goto/if format (the case labels are elided in this listing). */
142 *target = curOffset + (int) insn->dalvikInsn.vA;
151 *target = curOffset + (int) insn->dalvikInsn.vC;
160 *target = curOffset + (int) insn->dalvikInsn.vB;
/*
 * Return true if the instruction is an unconditional goto. The case
 * labels are elided in this listing -- presumably OP_GOTO, OP_GOTO_16
 * and OP_GOTO_32 -- TODO confirm against the full source.
 */
169 static inline bool isGoto(MIR *insn)
171 switch (insn->dalvikInsn.opcode) {
182 * Identify unconditional branch instructions
/*
 * True for instructions after which control never falls through:
 * returns (OP_RETURN_OBJECT visible below; siblings elided) and,
 * presumably, gotos via isGoto() -- TODO confirm the default arm.
 */
184 static inline bool isUnconditionalBranch(MIR *insn)
186 switch (insn->dalvikInsn.opcode) {
190 case OP_RETURN_OBJECT:
198 * dvmHashTableLookup() callback
200 static int compareMethod(const CompilerMethodStats *m1,
201 const CompilerMethodStats *m2)
203 return (int) m1->method - (int) m2->method;
207 * Analyze the body of the method to collect high-level information regarding
210 * - is getter/setter?
211 * - can throw exception?
213 * Currently the inliner only handles getters and setters. When its capability
214 * becomes more sophisticated more information will be retrieved here.
/*
 * Called once per instruction of a candidate callee; starts from an
 * optimistic attribute mask and clears bits as disqualifying behavior
 * is observed. 'offset' (elided from the visible signature) is the
 * instruction's offset; offset 0 + return-void marks an empty method.
 * Returns the narrowed attribute mask.
 */
216 static int analyzeInlineTarget(DecodedInstruction *dalvikInsn, int attributes,
219 int flags = dexGetFlagsFromOpcode(dalvikInsn->opcode);
220 int dalvikOpcode = dalvikInsn->opcode;
/* Any invoke means the method is not a leaf. */
222 if (flags & kInstrInvoke) {
223 attributes &= ~METHOD_IS_LEAF;
/* Non-return instructions must match the expected getter/setter
 * dataflow shape, otherwise drop those attributes. */
226 if (!(flags & kInstrCanReturn)) {
227 if (!(dvmCompilerDataFlowAttributes[dalvikOpcode] &
229 attributes &= ~METHOD_IS_GETTER;
231 if (!(dvmCompilerDataFlowAttributes[dalvikOpcode] &
233 attributes &= ~METHOD_IS_SETTER;
238 * The expected instruction sequence is setter will never return value and
239 * getter will also do. Clear the bits if the behavior is discovered
242 if (flags & kInstrCanReturn) {
243 if (dalvikOpcode == OP_RETURN_VOID) {
244 attributes &= ~METHOD_IS_GETTER;
247 attributes &= ~METHOD_IS_SETTER;
251 if (flags & kInstrCanThrow) {
252 attributes &= ~METHOD_IS_THROW_FREE;
/* A method whose very first instruction is return-void is empty. */
255 if (offset == 0 && dalvikOpcode == OP_RETURN_VOID) {
256 attributes |= METHOD_IS_EMPTY;
260 * Check if this opcode is selected for single stepping.
261 * If so, don't inline the callee as there is no stack frame for the
262 * interpreter to single-step through the instruction.
264 if (SINGLE_STEP_OP(dalvikOpcode)) {
265 attributes &= ~(METHOD_IS_GETTER | METHOD_IS_SETTER);
272 * Analyze each method whose traces are ever compiled. Collect a variety of
273 * statistics like the ratio of exercised vs overall code and code bloat
274 * ratios. If isCallee is true, also analyze each instruction in more details
275 * to see if it is suitable for inlining.
/*
 * Returns the (possibly newly created) per-method stats entry from
 * gDvmJit.methodStatsTable. The entry is keyed by the Method pointer
 * (compareMethod) and hashed by the method name. Analysis is done at
 * most once per purpose: METHOD_IS_CALLEE / METHOD_IS_HOT act as
 * "already analyzed" markers. NOTE(review): the 'isCallee' parameter is
 * on an elided signature line; its declared position is not visible here.
 */
277 CompilerMethodStats *dvmCompilerAnalyzeMethodBody(const Method *method,
280 const DexCode *dexCode = dvmGetMethodCode(method);
281 const u2 *codePtr = dexCode->insns;
282 const u2 *codeEnd = dexCode->insns + dexCode->insnsSize;
/* Hash on the method name (dvmHashTableLookup callback compares the
 * actual Method pointers, so name collisions are harmless). */
284 int hashValue = dvmComputeUtf8Hash(method->name);
286 CompilerMethodStats dummyMethodEntry; // For hash table lookup
287 CompilerMethodStats *realMethodEntry; // For hash table storage
289 /* For lookup only */
290 dummyMethodEntry.method = method;
291 realMethodEntry = (CompilerMethodStats *)
292 dvmHashTableLookup(gDvmJit.methodStatsTable,
295 (HashCompareFunc) compareMethod,
298 /* This method has never been analyzed before - create an entry */
299 if (realMethodEntry == NULL) {
/* NOTE(review): calloc result is used without a NULL check on the
 * elided following lines -- presumably OOM aborts elsewhere. */
301 (CompilerMethodStats *) calloc(1, sizeof(CompilerMethodStats));
302 realMethodEntry->method = method;
304 dvmHashTableLookup(gDvmJit.methodStatsTable, hashValue,
306 (HashCompareFunc) compareMethod,
310 /* This method is invoked as a callee and has been analyzed - just return */
311 if ((isCallee == true) && (realMethodEntry->attributes & METHOD_IS_CALLEE))
312 return realMethodEntry;
315 * Similarly, return if this method has been compiled before as a hot
318 if ((isCallee == false) &&
319 (realMethodEntry->attributes & METHOD_IS_HOT))
320 return realMethodEntry;
324 /* Method hasn't been analyzed for the desired purpose yet */
326 /* Aggressively set the attributes until proven otherwise */
327 attributes = METHOD_IS_LEAF | METHOD_IS_THROW_FREE | METHOD_IS_CALLEE |
328 METHOD_IS_GETTER | METHOD_IS_SETTER;
330 attributes = METHOD_IS_HOT;
333 /* Count the number of instructions */
334 while (codePtr < codeEnd) {
335 DecodedInstruction dalvikInsn;
336 int width = parseInsn(codePtr, &dalvikInsn, false);
338 /* Terminate when the data section is seen */
/* Per-instruction narrowing of the optimistic attribute mask. */
343 attributes = analyzeInlineTarget(&dalvikInsn, attributes, insnSize);
351 * Only handle simple getters/setters with one instruction followed by
354 if ((attributes & (METHOD_IS_GETTER | METHOD_IS_SETTER)) &&
356 attributes &= ~(METHOD_IS_GETTER | METHOD_IS_SETTER);
/* insnSize is in 16-bit code units; dalvikSize is in bytes. */
359 realMethodEntry->dalvikSize = insnSize * 2;
360 realMethodEntry->attributes |= attributes;
363 /* Uncomment the following to explore various callee patterns */
364 if (attributes & METHOD_IS_THROW_FREE) {
365 LOGE("%s%s is inlinable%s", method->clazz->descriptor, method->name,
366 (attributes & METHOD_IS_EMPTY) ? " empty" : "");
369 if (attributes & METHOD_IS_LEAF) {
370 LOGE("%s%s is leaf %d%s", method->clazz->descriptor, method->name,
371 insnSize, insnSize < 5 ? " (small)" : "");
374 if (attributes & (METHOD_IS_GETTER | METHOD_IS_SETTER)) {
375 LOGE("%s%s is %s", method->clazz->descriptor, method->name,
376 attributes & METHOD_IS_GETTER ? "getter": "setter");
379 (METHOD_IS_LEAF | METHOD_IS_THROW_FREE | METHOD_IS_CALLEE)) {
380 LOGE("%s%s is inlinable non setter/getter", method->clazz->descriptor,
385 return realMethodEntry;
389 * Crawl the stack of the thread that requesed compilation to see if any of the
390 * ancestors are on the blacklist.
/*
 * Walks the requesting thread's Dalvik frames from innermost outward,
 * looking each ancestor's method name up in gDvmJit.methodTable.
 * The visible body shows the match/log path; the return values are on
 * elided lines -- presumably true when a listed ancestor is found.
 */
392 static bool filterMethodByCallGraph(Thread *thread, const char *curMethodName)
394 /* Crawl the Dalvik stack frames and compare the method name*/
/* A StackSaveArea sits immediately below each frame pointer; the "- 1"
 * arithmetic steps from frame pointer to its save area. */
395 StackSaveArea *ssaPtr = ((StackSaveArea *) thread->curFrame) - 1;
396 while (ssaPtr != ((StackSaveArea *) NULL) - 1) {
397 const Method *method = ssaPtr->method;
399 int hashValue = dvmComputeUtf8Hash(method->name);
401 dvmHashTableLookup(gDvmJit.methodTable, hashValue,
402 (char *) method->name,
403 (HashCompareFunc) strcmp, false) !=
406 LOGD("Method %s (--> %s) found on the JIT %s list",
407 method->name, curMethodName,
408 gDvmJit.includeSelectedMethod ? "white" : "black");
/* Advance to the caller's save area. */
413 ssaPtr = ((StackSaveArea *) ssaPtr->prevFrame) - 1;
419 * Since we are including instructions from possibly a cold method into the
420 * current trace, we need to make sure that all the associated information
421 * with the callee is properly initialized. If not, we punt on this inline
424 * TODO: volatile instructions will be handled later.
/*
 * Returns whether the instruction's referenced entities (class, field,
 * string, or callee method) are already resolved in the callee's DexFile
 * caches. Each case checks the relevant pRes* slot; the return
 * statements themselves are on elided lines -- presumably false when the
 * slot is NULL, true otherwise.
 */
426 bool dvmCompilerCanIncludeThisInstruction(const Method *method,
427 const DecodedInstruction *insn)
429 switch (insn->opcode) {
430 case OP_NEW_INSTANCE:
431 case OP_NEW_INSTANCE_JUMBO:
433 case OP_CHECK_CAST_JUMBO: {
434 ClassObject *classPtr = (ClassObject *)(void*)
435 (method->clazz->pDvmDex->pResClasses[insn->vB]);
437 /* Class hasn't been initialized yet */
438 if (classPtr == NULL) {
/* Static field accessors: require the field to be resolved. */
446 case OP_SGET_WIDE_JUMBO:
448 case OP_SGET_OBJECT_JUMBO:
449 case OP_SGET_BOOLEAN:
450 case OP_SGET_BOOLEAN_JUMBO:
452 case OP_SGET_BYTE_JUMBO:
454 case OP_SGET_CHAR_JUMBO:
456 case OP_SGET_SHORT_JUMBO:
460 case OP_SPUT_WIDE_JUMBO:
462 case OP_SPUT_OBJECT_JUMBO:
463 case OP_SPUT_BOOLEAN:
464 case OP_SPUT_BOOLEAN_JUMBO:
466 case OP_SPUT_BYTE_JUMBO:
468 case OP_SPUT_CHAR_JUMBO:
470 case OP_SPUT_SHORT_JUMBO: {
471 void *fieldPtr = (void*)
472 (method->clazz->pDvmDex->pResFields[insn->vB]);
474 if (fieldPtr == NULL) {
479 case OP_INVOKE_SUPER:
480 case OP_INVOKE_SUPER_RANGE:
481 case OP_INVOKE_SUPER_JUMBO: {
/* NOTE(review): pResMethods[vB] is dereferenced for methodIndex before
 * any NULL check (same pattern as findBlockBoundary) -- confirm that
 * resolution is guaranteed on this path. */
482 int mIndex = method->clazz->pDvmDex->
483 pResMethods[insn->vB]->methodIndex;
484 const Method *calleeMethod = method->clazz->super->vtable[mIndex];
485 if (calleeMethod == NULL) {
490 case OP_INVOKE_SUPER_QUICK:
491 case OP_INVOKE_SUPER_QUICK_RANGE: {
492 const Method *calleeMethod = method->clazz->super->vtable[insn->vB];
493 if (calleeMethod == NULL) {
498 case OP_INVOKE_STATIC:
499 case OP_INVOKE_STATIC_RANGE:
500 case OP_INVOKE_STATIC_JUMBO:
501 case OP_INVOKE_DIRECT:
502 case OP_INVOKE_DIRECT_RANGE:
503 case OP_INVOKE_DIRECT_JUMBO: {
504 const Method *calleeMethod =
505 method->clazz->pDvmDex->pResMethods[insn->vB];
506 if (calleeMethod == NULL) {
512 case OP_CONST_CLASS_JUMBO: {
513 void *classPtr = (void*)
514 (method->clazz->pDvmDex->pResClasses[insn->vB]);
516 if (classPtr == NULL) {
521 case OP_CONST_STRING_JUMBO:
522 case OP_CONST_STRING: {
523 void *strPtr = (void*)
524 (method->clazz->pDvmDex->pResStrings[insn->vB]);
526 if (strPtr == NULL) {
536 /* Split an existing block from the specified code offset into two */
/*
 * origBlock keeps the MIRs before codeOffset; a new bottomBlock takes
 * the MIR at codeOffset through the old last MIR, inheriting origBlock's
 * taken/fallThrough successors and successor list. Predecessor bitmaps
 * of all affected successors are rewired from origBlock to bottomBlock.
 * Returns the new bottom block (return line elided in this listing).
 */
537 static BasicBlock *splitBlock(CompilationUnit *cUnit,
538 unsigned int codeOffset,
539 BasicBlock *origBlock)
/* Scan for the MIR that starts exactly at codeOffset; the loop header is
 * elided here. "Break split failed" fires if no such MIR exists. */
541 MIR *insn = origBlock->firstMIRInsn;
543 if (insn->offset == codeOffset) break;
547 LOGE("Break split failed");
550 BasicBlock *bottomBlock = dvmCompilerNewBB(kDalvikByteCode,
552 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bottomBlock);
554 bottomBlock->startOffset = codeOffset;
555 bottomBlock->firstMIRInsn = insn;
556 bottomBlock->lastMIRInsn = origBlock->lastMIRInsn;
558 /* Handle the taken path */
559 bottomBlock->taken = origBlock->taken;
560 if (bottomBlock->taken) {
561 origBlock->taken = NULL;
562 dvmCompilerClearBit(bottomBlock->taken->predecessors, origBlock->id);
563 dvmCompilerSetBit(bottomBlock->taken->predecessors, bottomBlock->id);
566 /* Handle the fallthrough path */
567 bottomBlock->fallThrough = origBlock->fallThrough;
568 origBlock->fallThrough = bottomBlock;
569 origBlock->needFallThroughBranch = true;
570 dvmCompilerSetBit(bottomBlock->predecessors, origBlock->id);
571 if (bottomBlock->fallThrough) {
572 dvmCompilerClearBit(bottomBlock->fallThrough->predecessors,
574 dvmCompilerSetBit(bottomBlock->fallThrough->predecessors,
578 /* Handle the successor list */
579 if (origBlock->successorBlockList.blockListType != kNotUsed) {
/* The whole successor list moves to the bottom block. */
580 bottomBlock->successorBlockList = origBlock->successorBlockList;
581 origBlock->successorBlockList.blockListType = kNotUsed;
582 GrowableListIterator iterator;
584 dvmGrowableListIteratorInit(&bottomBlock->successorBlockList.blocks,
587 SuccessorBlockInfo *successorBlockInfo =
588 (SuccessorBlockInfo *) dvmGrowableListIteratorNext(&iterator);
589 if (successorBlockInfo == NULL) break;
590 BasicBlock *bb = successorBlockInfo->block;
591 dvmCompilerClearBit(bb->predecessors, origBlock->id);
592 dvmCompilerSetBit(bb->predecessors, bottomBlock->id);
/* Detach the MIR chain: origBlock now ends just before 'insn'. */
596 origBlock->lastMIRInsn = insn->prev;
598 insn->prev->next = NULL;
604 * Given a code offset, find out the block that starts with it. If the offset
605 * is in the middle of an existing block, split it into two.
/*
 * Linear search over cUnit->blockList for a Dalvik bytecode block whose
 * startOffset equals codeOffset. With 'split' set, an offset landing
 * inside an existing block triggers splitBlock(). With 'create' set
 * (check elided in this listing), a fresh block is allocated when no
 * match is found; otherwise presumably NULL is returned -- TODO confirm.
 */
607 static BasicBlock *findBlock(CompilationUnit *cUnit,
608 unsigned int codeOffset,
609 bool split, bool create)
611 GrowableList *blockList = &cUnit->blockList;
615 for (i = 0; i < blockList->numUsed; i++) {
616 bb = (BasicBlock *) blockList->elemList[i];
617 if (bb->blockType != kDalvikByteCode) continue;
618 if (bb->startOffset == codeOffset) return bb;
619 /* Check if a branch jumps into the middle of an existing block */
620 if ((split == true) && (codeOffset > bb->startOffset) &&
621 (bb->lastMIRInsn != NULL) &&
622 (codeOffset <= bb->lastMIRInsn->offset)) {
623 BasicBlock *newBB = splitBlock(cUnit, codeOffset, bb);
/* Not found: create a new empty bytecode block at codeOffset. */
628 bb = dvmCompilerNewBB(kDalvikByteCode, cUnit->numBlocks++);
629 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb);
630 bb->startOffset = codeOffset;
636 /* Dump the CFG into a DOT graph */
/*
 * Writes a Graphviz .dot file named after the method (descriptor, name,
 * signature, entry offset) under dirPrefix, with special characters in
 * the name rewritten to be filesystem/shell friendly. Emits one node per
 * reachable block in DFS order, dotted edges for taken branches, solid
 * edges for fall-through, and record nodes for switch/catch successor
 * lists. NOTE(review): fopen() failure handling is on elided lines --
 * confirm the early-return before the fprintf calls.
 */
637 void dvmDumpCFG(CompilationUnit *cUnit, const char *dirPrefix)
639 const Method *method = cUnit->method;
641 char *signature = dexProtoCopyMethodDescriptor(&method->prototype);
642 char startOffset[80];
643 sprintf(startOffset, "_%x", cUnit->entryBlock->fallThrough->startOffset);
644 char *fileName = (char *) dvmCompilerNew(
646 strlen(method->clazz->descriptor) +
647 strlen(method->name) +
649 strlen(startOffset) +
650 strlen(".dot") + 1, true);
651 sprintf(fileName, "%s%s%s%s%s.dot", dirPrefix,
652 method->clazz->descriptor, method->name, signature, startOffset);
656 * Convert the special characters into a filesystem- and shell-friendly
/* Only the generated portion after dirPrefix is sanitized. */
660 for (i = strlen(dirPrefix); fileName[i]; i++) {
661 if (fileName[i] == '/') {
663 } else if (fileName[i] == ';') {
665 } else if (fileName[i] == '$') {
667 } else if (fileName[i] == '(' || fileName[i] == ')') {
669 } else if (fileName[i] == '<' || fileName[i] == '>') {
673 file = fopen(fileName, "w");
677 fprintf(file, "digraph G {\n");
679 fprintf(file, " rankdir=TB\n");
681 int numReachableBlocks = cUnit->numReachableBlocks;
683 const GrowableList *blockList = &cUnit->blockList;
/* Emit nodes in depth-first order so the drawing roughly follows flow. */
685 for (idx = 0; idx < numReachableBlocks; idx++) {
686 int blockIdx = cUnit->dfsOrder.elemList[idx];
687 BasicBlock *bb = (BasicBlock *) dvmGrowableListGetElement(blockList,
689 if (bb == NULL) break;
690 if (bb->blockType == kEntryBlock) {
691 fprintf(file, " entry [shape=Mdiamond];\n");
692 } else if (bb->blockType == kExitBlock) {
693 fprintf(file, " exit [shape=Mdiamond];\n");
694 } else if (bb->blockType == kDalvikByteCode) {
695 fprintf(file, " block%04x [shape=record,label = \"{ \\\n",
698 fprintf(file, " {block id %d\\l}%s\\\n", bb->id,
699 bb->firstMIRInsn ? " | " : " ");
700 for (mir = bb->firstMIRInsn; mir; mir = mir->next) {
701 fprintf(file, " {%04x %s\\l}%s\\\n", mir->offset,
703 dvmCompilerFullDisassembler(cUnit, mir) :
704 dexGetOpcodeName(mir->dalvikInsn.opcode),
705 mir->next ? " | " : " ");
707 fprintf(file, " }\"];\n\n");
708 } else if (bb->blockType == kExceptionHandling) {
709 char blockName[BLOCK_NAME_LEN];
711 dvmGetBlockName(bb, blockName);
712 fprintf(file, " %s [shape=invhouse];\n", blockName);
715 char blockName1[BLOCK_NAME_LEN], blockName2[BLOCK_NAME_LEN];
/* Taken edges are drawn dotted; fall-through edges solid. */
718 dvmGetBlockName(bb, blockName1);
719 dvmGetBlockName(bb->taken, blockName2);
720 fprintf(file, " %s:s -> %s:n [style=dotted]\n",
721 blockName1, blockName2);
723 if (bb->fallThrough) {
724 dvmGetBlockName(bb, blockName1);
725 dvmGetBlockName(bb->fallThrough, blockName2);
726 fprintf(file, " %s:s -> %s:n\n", blockName1, blockName2);
729 if (bb->successorBlockList.blockListType != kNotUsed) {
730 fprintf(file, " succ%04x [shape=%s,label = \"{ \\\n",
732 (bb->successorBlockList.blockListType == kCatch) ?
733 "Mrecord" : "record");
734 GrowableListIterator iterator;
735 dvmGrowableListIteratorInit(&bb->successorBlockList.blocks,
737 SuccessorBlockInfo *successorBlockInfo =
738 (SuccessorBlockInfo *) dvmGrowableListIteratorNext(&iterator);
742 if (successorBlockInfo == NULL) break;
744 BasicBlock *destBlock = successorBlockInfo->block;
745 SuccessorBlockInfo *nextSuccessorBlockInfo =
746 (SuccessorBlockInfo *) dvmGrowableListIteratorNext(&iterator);
748 fprintf(file, " {<f%d> %04x: %04x\\l}%s\\\n",
750 successorBlockInfo->key,
751 destBlock->startOffset,
752 (nextSuccessorBlockInfo != NULL) ? " | " : " ");
754 successorBlockInfo = nextSuccessorBlockInfo;
756 fprintf(file, " }\"];\n\n");
758 dvmGetBlockName(bb, blockName1);
759 fprintf(file, " %s:s -> succ%04x:n [style=dashed]\n",
760 blockName1, bb->startOffset);
/* Second pass over the successor list: connect each record field to
 * its destination block for switch successors. */
762 if (bb->successorBlockList.blockListType == kPackedSwitch ||
763 bb->successorBlockList.blockListType == kSparseSwitch) {
765 dvmGrowableListIteratorInit(&bb->successorBlockList.blocks,
770 SuccessorBlockInfo *successorBlockInfo =
771 (SuccessorBlockInfo *)
772 dvmGrowableListIteratorNext(&iterator);
773 if (successorBlockInfo == NULL) break;
775 BasicBlock *destBlock = successorBlockInfo->block;
777 dvmGetBlockName(destBlock, blockName2);
778 fprintf(file, " succ%04x:f%d:e -> %s:n\n",
779 bb->startOffset, succId++,
787 * If we need to debug the dominator tree, uncomment the following code
790 dvmGetBlockName(bb, blockName1);
791 fprintf(file, " cfg%s [label=\"%s\", shape=none];\n",
792 blockName1, blockName1);
794 dvmGetBlockName(bb->iDom, blockName2);
795 fprintf(file, " cfg%s:s -> cfg%s:n\n\n",
796 blockName2, blockName1);
800 fprintf(file, "}\n");
804 /* Verify if all the successor is connected with all the claimed predecessors */
/*
 * Debug pass: for every bit set in bb->predecessors, confirm the named
 * predecessor actually reaches bb via taken, fallThrough, or its
 * successor list. On a mismatch it dumps the CFG and logs an error.
 * Return value lines are elided (runs under the dataflow dispatcher).
 */
805 static bool verifyPredInfo(CompilationUnit *cUnit, BasicBlock *bb)
807 BitVectorIterator bvIterator;
809 dvmBitVectorIteratorInit(bb->predecessors, &bvIterator);
811 int blockIdx = dvmBitVectorIteratorNext(&bvIterator);
812 if (blockIdx == -1) break;
813 BasicBlock *predBB = (BasicBlock *)
814 dvmGrowableListGetElement(&cUnit->blockList, blockIdx);
/* 'found' is declared/updated on elided lines -- presumably set true
 * in each matching branch below. */
816 if (predBB->taken == bb) {
818 } else if (predBB->fallThrough == bb) {
820 } else if (predBB->successorBlockList.blockListType != kNotUsed) {
821 GrowableListIterator iterator;
822 dvmGrowableListIteratorInit(&predBB->successorBlockList.blocks,
825 SuccessorBlockInfo *successorBlockInfo =
826 (SuccessorBlockInfo *)
827 dvmGrowableListIteratorNext(&iterator);
828 if (successorBlockInfo == NULL) break;
829 BasicBlock *succBB = successorBlockInfo->block;
836 if (found == false) {
837 char blockName1[BLOCK_NAME_LEN], blockName2[BLOCK_NAME_LEN];
838 dvmGetBlockName(bb, blockName1);
839 dvmGetBlockName(predBB, blockName2);
/* Dump for post-mortem inspection before reporting the broken edge. */
840 dvmDumpCFG(cUnit, "/sdcard/cfg/");
841 LOGE("Successor %s not found from %s",
842 blockName1, blockName2);
849 /* Identify code range in try blocks and set up the empty catch blocks */
/*
 * Two phases: (1) mark every 16-bit insn offset covered by a try range
 * in cUnit->tryBlockAddr; (2) walk the catch-handler list and create an
 * empty basic block at each handler address so later parsing can attach
 * edges to them. Early-returns when the method has no tries.
 */
850 static void processTryCatchBlocks(CompilationUnit *cUnit)
852 const Method *meth = cUnit->method;
853 const DexCode *pCode = dvmGetMethodCode(meth);
854 int triesSize = pCode->triesSize;
858 if (triesSize == 0) {
862 const DexTry *pTries = dexGetTries(pCode);
863 BitVector *tryBlockAddr = cUnit->tryBlockAddr;
865 /* Mark all the insn offsets in Try blocks */
866 for (i = 0; i < triesSize; i++) {
867 const DexTry* pTry = &pTries[i];
868 /* all in 16-bit units */
869 int startOffset = pTry->startAddr;
870 int endOffset = startOffset + pTry->insnCount;
872 for (offset = startOffset; offset < endOffset; offset++) {
873 dvmCompilerSetBit(tryBlockAddr, offset);
877 /* Iterate over each of the handlers to enqueue the empty Catch blocks */
878 offset = dexGetFirstHandlerOffset(pCode);
879 int handlersSize = dexGetHandlersSize(pCode);
881 for (i = 0; i < handlersSize; i++) {
882 DexCatchIterator iterator;
883 dexCatchIteratorInit(&iterator, pCode, offset);
886 DexCatchHandler* handler = dexCatchIteratorNext(&iterator);
888 if (handler == NULL) {
893 * Create dummy catch blocks first. Since these are created before
894 * other blocks are processed, "split" is specified as false.
896 findBlock(cUnit, handler->address,
/* Advance to the next handler list entry. */
903 offset = dexCatchIteratorGetEndOffset(&iterator, pCode);
907 /* Process instructions with the kInstrCanBranch flag */
/*
 * Computes the branch target from the opcode-format-specific offset
 * field (vA for gotos, vC for two-register ifs, vB for if-zero forms --
 * the case labels are elided in this listing), connects curBlock->taken
 * to the target block, and for conditional branches also creates the
 * fall-through block at curOffset + width.
 */
908 static void processCanBranch(CompilationUnit *cUnit, BasicBlock *curBlock,
909 MIR *insn, int curOffset, int width, int flags,
910 const u2* codePtr, const u2* codeEnd)
912 int target = curOffset;
913 switch (insn->dalvikInsn.opcode) {
917 target += (int) insn->dalvikInsn.vA;
925 target += (int) insn->dalvikInsn.vC;
933 target += (int) insn->dalvikInsn.vB;
936 LOGE("Unexpected opcode(%d) with kInstrCanBranch set",
937 insn->dalvikInsn.opcode);
940 BasicBlock *takenBlock = findBlock(cUnit, target,
945 curBlock->taken = takenBlock;
946 dvmCompilerSetBit(takenBlock->predecessors, curBlock->id);
948 /* Always terminate the current block for conditional branches */
949 if (flags & kInstrCanContinue) {
950 BasicBlock *fallthroughBlock = findBlock(cUnit,
953 * If the method is processed
954 * in sequential order from the
955 * beginning, we don't need to
956 * specify split for continue
957 * blocks. However, this
958 * routine can be called by
959 * compileLoop, which starts
960 * parsing the method from an
961 * arbitrary address in the
967 curBlock->fallThrough = fallthroughBlock;
968 dvmCompilerSetBit(fallthroughBlock->predecessors, curBlock->id);
969 } else if (codePtr < codeEnd) {
970 /* Create a fallthrough block for real instructions (incl. OP_NOP) */
971 if (contentIsInsn(codePtr)) {
972 findBlock(cUnit, curOffset + width,
981 /* Process instructions with the kInstrCanSwitch flag */
/*
 * Decodes the packed/sparse switch payload (located at vB code units
 * past the switch opcode), builds a successor list with one entry per
 * case keyed by the case value, and wires the fall-through edge.
 */
982 static void processCanSwitch(CompilationUnit *cUnit, BasicBlock *curBlock,
983 MIR *insn, int curOffset, int width, int flags)
985 u2 *switchData= (u2 *) (cUnit->method->insns + curOffset +
986 insn->dalvikInsn.vB);
994 * Packed switch data format:
995 * ushort ident = 0x0100 magic value
996 * ushort size number of entries in the table
997 * int first_key first (and lowest) switch case value
998 * int targets[size] branch targets, relative to switch opcode
1000 * Total size is (4+size*2) 16-bit code units.
1002 if (insn->dalvikInsn.opcode == OP_PACKED_SWITCH) {
1003 assert(switchData[0] == kPackedSwitchSignature);
1004 size = switchData[1];
/* first_key is a 32-bit int stored as two 16-bit units, low first. */
1005 firstKey = switchData[2] | (switchData[3] << 16);
1006 targetTable = (int *) &switchData[4];
1007 keyTable = NULL; // Make the compiler happy
1009 * Sparse switch data format:
1010 * ushort ident = 0x0200 magic value
1011 * ushort size number of entries in the table; > 0
1012 * int keys[size] keys, sorted low-to-high; 32-bit aligned
1013 * int targets[size] branch targets, relative to switch opcode
1015 * Total size is (2+size*4) 16-bit code units.
1018 assert(switchData[0] == kSparseSwitchSignature);
1019 size = switchData[1];
1020 keyTable = (int *) &switchData[2];
/* Targets follow the key array: size keys, each 2 code units. */
1021 targetTable = (int *) &switchData[2 + size*2];
1022 firstKey = 0; // To make the compiler happy
1025 if (curBlock->successorBlockList.blockListType != kNotUsed) {
1026 LOGE("Successor block list already in use: %d",
1027 curBlock->successorBlockList.blockListType);
1030 curBlock->successorBlockList.blockListType =
1031 (insn->dalvikInsn.opcode == OP_PACKED_SWITCH) ?
1032 kPackedSwitch : kSparseSwitch;
1033 dvmInitGrowableList(&curBlock->successorBlockList.blocks, size);
1035 for (i = 0; i < size; i++) {
/* Case targets are relative to the switch opcode's own offset. */
1036 BasicBlock *caseBlock = findBlock(cUnit, curOffset + targetTable[i],
1041 SuccessorBlockInfo *successorBlockInfo =
1042 (SuccessorBlockInfo *) dvmCompilerNew(sizeof(SuccessorBlockInfo),
1044 successorBlockInfo->block = caseBlock;
1045 successorBlockInfo->key = (insn->dalvikInsn.opcode == OP_PACKED_SWITCH)?
1046 firstKey + i : keyTable[i];
1047 dvmInsertGrowableList(&curBlock->successorBlockList.blocks,
1048 (intptr_t) successorBlockInfo);
1049 dvmCompilerSetBit(caseBlock->predecessors, curBlock->id);
1052 /* Fall-through case */
1053 BasicBlock *fallthroughBlock = findBlock(cUnit,
1059 curBlock->fallThrough = fallthroughBlock;
1060 dvmCompilerSetBit(fallthroughBlock->predecessors, curBlock->id);
1063 /* Process instructions with the kInstrCanThrow flag */
/*
 * If the throwing instruction sits inside a try range (tryBlockAddr bit
 * set), attach a kCatch successor list with one entry per handler.
 * Regardless, the block is terminated: taken goes to a synthetic
 * kExceptionHandling block, and fall-through continues to the next
 * instruction unless the opcode is an unconditional throw.
 */
1064 static void processCanThrow(CompilationUnit *cUnit, BasicBlock *curBlock,
1065 MIR *insn, int curOffset, int width, int flags,
1066 BitVector *tryBlockAddr, const u2 *codePtr,
1069 const Method *method = cUnit->method;
1070 const DexCode *dexCode = dvmGetMethodCode(method);
1073 if (dvmIsBitSet(tryBlockAddr, curOffset)) {
1074 DexCatchIterator iterator;
1076 if (!dexFindCatchHandler(&iterator, dexCode, curOffset)) {
1077 LOGE("Catch block not found in dexfile for insn %x in %s",
1078 curOffset, method->name);
1082 if (curBlock->successorBlockList.blockListType != kNotUsed) {
1083 LOGE("Successor block list already in use: %d",
1084 curBlock->successorBlockList.blockListType);
1087 curBlock->successorBlockList.blockListType = kCatch;
1088 dvmInitGrowableList(&curBlock->successorBlockList.blocks, 2);
1091 DexCatchHandler* handler = dexCatchIteratorNext(&iterator);
1093 if (handler == NULL) {
/* Catch blocks were pre-created by processTryCatchBlocks(). */
1097 BasicBlock *catchBlock = findBlock(cUnit, handler->address,
1103 SuccessorBlockInfo *successorBlockInfo =
1104 (SuccessorBlockInfo *) dvmCompilerNew(sizeof(SuccessorBlockInfo),
1106 successorBlockInfo->block = catchBlock;
1107 successorBlockInfo->key = handler->typeIdx;
1108 dvmInsertGrowableList(&curBlock->successorBlockList.blocks,
1109 (intptr_t) successorBlockInfo);
1110 dvmCompilerSetBit(catchBlock->predecessors, curBlock->id);
/* Synthetic block representing "exception escapes the trace". */
1113 BasicBlock *ehBlock = dvmCompilerNewBB(kExceptionHandling,
1114 cUnit->numBlocks++);
1115 curBlock->taken = ehBlock;
1116 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) ehBlock);
1117 ehBlock->startOffset = curOffset;
1118 dvmCompilerSetBit(ehBlock->predecessors, curBlock->id);
1122 * Force the current block to terminate.
1124 * Data may be present before codeEnd, so we need to parse it to know
1125 * whether it is code or data.
1127 if (codePtr < codeEnd) {
1128 /* Create a fallthrough block for real instructions (incl. OP_NOP) */
1129 if (contentIsInsn(codePtr)) {
1130 BasicBlock *fallthroughBlock = findBlock(cUnit,
1137 * OP_THROW and OP_THROW_VERIFICATION_ERROR are unconditional
1140 if (insn->dalvikInsn.opcode != OP_THROW_VERIFICATION_ERROR &&
1141 insn->dalvikInsn.opcode != OP_THROW) {
1142 curBlock->fallThrough = fallthroughBlock;
1143 dvmCompilerSetBit(fallthroughBlock->predecessors, curBlock->id);
1150 * Similar to dvmCompileTrace, but the entity processed here is the whole
1153 * TODO: implementation will be revisited when the trace builder can provide
1154 * whole-method traces.
/*
 * Whole-method compilation driver: build the CFG (entry/exit blocks,
 * try/catch setup, one pass over all instructions dispatching to the
 * processCan* helpers), run SSA transformation and register allocation,
 * lower MIR to LIR, then assemble with retry on kRetryAll. On success
 * the code address is registered via dvmJitSetCodeAddr. Returns early
 * if the method was already compiled. The declaration of 'numBlocks'
 * is on an elided line (presumably starts at 0).
 */
1156 bool dvmCompileMethod(const Method *method, JitTranslationInfo *info)
1158 CompilationUnit cUnit;
1159 const DexCode *dexCode = dvmGetMethodCode(method);
1160 const u2 *codePtr = dexCode->insns;
1161 const u2 *codeEnd = dexCode->insns + dexCode->insnsSize;
1163 unsigned int curOffset = 0;
1165 /* Method already compiled */
1166 if (dvmJitGetMethodAddr(codePtr)) {
1167 info->codeAddress = NULL;
1171 memset(&cUnit, 0, sizeof(cUnit));
1172 cUnit.method = method;
1174 cUnit.jitMode = kJitMethod;
1176 /* Initialize the block list */
1177 dvmInitGrowableList(&cUnit.blockList, 4);
1180 * FIXME - PC reconstruction list won't be needed after the codegen routines
1181 * are enhanced to true method mode.
1183 /* Initialize the PC reconstruction list */
1184 dvmInitGrowableList(&cUnit.pcReconstructionList, 8);
1186 /* Allocate the bit-vector to track the beginning of basic blocks */
1187 BitVector *tryBlockAddr = dvmCompilerAllocBitVector(dexCode->insnsSize,
1188 true /* expandable */);
1189 cUnit.tryBlockAddr = tryBlockAddr;
1191 /* Create the default entry and exit blocks and enter them to the list */
1192 BasicBlock *entryBlock = dvmCompilerNewBB(kEntryBlock, numBlocks++);
1193 BasicBlock *exitBlock = dvmCompilerNewBB(kExitBlock, numBlocks++);
1195 cUnit.entryBlock = entryBlock;
1196 cUnit.exitBlock = exitBlock;
1198 dvmInsertGrowableList(&cUnit.blockList, (intptr_t) entryBlock);
1199 dvmInsertGrowableList(&cUnit.blockList, (intptr_t) exitBlock);
1201 /* Current block to record parsed instructions */
1202 BasicBlock *curBlock = dvmCompilerNewBB(kDalvikByteCode, numBlocks++);
1203 curBlock->startOffset = 0;
1204 dvmInsertGrowableList(&cUnit.blockList, (intptr_t) curBlock);
1205 entryBlock->fallThrough = curBlock;
1206 dvmCompilerSetBit(curBlock->predecessors, entryBlock->id);
1209 * Store back the number of blocks since new blocks may be created of
1212 cUnit.numBlocks = numBlocks;
1214 /* Identify code range in try blocks and set up the empty catch blocks */
1215 processTryCatchBlocks(&cUnit);
1217 /* Parse all instructions and put them into containing basic blocks */
1218 while (codePtr < codeEnd) {
1219 MIR *insn = (MIR *) dvmCompilerNew(sizeof(MIR), true);
1220 insn->offset = curOffset;
1221 int width = parseInsn(codePtr, &insn->dalvikInsn, false);
1222 insn->width = width;
1224 /* Terminate when the data section is seen */
1228 dvmCompilerAppendMIR(curBlock, insn);
1231 int flags = dexGetFlagsFromOpcode(insn->dalvikInsn.opcode);
/* Dispatch on instruction class: branch / return / throw / switch. */
1233 if (flags & kInstrCanBranch) {
1234 processCanBranch(&cUnit, curBlock, insn, curOffset, width, flags,
1236 } else if (flags & kInstrCanReturn) {
1237 curBlock->fallThrough = exitBlock;
1238 dvmCompilerSetBit(exitBlock->predecessors, curBlock->id);
1240 * Terminate the current block if there are instructions
1243 if (codePtr < codeEnd) {
1245 * Create a fallthrough block for real instructions
1248 if (contentIsInsn(codePtr)) {
1249 findBlock(&cUnit, curOffset + width,
1256 } else if (flags & kInstrCanThrow) {
1257 processCanThrow(&cUnit, curBlock, insn, curOffset, width, flags,
1258 tryBlockAddr, codePtr, codeEnd);
1259 } else if (flags & kInstrCanSwitch) {
1260 processCanSwitch(&cUnit, curBlock, insn, curOffset, width, flags);
/* Advance of codePtr/curOffset by 'width' happens on elided lines. */
1263 BasicBlock *nextBlock = findBlock(&cUnit, curOffset,
1270 * The next instruction could be the target of a previously parsed
1271 * forward branch so a block is already created. If the current
1272 * instruction is not an unconditional branch, connect them through
1273 * the fall-through link.
1275 assert(curBlock->fallThrough == NULL ||
1276 curBlock->fallThrough == nextBlock ||
1277 curBlock->fallThrough == exitBlock);
1279 if ((curBlock->fallThrough == NULL) &&
1280 (flags & kInstrCanContinue)) {
1281 curBlock->fallThrough = nextBlock;
1282 dvmCompilerSetBit(nextBlock->predecessors, curBlock->id);
1284 curBlock = nextBlock;
1288 if (cUnit.printMe) {
1289 dvmCompilerDumpCompilationUnit(&cUnit);
1292 /* Adjust this value accordingly once inlining is performed */
1293 cUnit.numDalvikRegisters = cUnit.method->registersSize;
1295 /* Verify if all blocks are connected as claimed */
1296 /* FIXME - to be disabled in the future */
1297 dvmCompilerDataFlowAnalysisDispatcher(&cUnit, verifyPredInfo,
1299 false /* isIterative */);
1302 /* Perform SSA transformation for the whole method */
1303 dvmCompilerMethodSSATransformation(&cUnit);
1305 dvmCompilerInitializeRegAlloc(&cUnit); // Needs to happen after SSA naming
1307 /* Allocate Registers using simple local allocation scheme */
1308 dvmCompilerLocalRegAlloc(&cUnit);
1310 /* Convert MIR to LIR, etc. */
1311 dvmCompilerMethodMIR2LIR(&cUnit);
1314 //dvmDumpCFG(&cUnit, "/sdcard/cfg/");
1316 /* Method is not empty */
1317 if (cUnit.firstLIRInsn) {
1318 /* Convert LIR into machine code. Loop for recoverable retries */
1320 dvmCompilerAssembleLIR(&cUnit, info);
1321 cUnit.assemblerRetries++;
1322 if (cUnit.printMe && cUnit.assemblerStatus != kSuccess)
1323 LOGD("Assembler abort #%d on %d",cUnit.assemblerRetries,
1324 cUnit.assemblerStatus);
1325 } while (cUnit.assemblerStatus == kRetryAll);
1327 if (cUnit.printMe) {
1328 dvmCompilerCodegenDump(&cUnit);
1331 if (info->codeAddress) {
1332 dvmJitSetCodeAddr(dexCode->insns, info->codeAddress,
1333 info->instructionSet, true, 0);
1335 * Clear the codeAddress for the enclosing trace to reuse the info
1337 info->codeAddress = NULL;
1344 /* Extending the trace by crawling the code from curBlock */
/*
 * NOTE(review): brace/blank-only lines appear to have been elided from this
 * chunk by extraction; the code lines below are kept byte-identical.
 *
 * Depth-first CFG extension: decodes instructions starting at
 * curBlock->startOffset into MIRs, appends them to the block, and recurses
 * into taken/fallThrough successors.  Returns false immediately when
 * curBlock was already visited; otherwise marks it visited and extends it.
 */
1345 static bool exhaustTrace(CompilationUnit *cUnit, BasicBlock *curBlock)
1347 unsigned int curOffset = curBlock->startOffset;
1348 const u2 *codePtr = cUnit->method->insns + curOffset;
/* Already processed - nothing new to contribute from this path */
1350 if (curBlock->visited == true) return false;
1352 curBlock->visited = true;
/* Entry/exit pseudo-blocks hold no Dalvik code to parse */
1354 if (curBlock->blockType == kEntryBlock ||
1355 curBlock->blockType == kExitBlock) {
1360 * Block has been parsed - check the taken/fallThrough in case it is a split
1363 if (curBlock->firstMIRInsn != NULL) {
1364 bool changed = false;
1365 if (curBlock->taken)
1366 changed |= exhaustTrace(cUnit, curBlock->taken);
1367 if (curBlock->fallThrough)
1368 changed |= exhaustTrace(cUnit, curBlock->fallThrough);
/* Decode the next instruction into a fresh, zero-initialized MIR */
1372 MIR *insn = (MIR *) dvmCompilerNew(sizeof(MIR), true);
1373 insn->offset = curOffset;
1374 int width = parseInsn(codePtr, &insn->dalvikInsn, false);
1375 insn->width = width;
1377 /* Terminate when the data section is seen */
1381 dvmCompilerAppendMIR(curBlock, insn);
1384 int flags = dexGetFlagsFromOpcode(insn->dalvikInsn.opcode);
1386 /* Stop extending the trace after seeing these instructions */
1387 if (flags & (kInstrCanReturn | kInstrCanSwitch | kInstrInvoke)) {
1388 curBlock->fallThrough = cUnit->exitBlock;
1389 dvmCompilerSetBit(cUnit->exitBlock->predecessors, curBlock->id);
1391 } else if (flags & kInstrCanBranch) {
/* Branch: let processCanBranch set up successors, then crawl both edges */
1392 processCanBranch(cUnit, curBlock, insn, curOffset, width, flags,
1394 if (curBlock->taken) {
1395 exhaustTrace(cUnit, curBlock->taken);
1397 if (curBlock->fallThrough) {
1398 exhaustTrace(cUnit, curBlock->fallThrough);
1403 BasicBlock *nextBlock = findBlock(cUnit, curOffset,
1410 * The next instruction could be the target of a previously parsed
1411 * forward branch so a block is already created. If the current
1412 * instruction is not an unconditional branch, connect them through
1413 * the fall-through link.
1415 assert(curBlock->fallThrough == NULL ||
1416 curBlock->fallThrough == nextBlock ||
1417 curBlock->fallThrough == cUnit->exitBlock);
1419 if ((curBlock->fallThrough == NULL) &&
1420 (flags & kInstrCanContinue)) {
1421 curBlock->needFallThroughBranch = true;
1422 curBlock->fallThrough = nextBlock;
1423 dvmCompilerSetBit(nextBlock->predecessors, curBlock->id);
1425 /* Block has been visited - no more parsing needed */
1426 if (nextBlock->visited == true) {
1429 curBlock = nextBlock;
1435 /* Compile a loop */
/*
 * NOTE(review): brace/blank-only lines appear to have been elided from this
 * chunk by extraction; the code lines below are kept byte-identical.
 *
 * Builds a CFG for a loop trace rooted at startOffset: entry/exit blocks,
 * one bytecode block crawled via exhaustTrace, plus backward-chaining,
 * PC-reconstruction and exception-handling pseudo blocks.  Then runs loop
 * formation/optimization, register allocation and code generation.  On
 * bail-out it recompiles the trace via dvmCompileTrace with the
 * JIT_OPT_NO_LOOP hint set.  Returns whether code was installed
 * (info->codeAddress != NULL).
 */
1436 static bool compileLoop(CompilationUnit *cUnit, unsigned int startOffset,
1437 JitTraceDescription *desc, int numMaxInsts,
1438 JitTranslationInfo *info, jmp_buf *bailPtr,
1442 unsigned int curOffset = startOffset;
1445 #if defined(WITH_JIT_TUNING)
1446 CompilerMethodStats *methodStats;
1449 cUnit->jitMode = kJitLoop;
1451 /* Initialize the block list */
1452 dvmInitGrowableList(&cUnit->blockList, 4);
1454 /* Initialize the PC reconstruction list */
1455 dvmInitGrowableList(&cUnit->pcReconstructionList, 8);
1457 /* Create the default entry and exit blocks and enter them to the list */
1458 BasicBlock *entryBlock = dvmCompilerNewBB(kEntryBlock, numBlocks++);
1459 entryBlock->startOffset = curOffset;
1460 BasicBlock *exitBlock = dvmCompilerNewBB(kExitBlock, numBlocks++);
1462 cUnit->entryBlock = entryBlock;
1463 cUnit->exitBlock = exitBlock;
1465 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) entryBlock);
1466 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) exitBlock);
1468 /* Current block to record parsed instructions */
1469 BasicBlock *curBlock = dvmCompilerNewBB(kDalvikByteCode, numBlocks++);
1470 curBlock->startOffset = curOffset;
1472 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) curBlock);
1473 entryBlock->fallThrough = curBlock;
1474 dvmCompilerSetBit(curBlock->predecessors, entryBlock->id);
1477 * Store back the number of blocks since new blocks may be created of
1480 cUnit->numBlocks = numBlocks;
/* Clear visited flags, then crawl the code to build the loop body CFG */
1483 dvmCompilerDataFlowAnalysisDispatcher(cUnit,
1484 dvmCompilerClearVisitedFlag,
1486 false /* isIterative */);
1487 changed = exhaustTrace(cUnit, curBlock);
1490 /* Backward chaining block */
1491 bb = dvmCompilerNewBB(kChainingCellBackwardBranch, cUnit->numBlocks++);
1492 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb);
1493 cUnit->backChainBlock = bb;
1495 /* A special block to host PC reconstruction code */
1496 bb = dvmCompilerNewBB(kPCReconstruction, cUnit->numBlocks++);
1497 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb);
1499 /* And one final block that publishes the PC and raises the exception */
1500 bb = dvmCompilerNewBB(kExceptionHandling, cUnit->numBlocks++);
1501 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb);
1502 cUnit->puntBlock = bb;
1504 cUnit->numDalvikRegisters = cUnit->method->registersSize;
1506 /* Verify if all blocks are connected as claimed */
1507 /* FIXME - to be disabled in the future */
1508 dvmCompilerDataFlowAnalysisDispatcher(cUnit, verifyPredInfo,
1510 false /* isIterative */);
1513 /* Try to identify a loop */
1514 if (!dvmCompilerBuildLoop(cUnit))
1517 dvmCompilerLoopOpt(cUnit);
1520 * Change the backward branch to the backward chaining cell after dataflow
1521 * analysis/optimizations are done.
1523 dvmCompilerInsertBackwardChaining(cUnit);
/* Register allocation must follow the SSA/dataflow work above */
1525 dvmCompilerInitializeRegAlloc(cUnit);
1527 /* Allocate Registers using simple local allocation scheme */
1528 dvmCompilerLocalRegAlloc(cUnit);
1530 /* Convert MIR to LIR, etc. */
1531 dvmCompilerMIR2LIR(cUnit);
1533 /* Loop contains never executed blocks / heavy instructions */
1534 if (cUnit->quitLoopMode) {
1535 if (cUnit->printMe || gDvmJit.receivedSIGUSR2) {
1536 LOGD("Loop trace @ offset %04x aborted due to unresolved code info",
1537 cUnit->entryBlock->startOffset);
1542 /* Convert LIR into machine code. Loop for recoverable retries */
1544 dvmCompilerAssembleLIR(cUnit, info);
1545 cUnit->assemblerRetries++;
1546 if (cUnit->printMe && cUnit->assemblerStatus != kSuccess)
1547 LOGD("Assembler abort #%d on %d", cUnit->assemblerRetries,
1548 cUnit->assemblerStatus);
1549 } while (cUnit->assemblerStatus == kRetryAll);
1551 /* Loop is too big - bail out */
1552 if (cUnit->assemblerStatus == kRetryHalve) {
1556 if (cUnit->printMe || gDvmJit.receivedSIGUSR2) {
1557 LOGD("Loop trace @ offset %04x", cUnit->entryBlock->startOffset);
1558 dvmCompilerCodegenDump(cUnit);
1562 * If this trace uses class objects as constants,
1563 * dvmJitInstallClassObjectPointers will switch the thread state
1564 * to running and look up the class pointers using the descriptor/loader
1565 * tuple stored in the callsite info structure. We need to make this window
1566 * as short as possible since it is blocking GC.
1568 if (cUnit->hasClassLiterals && info->codeAddress) {
1569 dvmJitInstallClassObjectPointers(cUnit, (char *) info->codeAddress);
1573 * Since callsiteinfo is allocated from the arena, delay the reset until
1574 * class pointers are resolved.
1576 dvmCompilerArenaReset();
1578 assert(cUnit->assemblerStatus == kSuccess);
1579 #if defined(WITH_JIT_TUNING)
1580 /* Locate the entry to store compilation statistics for this method */
1581 methodStats = dvmCompilerAnalyzeMethodBody(desc->method, false);
1582 methodStats->nativeSize += cUnit->totalSize;
1584 return info->codeAddress != NULL;
1587 /* Retry the original trace with JIT_OPT_NO_LOOP disabled */
/* (i.e. set the JIT_OPT_NO_LOOP hint so the retry skips loop compilation) */
1588 dvmCompilerArenaReset();
1589 return dvmCompileTrace(desc, numMaxInsts, info, bailPtr,
1590 optHints | JIT_OPT_NO_LOOP);
1594 * Main entry point to start trace compilation. Basic blocks are constructed
1595 * first and they will be passed to the codegen routines to convert Dalvik
1596 * bytecode into machine code.
/*
 * NOTE(review): brace/blank-only lines appear to have been elided from this
 * chunk by extraction; the code lines below are kept byte-identical.
 *
 * desc        - trace description (method + fragment runs) to compile
 * numMaxInsts - cap on the number of Dalvik instructions included in the IR
 * info        - in/out translation info; info->codeAddress receives the
 *               installed code pointer
 * bailPtr     - longjmp buffer used to recover from compilation aborts
 * Returns true when info->codeAddress is non-NULL at the end.
 */
1598 bool dvmCompileTrace(JitTraceDescription *desc, int numMaxInsts,
1599 JitTranslationInfo *info, jmp_buf *bailPtr,
1602 const DexCode *dexCode = dvmGetMethodCode(desc->method);
1603 const JitTraceRun* currRun = &desc->trace[0];
1604 unsigned int curOffset = currRun->info.frag.startOffset;
1605 unsigned int startOffset = curOffset;
1606 unsigned int numInsts = currRun->info.frag.numInsts;
1607 const u2 *codePtr = dexCode->insns + curOffset;
1608 int traceSize = 0; // # of half-words
1609 const u2 *startCodePtr = codePtr;
1610 BasicBlock *curBB, *entryCodeBB;
1612 static int compilationId;
1613 CompilationUnit cUnit;
1614 GrowableList *blockList;
1615 #if defined(WITH_JIT_TUNING)
1616 CompilerMethodStats *methodStats;
1619 /* If we've already compiled this trace, just return success */
1620 if (dvmJitGetTraceAddr(startCodePtr) && !info->discardResult) {
1622 * Make sure the codeAddress is NULL so that it won't clobber the
1625 info->codeAddress = NULL;
1629 /* If the work order is stale, discard it */
1630 if (info->cacheVersion != gDvmJit.cacheVersion) {
1635 memset(&cUnit, 0, sizeof(CompilationUnit));
1637 #if defined(WITH_JIT_TUNING)
1638 /* Locate the entry to store compilation statistics for this method */
1639 methodStats = dvmCompilerAnalyzeMethodBody(desc->method, false);
1642 /* Set the recover buffer pointer */
1643 cUnit.bailPtr = bailPtr;
1645 /* Initialize the printMe flag */
1646 cUnit.printMe = gDvmJit.printMe;
1648 /* Setup the method */
1649 cUnit.method = desc->method;
1651 /* Store the trace descriptor and set the initial mode */
1652 cUnit.traceDesc = desc;
1653 cUnit.jitMode = kJitTrace;
1655 /* Initialize the PC reconstruction list */
1656 dvmInitGrowableList(&cUnit.pcReconstructionList, 8);
1658 /* Initialize the basic block list */
1659 blockList = &cUnit.blockList;
1660 dvmInitGrowableList(blockList, 8);
1662 /* Identify traces that we don't want to compile */
1663 if (gDvmJit.methodTable) {
1664 int len = strlen(desc->method->clazz->descriptor) +
1665 strlen(desc->method->name) + 1;
1666 char *fullSignature = (char *)dvmCompilerNew(len, true);
1667 strcpy(fullSignature, desc->method->clazz->descriptor);
1668 strcat(fullSignature, desc->method->name);
1670 int hashValue = dvmComputeUtf8Hash(fullSignature);
1673 * Doing three levels of screening to see whether we want to skip
1674 * compiling this method
1677 /* First, check the full "class;method" signature */
1679 dvmHashTableLookup(gDvmJit.methodTable, hashValue,
1680 fullSignature, (HashCompareFunc) strcmp,
1684 /* Full signature not found - check the enclosing class */
1685 if (methodFound == false) {
1686 int hashValue = dvmComputeUtf8Hash(desc->method->clazz->descriptor);
1688 dvmHashTableLookup(gDvmJit.methodTable, hashValue,
1689 (char *) desc->method->clazz->descriptor,
1690 (HashCompareFunc) strcmp, false) !=
1692 /* Enclosing class not found - check the method name */
1693 if (methodFound == false) {
1694 int hashValue = dvmComputeUtf8Hash(desc->method->name);
1696 dvmHashTableLookup(gDvmJit.methodTable, hashValue,
1697 (char *) desc->method->name,
1698 (HashCompareFunc) strcmp, false) !=
1702 * Debug by call-graph is enabled. Check if the debug list
1703 * covers any methods on the VM stack.
1705 if (methodFound == false && gDvmJit.checkCallGraph == true) {
1707 filterMethodByCallGraph(info->requestingThread,
1708 desc->method->name);
1714 * Under the following conditions, the trace will be *conservatively*
1715 * compiled by only containing single-step instructions to and from the
1717 * 1) If includeSelectedMethod == false, the method matches the full or
1718 * partial signature stored in the hash table.
1720 * 2) If includeSelectedMethod == true, the method does not match the
1721 * full and partial signature stored in the hash table.
1723 if (gDvmJit.includeSelectedMethod != methodFound) {
1724 cUnit.allSingleStep = true;
1726 /* Compile the trace as normal */
1728 /* Print the method we cherry picked */
1729 if (gDvmJit.includeSelectedMethod == true) {
1730 cUnit.printMe = true;
1735 /* Allocate the entry block */
1736 curBB = dvmCompilerNewBB(kEntryBlock, numBlocks++);
1737 dvmInsertGrowableList(blockList, (intptr_t) curBB);
1738 curBB->startOffset = curOffset;
1740 entryCodeBB = dvmCompilerNewBB(kDalvikByteCode, numBlocks++);
1741 dvmInsertGrowableList(blockList, (intptr_t) entryCodeBB);
1742 entryCodeBB->startOffset = curOffset;
1743 curBB->fallThrough = entryCodeBB;
1744 curBB = entryCodeBB;
/* From here on, curBB is the block currently receiving decoded MIRs */
1746 if (cUnit.printMe) {
1747 LOGD("--------\nCompiler: Building trace for %s, offset 0x%x\n",
1748 desc->method->name, curOffset);
1752 * Analyze the trace descriptor and include up to the maximal number
1753 * of Dalvik instructions into the IR.
1758 insn = (MIR *)dvmCompilerNew(sizeof(MIR), true);
1759 insn->offset = curOffset;
1760 width = parseInsn(codePtr, &insn->dalvikInsn, cUnit.printMe);
1762 /* The trace should never include instruction data */
1764 insn->width = width;
1766 dvmCompilerAppendMIR(curBB, insn);
1769 int flags = dexGetFlagsFromOpcode(insn->dalvikInsn.opcode);
1771 if (flags & kInstrInvoke) {
/* Record callee identity from the meta entries that follow the invoke */
1772 const Method *calleeMethod = (const Method *)
1773 currRun[JIT_TRACE_CUR_METHOD].info.meta;
1774 assert(numInsts == 1);
1775 CallsiteInfo *callsiteInfo =
1776 (CallsiteInfo *)dvmCompilerNew(sizeof(CallsiteInfo), true);
1777 callsiteInfo->classDescriptor = (const char *)
1778 currRun[JIT_TRACE_CLASS_DESC].info.meta;
1779 callsiteInfo->classLoader = (Object *)
1780 currRun[JIT_TRACE_CLASS_LOADER].info.meta;
1781 callsiteInfo->method = calleeMethod;
1782 insn->meta.callsiteInfo = callsiteInfo;
1785 /* Instruction limit reached - terminate the trace here */
1786 if (cUnit.numInsts >= numMaxInsts) {
1789 if (--numInsts == 0) {
1790 if (currRun->info.frag.runEnd) {
1793 /* Advance to the next trace description (ie non-meta info) */
1796 } while (!currRun->isCode);
1798 /* Dummy end-of-run marker seen */
1799 if (currRun->info.frag.numInsts == 0) {
1803 curBB = dvmCompilerNewBB(kDalvikByteCode, numBlocks++);
1804 dvmInsertGrowableList(blockList, (intptr_t) curBB);
1805 curOffset = currRun->info.frag.startOffset;
1806 numInsts = currRun->info.frag.numInsts;
1807 curBB->startOffset = curOffset;
1808 codePtr = dexCode->insns + curOffset;
/* NOTE(review): parsing continues here with the next fragment run */
1816 #if defined(WITH_JIT_TUNING)
1817 /* Convert # of half-word to bytes */
1818 methodStats->compiledDalvikSize += traceSize * 2;
1822 * Now scan basic blocks containing real code to connect the
1823 * taken/fallthrough links. Also create chaining cells for code not included
1827 for (blockId = 0; blockId < blockList->numUsed; blockId++) {
1828 curBB = (BasicBlock *) dvmGrowableListGetElement(blockList, blockId);
1829 MIR *lastInsn = curBB->lastMIRInsn;
1830 /* Skip empty blocks */
1831 if (lastInsn == NULL) {
1834 curOffset = lastInsn->offset;
1835 unsigned int targetOffset = curOffset;
1836 unsigned int fallThroughOffset = curOffset + lastInsn->width;
1837 bool isInvoke = false;
1838 const Method *callee = NULL;
1840 findBlockBoundary(desc->method, curBB->lastMIRInsn, curOffset,
1841 &targetOffset, &isInvoke, &callee);
1843 /* Link the taken and fallthrough blocks */
1844 BasicBlock *searchBB;
1846 int flags = dexGetFlagsFromOpcode(lastInsn->dalvikInsn.opcode);
1848 if (flags & kInstrInvoke) {
1849 cUnit.hasInvoke = true;
1852 /* Backward branch seen */
1853 if (isInvoke == false &&
1854 (flags & kInstrCanBranch) != 0 &&
1855 targetOffset < curOffset &&
1856 (optHints & JIT_OPT_NO_LOOP) == 0) {
/* Hand the whole trace over to the loop compiler; arena reset first */
1857 dvmCompilerArenaReset();
1858 return compileLoop(&cUnit, startOffset, desc, numMaxInsts,
1859 info, bailPtr, optHints);
1862 /* No backward branch in the trace - start searching the next BB */
1863 size_t searchBlockId;
1864 for (searchBlockId = blockId+1; searchBlockId < blockList->numUsed;
1866 searchBB = (BasicBlock *) dvmGrowableListGetElement(blockList,
1868 if (targetOffset == searchBB->startOffset) {
1869 curBB->taken = searchBB;
1870 dvmCompilerSetBit(searchBB->predecessors, curBB->id);
1872 if (fallThroughOffset == searchBB->startOffset) {
1873 curBB->fallThrough = searchBB;
1874 dvmCompilerSetBit(searchBB->predecessors, curBB->id);
1877 * Fallthrough block of an invoke instruction needs to be
1878 * aligned to 4-byte boundary (alignment instruction to be
1881 if (flags & kInstrInvoke) {
1882 searchBB->isFallThroughFromInvoke = true;
1888 * Some blocks are ended by non-control-flow-change instructions,
1889 * currently only due to trace length constraint. In this case we need
1890 * to generate an explicit branch at the end of the block to jump to
1891 * the chaining cell.
1893 curBB->needFallThroughBranch =
1894 ((flags & (kInstrCanBranch | kInstrCanSwitch | kInstrCanReturn |
1895 kInstrInvoke)) == 0);
1896 if (lastInsn->dalvikInsn.opcode == OP_PACKED_SWITCH ||
1897 lastInsn->dalvikInsn.opcode == OP_SPARSE_SWITCH) {
/* Switch payload lives at lastInsn->offset + vB; word [1] is case count */
1899 const u2 *switchData = desc->method->insns + lastInsn->offset +
1900 lastInsn->dalvikInsn.vB;
1901 int size = switchData[1];
1902 int maxChains = MIN(size, MAX_CHAINED_SWITCH_CASES);
1905 * Generate the landing pad for cases whose ranks are higher than
1906 * MAX_CHAINED_SWITCH_CASES. The code will re-enter the interpreter
1907 * through the NoChain point.
1909 if (maxChains != size) {
1910 cUnit.switchOverflowPad =
1911 desc->method->insns + lastInsn->offset;
1914 s4 *targets = (s4 *) (switchData + 2 +
1915 (lastInsn->dalvikInsn.opcode == OP_PACKED_SWITCH ?
1918 /* One chaining cell for the first MAX_CHAINED_SWITCH_CASES cases */
1919 for (i = 0; i < maxChains; i++) {
1920 BasicBlock *caseChain = dvmCompilerNewBB(kChainingCellNormal,
1922 dvmInsertGrowableList(blockList, (intptr_t) caseChain);
1923 caseChain->startOffset = lastInsn->offset + targets[i];
1926 /* One more chaining cell for the default case */
1927 BasicBlock *caseChain = dvmCompilerNewBB(kChainingCellNormal,
1929 dvmInsertGrowableList(blockList, (intptr_t) caseChain);
1930 caseChain->startOffset = lastInsn->offset + lastInsn->width;
1931 /* Fallthrough block not included in the trace */
1932 } else if (!isUnconditionalBranch(lastInsn) &&
1933 curBB->fallThrough == NULL) {
1934 BasicBlock *fallThroughBB;
1936 * If the chaining cell is after an invoke or
1937 * instruction that cannot change the control flow, request a hot
1940 if (isInvoke || curBB->needFallThroughBranch) {
1941 fallThroughBB = dvmCompilerNewBB(kChainingCellHot, numBlocks++);
1943 fallThroughBB = dvmCompilerNewBB(kChainingCellNormal,
1946 dvmInsertGrowableList(blockList, (intptr_t) fallThroughBB);
1947 fallThroughBB->startOffset = fallThroughOffset;
1948 curBB->fallThrough = fallThroughBB;
1949 dvmCompilerSetBit(fallThroughBB->predecessors, curBB->id);
1951 /* Target block not included in the trace */
1952 if (curBB->taken == NULL &&
1953 (isGoto(lastInsn) || isInvoke ||
1954 (targetOffset != UNKNOWN_TARGET && targetOffset != curOffset))) {
1955 BasicBlock *newBB = NULL;
1957 /* Monomorphic callee */
1959 /* JNI call doesn't need a chaining cell */
1960 if (!dvmIsNativeMethod(callee)) {
1961 newBB = dvmCompilerNewBB(kChainingCellInvokeSingleton,
1963 newBB->startOffset = 0;
1964 newBB->containingMethod = callee;
1966 /* Will resolve at runtime */
1968 newBB = dvmCompilerNewBB(kChainingCellInvokePredicted,
1970 newBB->startOffset = 0;
1972 /* For unconditional branches, request a hot chaining cell */
1974 #if !defined(WITH_SELF_VERIFICATION)
1975 newBB = dvmCompilerNewBB(dexIsGoto(flags) ?
1977 kChainingCellNormal,
1979 newBB->startOffset = targetOffset;
1981 /* Handle branches that branch back into the block */
1982 if (targetOffset >= curBB->firstMIRInsn->offset &&
1983 targetOffset <= curBB->lastMIRInsn->offset) {
1984 newBB = dvmCompilerNewBB(kChainingCellBackwardBranch,
1987 newBB = dvmCompilerNewBB(dexIsGoto(flags) ?
1989 kChainingCellNormal,
1992 newBB->startOffset = targetOffset;
1996 curBB->taken = newBB;
1997 dvmCompilerSetBit(newBB->predecessors, curBB->id);
1998 dvmInsertGrowableList(blockList, (intptr_t) newBB);
2003 /* Now create a special block to host PC reconstruction code */
2004 curBB = dvmCompilerNewBB(kPCReconstruction, numBlocks++);
2005 dvmInsertGrowableList(blockList, (intptr_t) curBB);
2007 /* And one final block that publishes the PC and raise the exception */
2008 curBB = dvmCompilerNewBB(kExceptionHandling, numBlocks++);
2009 dvmInsertGrowableList(blockList, (intptr_t) curBB);
2010 cUnit.puntBlock = curBB;
2012 if (cUnit.printMe) {
2014 dexProtoCopyMethodDescriptor(&desc->method->prototype);
2015 LOGD("TRACEINFO (%d): 0x%08x %s%s.%s 0x%x %d of %d, %d blocks",
2017 (intptr_t) desc->method->insns,
2018 desc->method->clazz->descriptor,
2021 desc->trace[0].info.frag.startOffset,
2028 cUnit.numBlocks = numBlocks;
2030 /* Set the instruction set to use (NOTE: later components may change it) */
2031 cUnit.instructionSet = dvmCompilerInstructionSet();
2033 /* Inline transformation @ the MIR level */
2034 if (cUnit.hasInvoke && !(gDvmJit.disableOpt & (1 << kMethodInlining))) {
2035 dvmCompilerInlineMIR(&cUnit, info);
2038 cUnit.numDalvikRegisters = cUnit.method->registersSize;
2040 /* Preparation for SSA conversion */
2041 dvmInitializeSSAConversion(&cUnit);
2043 dvmCompilerNonLoopAnalysis(&cUnit);
2045 dvmCompilerInitializeRegAlloc(&cUnit); // Needs to happen after SSA naming
2047 if (cUnit.printMe) {
2048 dvmCompilerDumpCompilationUnit(&cUnit);
2051 /* Allocate Registers using simple local allocation scheme */
2052 dvmCompilerLocalRegAlloc(&cUnit);
2054 /* Convert MIR to LIR, etc. */
2055 dvmCompilerMIR2LIR(&cUnit);
2057 /* Convert LIR into machine code. Loop for recoverable retries */
2059 dvmCompilerAssembleLIR(&cUnit, info);
2060 cUnit.assemblerRetries++;
2061 if (cUnit.printMe && cUnit.assemblerStatus != kSuccess)
2062 LOGD("Assembler abort #%d on %d",cUnit.assemblerRetries,
2063 cUnit.assemblerStatus);
2064 } while (cUnit.assemblerStatus == kRetryAll);
2066 if (cUnit.printMe) {
2067 LOGD("Trace Dalvik PC: %p", startCodePtr);
2068 dvmCompilerCodegenDump(&cUnit);
2069 LOGD("End %s%s, %d Dalvik instructions",
2070 desc->method->clazz->descriptor, desc->method->name,
2074 if (cUnit.assemblerStatus == kRetryHalve) {
2075 /* Reset the compiler resource pool before retry */
2076 dvmCompilerArenaReset();
2078 /* Halve the instruction count and start from the top */
2079 return dvmCompileTrace(desc, cUnit.numInsts / 2, info, bailPtr,
2084 * If this trace uses class objects as constants,
2085 * dvmJitInstallClassObjectPointers will switch the thread state
2086 * to running and look up the class pointers using the descriptor/loader
2087 * tuple stored in the callsite info structure. We need to make this window
2088 * as short as possible since it is blocking GC.
2090 if (cUnit.hasClassLiterals && info->codeAddress) {
2091 dvmJitInstallClassObjectPointers(&cUnit, (char *) info->codeAddress);
2095 * Since callsiteinfo is allocated from the arena, delay the reset until
2096 * class pointers are resolved.
2098 dvmCompilerArenaReset();
2100 assert(cUnit.assemblerStatus == kSuccess);
2101 #if defined(WITH_JIT_TUNING)
2102 methodStats->nativeSize += cUnit.totalSize;
2105 return info->codeAddress != NULL;