From 3f4b63f47f50f200538e83fa3fac06947afa08b4 Mon Sep 17 00:00:00 2001 From: Andy McFadden Date: Mon, 13 Sep 2010 14:04:02 -0700 Subject: [PATCH] Emit return-void-barrier when appropriate. If SMP is enabled, and we're optimizing a constructor, and the class in question has final fields, we replace all occurrences of "return-void" with "return-void-barrier". This is an "essential" optimization for SMP, meaning it will be done regardless of the verification/optimization settings. Also, split the updateCode() helper function into two versions (one to update the whole 16-bit code unit, one to replace just the opcode). Also, make the presence of a duplicate class in the bootstrap class path a failure for optimization as well as verification. Otherwise, if you have "-Xdexopt:all", you could end up trying to optimize a class loaded from a bootstrap DEX file, which doesn't work well since those classes are mapped read-only in dexopt. Bug 2965743. Change-Id: I29e67133731b59beb6af5003f3cd69302c5c20f5 --- vm/analysis/DexPrepare.c | 42 ++++++++++++--------- vm/analysis/Optimize.c | 96 ++++++++++++++++++++++++++++++++++++++++-------- 2 files changed, 106 insertions(+), 32 deletions(-) diff --git a/vm/analysis/DexPrepare.c b/vm/analysis/DexPrepare.c index 11fd7be74..d361290c9 100644 --- a/vm/analysis/DexPrepare.c +++ b/vm/analysis/DexPrepare.c @@ -925,30 +925,38 @@ static void verifyAndOptimizeClass(DexFile* pDexFile, ClassObject* clazz, const char* classDescriptor; bool verified = false; + if (clazz->pDvmDex->pDexFile != pDexFile) { + /* + * The current DEX file defined a class that is also present in the + * bootstrap class path. The class loader favored the bootstrap + * version, which means that we have a pointer to a class that is + * (a) not the one we want to examine, and (b) mapped read-only, + * so we will seg fault if we try to rewrite instructions inside it. 
+ */ + LOGD("DexOpt: not verifying/optimizing '%s': multiple definitions\n", + clazz->descriptor); + return; + } + classDescriptor = dexStringByTypeIdx(pDexFile, pClassDef->classIdx); /* * First, try to verify it. */ if (doVerify) { - if (clazz->pDvmDex->pDexFile != pDexFile) { - LOGD("DexOpt: not verifying '%s': multiple definitions\n", - classDescriptor); + if (dvmVerifyClass(clazz)) { + /* + * Set the "is preverified" flag in the DexClassDef. We + * do it here, rather than in the ClassObject structure, + * because the DexClassDef is part of the odex file. + */ + assert((clazz->accessFlags & JAVA_FLAGS_MASK) == + pClassDef->accessFlags); + ((DexClassDef*)pClassDef)->accessFlags |= CLASS_ISPREVERIFIED; + verified = true; } else { - if (dvmVerifyClass(clazz)) { - /* - * Set the "is preverified" flag in the DexClassDef. We - * do it here, rather than in the ClassObject structure, - * because the DexClassDef is part of the odex file. - */ - assert((clazz->accessFlags & JAVA_FLAGS_MASK) == - pClassDef->accessFlags); - ((DexClassDef*)pClassDef)->accessFlags |= CLASS_ISPREVERIFIED; - verified = true; - } else { - // TODO: log when in verbose mode - LOGV("DexOpt: '%s' failed verification\n", classDescriptor); - } + // TODO: log when in verbose mode + LOGV("DexOpt: '%s' failed verification\n", classDescriptor); } } diff --git a/vm/analysis/Optimize.c b/vm/analysis/Optimize.c index 7ad4e454d..f07addf01 100644 --- a/vm/analysis/Optimize.c +++ b/vm/analysis/Optimize.c @@ -46,6 +46,8 @@ static bool rewriteExecuteInline(Method* method, u2* insns, MethodType methodType); static bool rewriteExecuteInlineRange(Method* method, u2* insns, MethodType methodType); +static void rewriteReturnVoid(Method* method, u2* insns); +static bool needsReturnBarrier(Method* method); /* @@ -180,6 +182,9 @@ static void optimizeMethod(Method* method, bool essentialOnly) if (dvmIsNativeMethod(method) || dvmIsAbstractMethod(method)) return; + /* compute this once per method */ + bool needRetBar = 
needsReturnBarrier(method); + insns = (u2*) method->insns; assert(insns != NULL); insnsSize = dvmGetMethodInsnsSize(method); @@ -274,6 +279,11 @@ rewrite_static_field2: rewriteStaticField(method, insns, volatileOpc); notMatched = false; break; + case OP_RETURN_VOID: + if (needRetBar) + rewriteReturnVoid(method, insns); + notMatched = false; + break; default: assert(notMatched); break; @@ -337,7 +347,7 @@ rewrite_static_field2: /* * Update a 16-bit code unit in "meth". */ -static inline void updateCode(const Method* meth, u2* ptr, u2 newVal) +static inline void updateCodeUnit(const Method* meth, u2* ptr, u2 newVal) { if (gDvm.optimizing) { /* dexopt time, alter the output directly */ @@ -349,6 +359,14 @@ static inline void updateCode(const Method* meth, u2* ptr, u2 newVal) } /* + * Update the 8-bit opcode portion of a 16-bit code unit in "meth". + */ +static inline void updateOpCode(const Method* meth, u2* ptr, OpCode opCode) +{ + updateCodeUnit(meth, ptr, (ptr[0] & 0xff00) | (u2) opCode); +} + +/* * If "referrer" and "resClass" don't come from the same DEX file, and * the DEX we're working on is not destined for the bootstrap class path, * tweak the class loader so package-access checks work correctly. 
@@ -655,12 +673,12 @@ static bool rewriteInstField(Method* method, u2* insns, OpCode quickOpc, } if (volatileOpc != OP_NOP && dvmIsVolatileField(&instField->field)) { - updateCode(method, insns, (insns[0] & 0xff00) | (u2) volatileOpc); + updateOpCode(method, insns, volatileOpc); LOGV("DexOpt: rewrote ifield access %s.%s --> volatile\n", instField->field.clazz->descriptor, instField->field.name); } else if (quickOpc != OP_NOP) { - updateCode(method, insns, (insns[0] & 0xff00) | (u2) quickOpc); - updateCode(method, insns+1, (u2) instField->byteOffset); + updateOpCode(method, insns, quickOpc); + updateCodeUnit(method, insns+1, (u2) instField->byteOffset); LOGV("DexOpt: rewrote ifield access %s.%s --> %d\n", instField->field.clazz->descriptor, instField->field.name, instField->byteOffset); @@ -700,7 +718,7 @@ static bool rewriteStaticField(Method* method, u2* insns, OpCode volatileOpc) } if (dvmIsVolatileField(&staticField->field)) { - updateCode(method, insns, (insns[0] & 0xff00) | (u2) volatileOpc); + updateOpCode(method, insns, volatileOpc); LOGV("DexOpt: rewrote sfield access %s.%s --> volatile\n", staticField->field.clazz->descriptor, staticField->field.name); } @@ -873,8 +891,8 @@ static bool rewriteVirtualInvoke(Method* method, u2* insns, OpCode newOpc) * Note: Method->methodIndex is a u2 and is range checked during the * initial load. */ - updateCode(method, insns, (insns[0] & 0xff00) | (u2) newOpc); - updateCode(method, insns+1, baseMethod->methodIndex); + updateOpCode(method, insns, newOpc); + updateCodeUnit(method, insns+1, baseMethod->methodIndex); //LOGI("DexOpt: rewrote call to %s.%s --> %s.%s\n", // method->clazz->descriptor, method->name, @@ -918,8 +936,7 @@ static bool rewriteEmptyDirectInvoke(Method* method, u2* insns) * OP_INVOKE_DIRECT when debugging is enabled. 
*/ assert((insns[0] & 0xff) == OP_INVOKE_DIRECT); - updateCode(method, insns, - (insns[0] & 0xff00) | (u2) OP_INVOKE_DIRECT_EMPTY); + updateOpCode(method, insns, OP_INVOKE_DIRECT_EMPTY); //LOGI("DexOpt: marked-empty call to %s.%s --> %s.%s\n", // method->clazz->descriptor, method->name, @@ -1050,9 +1067,8 @@ static bool rewriteExecuteInline(Method* method, u2* insns, assert((insns[0] & 0xff) == OP_INVOKE_DIRECT || (insns[0] & 0xff) == OP_INVOKE_STATIC || (insns[0] & 0xff) == OP_INVOKE_VIRTUAL); - updateCode(method, insns, - (insns[0] & 0xff00) | (u2) OP_EXECUTE_INLINE); - updateCode(method, insns+1, (u2) inlineSubs->inlineIdx); + updateOpCode(method, insns, OP_EXECUTE_INLINE); + updateCodeUnit(method, insns+1, (u2) inlineSubs->inlineIdx); //LOGI("DexOpt: execute-inline %s.%s --> %s.%s\n", // method->clazz->descriptor, method->name, @@ -1091,9 +1107,8 @@ static bool rewriteExecuteInlineRange(Method* method, u2* insns, assert((insns[0] & 0xff) == OP_INVOKE_DIRECT_RANGE || (insns[0] & 0xff) == OP_INVOKE_STATIC_RANGE || (insns[0] & 0xff) == OP_INVOKE_VIRTUAL_RANGE); - updateCode(method, insns, - (insns[0] & 0xff00) | (u2) OP_EXECUTE_INLINE_RANGE); - updateCode(method, insns+1, (u2) inlineSubs->inlineIdx); + updateOpCode(method, insns, OP_EXECUTE_INLINE_RANGE); + updateCodeUnit(method, insns+1, (u2) inlineSubs->inlineIdx); //LOGI("DexOpt: execute-inline/range %s.%s --> %s.%s\n", // method->clazz->descriptor, method->name, @@ -1106,3 +1121,54 @@ static bool rewriteExecuteInlineRange(Method* method, u2* insns, return false; } + +/* + * Returns "true" if the return-void instructions in this method should + * be converted to return-void-barrier. + * + * This is needed to satisfy a Java Memory Model requirement regarding + * the construction of objects with final fields. (This does not apply + * to <clinit> or static fields, since appropriate barriers are guaranteed + * by the class initialization process.) 
+ */ +static bool needsReturnBarrier(Method* method) +{ + if (!gDvm.dexOptForSmp) + return false; + if (strcmp(method->name, "<init>") != 0) + return false; + + /* + * Check to see if the class has any final fields. If not, we don't + * need to generate a barrier instruction. + */ + const ClassObject* clazz = method->clazz; + int idx = clazz->ifieldCount; + while (--idx >= 0) { + if (dvmIsFinalField(&clazz->ifields[idx].field)) + break; + } + if (idx < 0) + return false; + + /* + * In theory, we only need to do this if the method actually modifies + * a final field. In practice, non-constructor methods are allowed + * to modify final fields by the VM, and there are tools that rely on + * this behavior. (The compiler does not allow it.) + * + * If we alter the verifier to restrict final-field updates to + * constructors, we can tighten this up as well. + */ + + return true; +} + +/* + * Convert a return-void to a return-void-barrier. + */ +static void rewriteReturnVoid(Method* method, u2* insns) +{ + assert((insns[0] & 0xff) == OP_RETURN_VOID); + updateOpCode(method, insns, OP_RETURN_VOID_BARRIER); +} -- 2.11.0