MethodType methodType);
static bool rewriteExecuteInlineRange(Method* method, u2* insns,
MethodType methodType);
+static void rewriteReturnVoid(Method* method, u2* insns);
+static bool needsReturnBarrier(Method* method);
/*
if (dvmIsNativeMethod(method) || dvmIsAbstractMethod(method))
return;
+ /* compute this once per method */
+ bool needRetBar = needsReturnBarrier(method);
+
insns = (u2*) method->insns;
assert(insns != NULL);
insnsSize = dvmGetMethodInsnsSize(method);
inst = *insns & 0xff;
+ /* "essential" substitutions, always checked */
switch (inst) {
case OP_IGET:
case OP_IGET_BOOLEAN:
case OP_IGET_CHAR:
case OP_IGET_SHORT:
quickOpc = OP_IGET_QUICK;
- if (ANDROID_SMP != 0)
+ if (gDvm.dexOptForSmp)
volatileOpc = OP_IGET_VOLATILE;
goto rewrite_inst_field;
case OP_IGET_WIDE:
goto rewrite_inst_field;
case OP_IGET_OBJECT:
quickOpc = OP_IGET_OBJECT_QUICK;
- if (ANDROID_SMP != 0)
+ if (gDvm.dexOptForSmp)
volatileOpc = OP_IGET_OBJECT_VOLATILE;
goto rewrite_inst_field;
case OP_IPUT:
case OP_IPUT_CHAR:
case OP_IPUT_SHORT:
quickOpc = OP_IPUT_QUICK;
- if (ANDROID_SMP != 0)
+ if (gDvm.dexOptForSmp)
volatileOpc = OP_IPUT_VOLATILE;
goto rewrite_inst_field;
case OP_IPUT_WIDE:
goto rewrite_inst_field;
case OP_IPUT_OBJECT:
quickOpc = OP_IPUT_OBJECT_QUICK;
- if (ANDROID_SMP != 0)
+ if (gDvm.dexOptForSmp)
volatileOpc = OP_IPUT_OBJECT_VOLATILE;
rewrite_inst_field:
if (essentialOnly)
rewriteInstField(method, insns, quickOpc, volatileOpc);
break;
-#if ANDROID_SMP != 0
- case OP_SGET:
- case OP_SGET_BOOLEAN:
- case OP_SGET_BYTE:
- case OP_SGET_CHAR:
- case OP_SGET_SHORT:
- volatileOpc = OP_SGET_VOLATILE;
- goto rewrite_static_field;
- case OP_SGET_OBJECT:
- volatileOpc = OP_SGET_OBJECT_VOLATILE;
- goto rewrite_static_field;
- case OP_SPUT:
- case OP_SPUT_BOOLEAN:
- case OP_SPUT_BYTE:
- case OP_SPUT_CHAR:
- case OP_SPUT_SHORT:
- volatileOpc = OP_SPUT_VOLATILE;
- goto rewrite_static_field;
- case OP_SPUT_OBJECT:
- volatileOpc = OP_SPUT_OBJECT_VOLATILE;
- goto rewrite_static_field;
-#endif
case OP_SGET_WIDE:
volatileOpc = OP_SGET_WIDE_VOLATILE;
goto rewrite_static_field;
rewrite_static_field:
rewriteStaticField(method, insns, volatileOpc);
break;
-
default:
- /* not one of the "essential" replacements; check for more */
notMatched = true;
+ break;
+ }
+
+ if (notMatched && gDvm.dexOptForSmp) {
+ /* additional "essential" substitutions for an SMP device */
+ switch (inst) {
+ case OP_SGET:
+ case OP_SGET_BOOLEAN:
+ case OP_SGET_BYTE:
+ case OP_SGET_CHAR:
+ case OP_SGET_SHORT:
+ volatileOpc = OP_SGET_VOLATILE;
+ goto rewrite_static_field2;
+ case OP_SGET_OBJECT:
+ volatileOpc = OP_SGET_OBJECT_VOLATILE;
+ goto rewrite_static_field2;
+ case OP_SPUT:
+ case OP_SPUT_BOOLEAN:
+ case OP_SPUT_BYTE:
+ case OP_SPUT_CHAR:
+ case OP_SPUT_SHORT:
+ volatileOpc = OP_SPUT_VOLATILE;
+ goto rewrite_static_field2;
+ case OP_SPUT_OBJECT:
+ volatileOpc = OP_SPUT_OBJECT_VOLATILE;
+rewrite_static_field2:
+ rewriteStaticField(method, insns, volatileOpc);
+ notMatched = false;
+ break;
+ case OP_RETURN_VOID:
+ if (needRetBar)
+ rewriteReturnVoid(method, insns);
+ notMatched = false;
+ break;
+ default:
+ assert(notMatched);
+ break;
+ }
}
+ /* non-essential substitutions */
if (notMatched && !essentialOnly) {
switch (inst) {
case OP_INVOKE_VIRTUAL:
/*
* Update a 16-bit code unit in "meth".
*/
-static inline void updateCode(const Method* meth, u2* ptr, u2 newVal)
+static inline void updateCodeUnit(const Method* meth, u2* ptr, u2 newVal)
{
if (gDvm.optimizing) {
/* dexopt time, alter the output directly */
}
/*
+ * Update the 8-bit opcode portion of a 16-bit code unit in "meth",
+ * leaving the high byte of the unit untouched.
+ */
+static inline void updateOpCode(const Method* meth, u2* ptr, OpCode opCode)
+{
+    /* opcode lives in the low byte; preserve the high byte as-is */
+    u2 newVal = (u2) ((ptr[0] & 0xff00) | (u2) opCode);
+    updateCodeUnit(meth, ptr, newVal);
+}
+
+/*
* If "referrer" and "resClass" don't come from the same DEX file, and
* the DEX we're working on is not destined for the bootstrap class path,
* tweak the class loader so package-access checks work correctly.
}
if (volatileOpc != OP_NOP && dvmIsVolatileField(&instField->field)) {
- updateCode(method, insns, (insns[0] & 0xff00) | (u2) volatileOpc);
+ updateOpCode(method, insns, volatileOpc);
LOGV("DexOpt: rewrote ifield access %s.%s --> volatile\n",
instField->field.clazz->descriptor, instField->field.name);
} else if (quickOpc != OP_NOP) {
- updateCode(method, insns, (insns[0] & 0xff00) | (u2) quickOpc);
- updateCode(method, insns+1, (u2) instField->byteOffset);
+ updateOpCode(method, insns, quickOpc);
+ updateCodeUnit(method, insns+1, (u2) instField->byteOffset);
LOGV("DexOpt: rewrote ifield access %s.%s --> %d\n",
instField->field.clazz->descriptor, instField->field.name,
instField->byteOffset);
}
if (dvmIsVolatileField(&staticField->field)) {
- updateCode(method, insns, (insns[0] & 0xff00) | (u2) volatileOpc);
+ updateOpCode(method, insns, volatileOpc);
LOGV("DexOpt: rewrote sfield access %s.%s --> volatile\n",
staticField->field.clazz->descriptor, staticField->field.name);
}
* Note: Method->methodIndex is a u2 and is range checked during the
* initial load.
*/
- updateCode(method, insns, (insns[0] & 0xff00) | (u2) newOpc);
- updateCode(method, insns+1, baseMethod->methodIndex);
+ updateOpCode(method, insns, newOpc);
+ updateCodeUnit(method, insns+1, baseMethod->methodIndex);
//LOGI("DexOpt: rewrote call to %s.%s --> %s.%s\n",
// method->clazz->descriptor, method->name,
* OP_INVOKE_DIRECT when debugging is enabled.
*/
assert((insns[0] & 0xff) == OP_INVOKE_DIRECT);
- updateCode(method, insns,
- (insns[0] & 0xff00) | (u2) OP_INVOKE_DIRECT_EMPTY);
+ updateOpCode(method, insns, OP_INVOKE_DIRECT_EMPTY);
//LOGI("DexOpt: marked-empty call to %s.%s --> %s.%s\n",
// method->clazz->descriptor, method->name,
assert((insns[0] & 0xff) == OP_INVOKE_DIRECT ||
(insns[0] & 0xff) == OP_INVOKE_STATIC ||
(insns[0] & 0xff) == OP_INVOKE_VIRTUAL);
- updateCode(method, insns,
- (insns[0] & 0xff00) | (u2) OP_EXECUTE_INLINE);
- updateCode(method, insns+1, (u2) inlineSubs->inlineIdx);
+ updateOpCode(method, insns, OP_EXECUTE_INLINE);
+ updateCodeUnit(method, insns+1, (u2) inlineSubs->inlineIdx);
//LOGI("DexOpt: execute-inline %s.%s --> %s.%s\n",
// method->clazz->descriptor, method->name,
assert((insns[0] & 0xff) == OP_INVOKE_DIRECT_RANGE ||
(insns[0] & 0xff) == OP_INVOKE_STATIC_RANGE ||
(insns[0] & 0xff) == OP_INVOKE_VIRTUAL_RANGE);
- updateCode(method, insns,
- (insns[0] & 0xff00) | (u2) OP_EXECUTE_INLINE_RANGE);
- updateCode(method, insns+1, (u2) inlineSubs->inlineIdx);
+ updateOpCode(method, insns, OP_EXECUTE_INLINE_RANGE);
+ updateCodeUnit(method, insns+1, (u2) inlineSubs->inlineIdx);
//LOGI("DexOpt: execute-inline/range %s.%s --> %s.%s\n",
// method->clazz->descriptor, method->name,
return false;
}
+
+/*
+ * Decide whether the return-void instructions in this method should be
+ * converted to return-void-barrier.
+ *
+ * This satisfies a Java Memory Model requirement for the construction
+ * of objects with final fields.  (It does not apply to <clinit> or
+ * static fields: the class initialization process already guarantees
+ * the appropriate barriers.)
+ */
+static bool needsReturnBarrier(Method* method)
+{
+    if (!gDvm.dexOptForSmp)
+        return false;
+    if (strcmp(method->name, "<init>") != 0)
+        return false;
+
+    /*
+     * A barrier is only useful if the class declares at least one final
+     * instance field; scan for one.
+     */
+    const ClassObject* clazz = method->clazz;
+    int i;
+    for (i = clazz->ifieldCount - 1; i >= 0; i--) {
+        if (dvmIsFinalField(&clazz->ifields[i].field)) {
+            /*
+             * In theory the barrier is only needed if this constructor
+             * actually stores to a final field.  In practice the VM
+             * (unlike the compiler) lets non-constructor code modify
+             * final fields, and some tools rely on that, so we stay
+             * conservative here.  If the verifier is ever tightened to
+             * restrict final-field updates to constructors, this check
+             * can be tightened as well.
+             */
+            return true;
+        }
+    }
+
+    return false;
+}
+
+/*
+ * Convert a return-void instruction to return-void-barrier.
+ *
+ * Applied to constructors selected by needsReturnBarrier(), so that the
+ * Java Memory Model requirements for final instance fields are met.
+ *
+ * "insns" must point at a code unit whose opcode is OP_RETURN_VOID.
+ */
+static void rewriteReturnVoid(Method* method, u2* insns)
+{
+    assert((insns[0] & 0xff) == OP_RETURN_VOID);
+    updateOpCode(method, insns, OP_RETURN_VOID_BARRIER);
+}