#define SET_VREG(_reg, _vreg) str _reg, [rFP, _vreg, lsl #2]
#if defined(WITH_JIT)
-#define GET_JIT_ENABLED(_reg) ldr _reg,[rGLUE,#offGlue_jitEnabled]
#define GET_JIT_PROF_TABLE(_reg) ldr _reg,[rGLUE,#offGlue_pJitProfTable]
#define GET_JIT_THRESHOLD(_reg) ldr _reg,[rGLUE,#offGlue_jitThreshold]
#endif
*/
#include "../common/asm-constants.h"
+#if defined(WITH_JIT)
+#include "../common/jit-config.h"
+#endif
/* File: armv5te/platform.S */
/*
#if defined(WITH_JIT)
.Lno_singleStep:
+ ldr r10, [rGLUE, #offGlue_self] @ callee saved r10 <- glue->self
/* Entry is always a possible trace start */
GET_JIT_PROF_TABLE(r0)
FETCH_INST()
+ mov r1, #0 @ prepare the value for the new state
+ str r1, [r10, #offThread_inJitCodeCache] @ back to the interp land
cmp r0,#0
bne common_updateProfile
GET_INST_OPCODE(ip)
EXPORT_PC() @ before fetch: export the PC
GET_VREG(r1, r2) @ r1<- vAA (object)
cmp r1, #0 @ null object?
- beq common_errNullObject @ yes
+ beq 1f @ yes
ldr r0, [rGLUE, #offGlue_self] @ r0<- glue->self
bl dvmUnlockObject @ r0<- success for unlock(self, obj)
cmp r0, #0 @ failed?
- beq common_exceptionThrown @ yes, exception is pending
FETCH_ADVANCE_INST(1) @ before throw: advance rPC, load rINST
+ beq common_exceptionThrown @ yes, exception is pending
GET_INST_OPCODE(ip) @ extract opcode from rINST
GOTO_OPCODE(ip) @ jump to next instruction
+1:
+ FETCH_ADVANCE_INST(1) @ advance before throw
+ b common_errNullObject
/* ------------------------------ */
.balign 64
-.L_OP_UNUSED_EC: /* 0xec */
-/* File: armv5te/OP_UNUSED_EC.S */
+.L_OP_BREAKPOINT: /* 0xec */
+/* File: armv5te/OP_BREAKPOINT.S */
/* File: armv5te/unused.S */
bl common_abort
/*
* Execute a "native inline" instruction.
*
- * We need to call:
- * dvmPerformInlineOp4Std(arg0, arg1, arg2, arg3, &retval, ref)
+ * We need to call an InlineOp4Func:
+ * bool (func)(u4 arg0, u4 arg1, u4 arg2, u4 arg3, JValue* pResult)
*
- * The first four args are in r0-r3, but the last two must be pushed
- * onto the stack.
+ * The first four args are in r0-r3, pointer to return value storage
+ * is on the stack. The function's return value is a flag that tells
+ * us if an exception was thrown.
*/
/* [opt] execute-inline vAA, {vC, vD, vE, vF}, inline@BBBB */
FETCH(r10, 1) @ r10<- BBBB
add r1, rGLUE, #offGlue_retval @ r1<- &glue->retval
EXPORT_PC() @ can throw
- sub sp, sp, #8 @ make room for arg(s)
+ sub sp, sp, #8 @ make room for arg, +64 bit align
mov r0, rINST, lsr #12 @ r0<- B
str r1, [sp] @ push &glue->retval
bl .LOP_EXECUTE_INLINE_continue @ make call; will return after
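As a rough illustration of the calling convention described in the comment above, here is a standalone C sketch: four u4 arguments, a pointer to return-value storage, and a boolean result that signals whether an exception is pending. The u4/JValue definitions and the example op are simplified stand-ins, not Dalvik's own code; only the InlineOp4Func shape and the table-of-functions idea (gDvmInlineOpsTable) come from the source.

/* Standalone sketch of the inline-op calling convention described above.
 * u4/JValue and the sample op are simplified stand-ins, not Dalvik's code. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

typedef uint32_t u4;
typedef union { int32_t i; int64_t j; float f; double d; } JValue;  /* simplified */

/* Same shape as the InlineOp4Func named in the comment:
 * bool (func)(u4 arg0, u4 arg1, u4 arg2, u4 arg3, JValue* pResult) */
typedef bool (*InlineOp4Func)(u4, u4, u4, u4, JValue *);

/* Hypothetical inline op: just echoes arg0 back as the result. */
static bool inlineEcho(u4 arg0, u4 arg1, u4 arg2, u4 arg3, JValue *pResult) {
    (void)arg1; (void)arg2; (void)arg3;
    pResult->i = (int32_t)arg0;
    return true;                        /* false would mean "exception pending" */
}

/* Indexed by the BBBB operand, in the spirit of gDvmInlineOpsTable. */
static const InlineOp4Func opsTable[] = { inlineEcho };

int main(void) {
    JValue retval;                      /* plays the role of &glue->retval */
    u4 index = 0;                       /* BBBB fetched from the instruction stream */
    if (!opsTable[index](42, 0, 0, 0, &retval))
        return 1;                       /* would branch to common_exceptionThrown */
    printf("retval.i = %d\n", retval.i);
    return 0;
}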
/* ------------------------------ */
.balign 64
-.L_OP_UNUSED_EF: /* 0xef */
-/* File: armv5te/OP_UNUSED_EF.S */
-/* File: armv5te/unused.S */
- bl common_abort
-
-
+.L_OP_EXECUTE_INLINE_RANGE: /* 0xef */
+/* File: armv5te/OP_EXECUTE_INLINE_RANGE.S */
+ /*
+ * Execute a "native inline" instruction, using "/range" semantics.
+ * Same idea as execute-inline, but we get the args differently.
+ *
+ * We need to call an InlineOp4Func:
+ * bool (func)(u4 arg0, u4 arg1, u4 arg2, u4 arg3, JValue* pResult)
+ *
+ * The first four args are in r0-r3, pointer to return value storage
+ * is on the stack. The function's return value is a flag that tells
+ * us if an exception was thrown.
+ */
+ /* [opt] execute-inline/range {vCCCC..v(CCCC+AA-1)}, inline@BBBB */
+ FETCH(r10, 1) @ r10<- BBBB
+ add r1, rGLUE, #offGlue_retval @ r1<- &glue->retval
+ EXPORT_PC() @ can throw
+ sub sp, sp, #8 @ make room for arg, +64 bit align
+ mov r0, rINST, lsr #8 @ r0<- AA
+ str r1, [sp] @ push &glue->retval
+ bl .LOP_EXECUTE_INLINE_RANGE_continue @ make call; will return after
+ add sp, sp, #8 @ pop stack
+ cmp r0, #0 @ test boolean result of inline
+ beq common_exceptionThrown @ returned false, handle exception
+ FETCH_ADVANCE_INST(3) @ advance rPC, load rINST
+ GET_INST_OPCODE(ip) @ extract opcode from rINST
+ GOTO_OPCODE(ip) @ jump to next instruction
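The /range form packs its operands into three 16-bit code units, which is what the FETCH and shift instructions above decode: AA (the argument count) from the high byte of the first unit, BBBB (the inline-op index) from the second, and CCCC (the first register) from the third. A small, hand-assembled C sketch of that decoding, with hypothetical operand values:

/* Hand-assembled example of the /range encoding decoded above.
 * The constants are hypothetical; only the field layout follows the assembly. */
#include <stdint.h>
#include <stdio.h>

typedef uint16_t u2;

int main(void) {
    /* execute-inline/range {v5..v7}, inline@0x0003 */
    u2 insns[3];
    insns[0] = (u2)((3u << 8) | 0xef);  /* AA=3 in the high byte, opcode 0xef */
    insns[1] = 0x0003;                  /* BBBB: inline-op index */
    insns[2] = 0x0005;                  /* CCCC: first argument register */

    unsigned count = insns[0] >> 8;     /* mov r0, rINST, lsr #8 */
    unsigned index = insns[1];          /* FETCH(r10, 1) */
    unsigned base  = insns[2];          /* FETCH(r9, 2)  */

    for (unsigned i = 0; i < count; i++)
        printf("arg%u comes from v%u\n", i, base + i);
    printf("inline op index = %u\n", index);
    return 0;
}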
/* ------------------------------ */
.balign 64
.word gDvmInlineOpsTable
+/* continuation for OP_EXECUTE_INLINE_RANGE */
+
+ /*
+ * Extract args, call function.
+ * r0 = #of args (0-4)
+ * r10 = call index
+ * lr = return addr, above [DO NOT bl out of here w/o preserving LR]
+ */
+.LOP_EXECUTE_INLINE_RANGE_continue:
+ rsb r0, r0, #4 @ r0<- 4-r0
+ FETCH(r9, 2) @ r9<- CCCC
+ add pc, pc, r0, lsl #3 @ computed goto, 2 instrs each
+ bl common_abort @ (skipped due to ARM prefetch)
+4: add ip, r9, #3 @ base+3
+ GET_VREG(r3, ip) @ r3<- vBase[3]
+3: add ip, r9, #2 @ base+2
+ GET_VREG(r2, ip) @ r2<- vBase[2]
+2: add ip, r9, #1 @ base+1
+ GET_VREG(r1, ip) @ r1<- vBase[1]
+1: add ip, r9, #0 @ (nop)
+ GET_VREG(r0, ip) @ r0<- vBase[0]
+0:
+ ldr r9, .LOP_EXECUTE_INLINE_RANGE_table @ table of InlineOperation
+ LDR_PC "[r9, r10, lsl #4]" @ sizeof=16, "func" is first entry
+ @ (not reached)
+
+.LOP_EXECUTE_INLINE_RANGE_table:
+ .word gDvmInlineOpsTable
+
+
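The `add pc, pc, r0, lsl #3` line above is a computed goto: reading pc on ARM yields the address of the current instruction plus 8, so adding (4 - argCount) * 8 lands on the case matching the argument count, and execution falls through the remaining loads. A C rendering of the same fall-through idea; the register file and call site are illustrative.

/* Fall-through rendering of the computed goto: start at the case matching the
 * argument count and fall through so all lower-numbered args get loaded. */
#include <stdint.h>
#include <stdio.h>

typedef uint32_t u4;

static void loadArgs(const u4 *vregs, unsigned base, unsigned count, u4 args[4]) {
    switch (count) {
    case 4: args[3] = vregs[base + 3];  /* 4: GET_VREG(r3, base+3), fall through */
    case 3: args[2] = vregs[base + 2];  /* 3: GET_VREG(r2, base+2), fall through */
    case 2: args[1] = vregs[base + 1];  /* 2: GET_VREG(r1, base+1), fall through */
    case 1: args[0] = vregs[base];      /* 1: GET_VREG(r0, base),   fall through */
    case 0: break;                      /* 0: proceed to the call */
    }
}

int main(void) {
    u4 vregs[8] = { 0, 0, 0, 0, 0, 11, 22, 33 };
    u4 args[4]  = { 0, 0, 0, 0 };
    loadArgs(vregs, 5, 3, args);        /* {v5..v7} -> args[0..2] */
    printf("%u %u %u\n", (unsigned)args[0], (unsigned)args[1], (unsigned)args[2]);
    return 0;
}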
.size dvmAsmSisterStart, .-dvmAsmSisterStart
.global dvmAsmSisterEnd
dvmAsmSisterEnd:
mov r2,#kSVSSingleStep @ r2<- interpreter entry point
b dvmJitSelfVerificationEnd @ doesn't return
- .global dvmJitToTraceSelect
-dvmJitToTraceSelect:
+ .global dvmJitToInterpTraceSelectNoChain
+dvmJitToInterpTraceSelectNoChain:
+ mov r0,rPC @ pass our target PC
+ mov r2,#kSVSTraceSelectNoChain @ r2<- interpreter entry point
+ b dvmJitSelfVerificationEnd @ doesn't return
+
+ .global dvmJitToInterpTraceSelect
+dvmJitToInterpTraceSelect:
ldr r0,[lr, #-1] @ pass our target PC
mov r2,#kSVSTraceSelect @ r2<- interpreter entry point
b dvmJitSelfVerificationEnd @ doesn't return
- .global dvmJitToBackwardBranch
-dvmJitToBackwardBranch:
+ .global dvmJitToInterpBackwardBranch
+dvmJitToInterpBackwardBranch:
ldr r0,[lr, #-1] @ pass our target PC
mov r2,#kSVSBackwardBranch @ r2<- interpreter entry point
b dvmJitSelfVerificationEnd @ doesn't return
*/
.global dvmJitToInterpPunt
dvmJitToInterpPunt:
+ ldr r10, [rGLUE, #offGlue_self] @ callee saved r10 <- glue->self
mov rPC, r0
#ifdef EXIT_STATS
mov r0,lr
 bl dvmBumpPunt
#endif
EXPORT_PC()
+ mov r0, #0
+ str r0, [r10, #offThread_inJitCodeCache] @ Back to the interp land
adrl rIBASE, dvmAsmInstructionStart
FETCH_INST()
GET_INST_OPCODE(ip)
str r1, [rGLUE, #offGlue_entryPoint]
mov rPC,r0
EXPORT_PC()
+
adrl rIBASE, dvmAsmInstructionStart
mov r2,#kJitSingleStep @ Ask for single step and then revert
str r2,[rGLUE,#offGlue_jitState]
mov r1,#1 @ set changeInterp to bail to debug interp
b common_gotoBail
+/*
+ * Return from the translation cache and immediately request
+ * a translation for the exit target. Commonly used for callees.
+ */
+ .global dvmJitToInterpTraceSelectNoChain
+dvmJitToInterpTraceSelectNoChain:
+#ifdef EXIT_STATS
+ bl dvmBumpNoChain
+#endif
+ ldr r10, [rGLUE, #offGlue_self] @ callee saved r10 <- glue->self
+ mov r0,rPC
+ bl dvmJitGetCodeAddr @ Is there a translation?
+ str r0, [r10, #offThread_inJitCodeCache] @ set the inJitCodeCache flag
+ mov r1, rPC @ arg1 of translation may need this
+ mov lr, #0 @ in case target is HANDLER_INTERPRET
+ cmp r0,#0
+ bxne r0 @ continue native execution if so
+ b 2f
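The new inJitCodeCache stores above all follow the same handshake: look up a translation for the target dalvik PC, publish the result in the thread so other code can tell whether this thread is running inside the JIT's code cache, then either continue natively or return to the interpreter. A simplified C stand-in for that pattern; the Thread layout and lookup helper are illustrative, not Dalvik's real structures.

/* Simplified stand-in for the pattern the exit handlers above follow. */
#include <stddef.h>
#include <stdio.h>

typedef unsigned short u2;

typedef struct {
    void *inJitCodeCache;               /* mirrors offThread_inJitCodeCache */
} Thread;

/* stand-in for the translation lookup; NULL means "no translation yet" */
static void *lookupTranslation(u2 *dPC) {
    (void)dPC;
    return NULL;
}

static void exitToInterp(Thread *self, u2 *dPC) {
    void *code = lookupTranslation(dPC);
    self->inJitCodeCache = code;        /* str r0, [r10, #offThread_inJitCodeCache] */
    if (code != NULL)
        printf("continue in the code cache at %p\n", code);        /* bxne r0 */
    else
        printf("back to the interp land at dalvik PC %p\n", (void *)dPC);
}

int main(void) {
    Thread self = { NULL };
    u2 insns[1] = { 0 };
    exitToInterp(&self, insns);
    return 0;
}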
/*
* Return from the translation cache and immediately request
* a translation for the exit target. Commonly used following
* invokes.
*/
- .global dvmJitToTraceSelect
-dvmJitToTraceSelect:
+ .global dvmJitToInterpTraceSelect
+dvmJitToInterpTraceSelect:
ldr rPC,[lr, #-1] @ get our target PC
+ ldr r10, [rGLUE, #offGlue_self] @ callee saved r10 <- glue->self
add rINST,lr,#-5 @ save start of chain branch
mov r0,rPC
- bl dvmJitGetCodeAddr @ Is there a translation?
+ bl dvmJitGetCodeAddr @ Is there a translation?
+ str r0, [r10, #offThread_inJitCodeCache] @ set the inJitCodeCache flag
cmp r0,#0
beq 2f
mov r1,rINST
GET_JIT_PROF_TABLE(r0)
FETCH_INST()
cmp r0, #0
+ movne r2,#kJitTSelectRequestHot @ ask for trace selection
bne common_selectTrace
GET_INST_OPCODE(ip)
GOTO_OPCODE(ip)
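A decision sketch for this exit (taken after invokes), under the assumption that a code-cache hit leads to patching ("chaining") the exit while a miss falls back to the interpreter with a hot-trace request when profiling is active. Apart from kJitTSelectRequestHot, which the assembly names, the helpers below are illustrative.

/* Decision sketch for the trace-select exit above. */
#include <stddef.h>
#include <stdio.h>

typedef unsigned short u2;

static void *lookupTranslation(u2 *dPC) { (void)dPC; return NULL; }  /* stand-in */

static void traceSelectExit(u2 *targetPC, void *chainBranchStart,
                            const unsigned char *profTable) {
    void *code = lookupTranslation(targetPC);
    if (code != NULL) {
        /* translation exists: hand over the chain branch (rINST) for patching */
        printf("chain %p -> %p\n", chainBranchStart, code);
    } else if (profTable != NULL) {
        /* JIT is on but no translation: ask for kJitTSelectRequestHot */
        printf("request hot trace selection at PC %p\n", (void *)targetPC);
    } else {
        printf("resume interpreting at PC %p\n", (void *)targetPC);
    }
}

int main(void) {
    u2 insns[1] = { 0 };
    traceSelectExit(insns, NULL, NULL);
    return 0;
}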
.global dvmJitToInterpNormal
dvmJitToInterpNormal:
ldr rPC,[lr, #-1] @ get our target PC
+ ldr r10, [rGLUE, #offGlue_self] @ callee saved r10 <- glue->self
add rINST,lr,#-5 @ save start of chain branch
#ifdef EXIT_STATS
bl dvmBumpNormal
#endif
mov r0,rPC
bl dvmJitGetCodeAddr @ Is there a translation?
+ str r0, [r10, #offThread_inJitCodeCache] @ set the inJitCodeCache flag
cmp r0,#0
beq toInterpreter @ go if not, otherwise do chain
mov r1,rINST
#ifdef EXIT_STATS
bl dvmBumpNoChain
#endif
+ ldr r10, [rGLUE, #offGlue_self] @ callee saved r10 <- glue->self
mov r0,rPC
bl dvmJitGetCodeAddr @ Is there a translation?
+ str r0, [r10, #offThread_inJitCodeCache] @ set the inJitCodeCache flag
mov r1, rPC @ arg1 of translation may need this
mov lr, #0 @ in case target is HANDLER_INTERPRET
cmp r0,#0
common_updateProfile:
eor r3,rPC,rPC,lsr #12 @ cheap, but fast hash function
- lsl r3,r3,#23 @ shift out excess 511
- ldrb r1,[r0,r3,lsr #23] @ get counter
+ lsl r3,r3,#(32 - JIT_PROF_SIZE_LOG_2) @ shift out excess bits
+ ldrb r1,[r0,r3,lsr #(32 - JIT_PROF_SIZE_LOG_2)] @ get counter
GET_INST_OPCODE(ip)
subs r1,r1,#1 @ decrement counter
- strb r1,[r0,r3,lsr #23] @ and store it
+ strb r1,[r0,r3,lsr #(32 - JIT_PROF_SIZE_LOG_2)] @ and store it
 GOTO_OPCODE_IFNE(ip) @ if not threshold, fallthrough otherwise
/*
 * Counter hit zero: reset it to the threshold, then check whether a
 * translation already exists for this PC (and, if so,
 * jump to it now). Otherwise fall through to request trace selection.
 */
GET_JIT_THRESHOLD(r1)
- strb r1,[r0,r3,lsr #23] @ reset counter
+ ldr r10, [rGLUE, #offGlue_self] @ callee saved r10 <- glue->self
+ strb r1,[r0,r3,lsr #(32 - JIT_PROF_SIZE_LOG_2)] @ reset counter
EXPORT_PC()
mov r0,rPC
bl dvmJitGetCodeAddr @ r0<- dvmJitGetCodeAddr(rPC)
- mov r1, rPC @ arg1 of translation may need this
- mov lr, #0 @ in case target is HANDLER_INTERPRET
+ str r0, [r10, #offThread_inJitCodeCache] @ set the inJitCodeCache flag
+ mov r1, rPC @ arg1 of translation may need this
+ mov lr, #0 @ in case target is HANDLER_INTERPRET
cmp r0,#0
#if !defined(WITH_SELF_VERIFICATION)
bxne r0 @ jump to the translation
+ mov r2,#kJitTSelectRequest @ ask for trace selection
+ @ fall-through to common_selectTrace
#else
+ moveq r2,#kJitTSelectRequest @ ask for trace selection
beq common_selectTrace
/*
* At this point, we have a target translation. However, if
/* no return */
#endif
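The counter update above, together with the threshold reset that follows it, amounts to a per-PC countdown in a small byte table indexed by a cheap hash of rPC. A standalone C sketch of that logic; the JIT_PROF_SIZE_LOG_2 and threshold values used here are assumptions for illustration.

/* C sketch of the profiling-counter update done in common_updateProfile. */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define JIT_PROF_SIZE_LOG_2 9           /* assumed; the real value lives in jit-config.h */
#define JIT_PROF_SIZE (1u << JIT_PROF_SIZE_LOG_2)

static uint8_t profTable[JIT_PROF_SIZE];

/* returns nonzero when the counter for this PC just hit zero (trace is "hot") */
static int bumpProfileCounter(uintptr_t rPC, uint8_t threshold) {
    uint32_t hash = (uint32_t)(rPC ^ (rPC >> 12));          /* eor r3,rPC,rPC,lsr #12 */
    uint32_t idx  = (hash << (32 - JIT_PROF_SIZE_LOG_2))    /* lsl/lsr pair keeps     */
                          >> (32 - JIT_PROF_SIZE_LOG_2);    /* the low LOG_2 bits     */
    if (--profTable[idx] != 0)
        return 0;                       /* GOTO_OPCODE_IFNE: not hot yet */
    profTable[idx] = threshold;         /* strb: reset counter before the request */
    return 1;
}

int main(void) {
    memset(profTable, 40, sizeof(profTable));   /* pretend the threshold is 40 */
    uintptr_t pc = 0x40001234;
    int hits = 0;
    while (!bumpProfileCounter(pc, 40))
        hits++;
    printf("PC became hot after %d executions\n", hits + 1);
    return 0;
}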
+/*
+ * On entry:
+ * r2 is jit state, e.g. kJitTSelectRequest or kJitTSelectRequestHot
+ */
common_selectTrace:
- mov r2,#kJitTSelectRequest @ ask for trace selection
str r2,[rGLUE,#offGlue_jitState]
mov r2,#kInterpEntryInstr @ normal entry reason
str r2,[rGLUE,#offGlue_entryPoint]
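A tiny C stand-in for the handoff described in the entry comment above: the caller picks the request kind (r2), and common_selectTrace records it together with the entry reason in the glue state. The struct and enum values below are placeholders, not the real glue definitions.

/* Tiny stand-in for the common_selectTrace handoff. */
#include <stdio.h>

typedef enum { kJitTSelectRequest = 1, kJitTSelectRequestHot = 2 } JitState;
typedef enum { kInterpEntryInstr = 0 } InterpEntry;

typedef struct {
    JitState    jitState;               /* offGlue_jitState   */
    InterpEntry entryPoint;             /* offGlue_entryPoint */
} InterpGlue;

static void selectTrace(InterpGlue *glue, JitState request) {
    glue->jitState   = request;           /* str r2,[rGLUE,#offGlue_jitState]   */
    glue->entryPoint = kInterpEntryInstr; /* str r2,[rGLUE,#offGlue_entryPoint] */
}

int main(void) {
    InterpGlue glue = { kJitTSelectRequest, kInterpEntryInstr };
    selectTrace(&glue, kJitTSelectRequestHot);
    printf("jitState=%d entryPoint=%d\n", (int)glue.jitState, (int)glue.entryPoint);
    return 0;
}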
* before jumping back to the interpreter.
*/
dvmJitSelfVerificationEnd:
+ ldr r10, [rGLUE, #offGlue_self] @ callee saved r10 <- glue->self
+ mov r1, #0
+ str r1, [r10, #offThread_inJitCodeCache] @ Back to the interp land
mov r1,rFP @ pass ending fp
bl dvmSelfVerificationRestoreState @ restore pc and fp values
ldr rPC,[r0,#offShadowSpace_startPC] @ restore PC
bx lr @ nothing to do, return
2: @ check suspend
+#if defined(WITH_JIT)
+ /*
+ * Refresh the Jit's cached copy of profile table pointer. This pointer
+ * doubles as the Jit's on/off switch.
+ */
+ ldr r3, [rGLUE, #offGlue_ppJitProfTable] @ r3<- &gDvmJit.pJitProfTable
ldr r0, [rGLUE, #offGlue_self] @ r0<- glue->self
+ ldr r3, [r3] @ r3 <- pJitProfTable
EXPORT_PC() @ need for precise GC
+ str r3, [rGLUE, #offGlue_pJitProfTable] @ refresh Jit's on/off switch
+#else
+ ldr r0, [rGLUE, #offGlue_self] @ r0<- glue->self
+ EXPORT_PC() @ need for precise GC
+#endif
b dvmCheckSuspendPending @ suspend if necessary, then return
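The refresh above works through a double indirection: the glue caches both the profile-table pointer itself (the JIT's fast-path on/off switch) and a pointer to the master copy, so periodic checks can pick up a change such as the table being nulled out to disable the JIT. A simplified C model of that arrangement; the structures are stand-ins for the real glue/gDvmJit.

/* Simplified model of the pJitProfTable / ppJitProfTable double indirection. */
#include <stddef.h>
#include <stdio.h>

typedef unsigned char u1;

static u1 masterTable[512];

typedef struct {
    u1 *pJitProfTable;     /* offGlue_pJitProfTable: cached copy, NULL == JIT off */
    u1 **ppJitProfTable;   /* offGlue_ppJitProfTable: points at the master copy   */
} InterpGlue;

static u1 *gMasterProfTable = masterTable;   /* stand-in for the JIT's master pointer */

static void periodicRefresh(InterpGlue *glue) {
    /* ldr r3,[rGLUE,#offGlue_ppJitProfTable]; ldr r3,[r3]; str r3,[rGLUE,...] */
    glue->pJitProfTable = *glue->ppJitProfTable;
}

int main(void) {
    InterpGlue glue = { masterTable, &gMasterProfTable };
    gMasterProfTable = NULL;            /* e.g. debugger attached: switch the JIT off */
    periodicRefresh(&glue);
    printf("JIT is %s\n", glue.pJitProfTable ? "on" : "off");
    return 0;
}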
3: @ debugger/profiler enabled, bail out
@ldr pc, [r2, #offMethod_nativeFunc] @ pc<- methodToCall->nativeFunc
LDR_PC_LR "[r2, #offMethod_nativeFunc]"
+#if defined(WITH_JIT)
+ ldr r3, [rGLUE, #offGlue_ppJitProfTable] @ Refresh Jit's on/off status
+#endif
+
@ native return; r9=self, r10=newSaveArea
@ equivalent to dvmPopJniLocals
ldr r0, [r10, #offStackSaveArea_localRefCookie] @ r0<- saved top
ldr r1, [r9, #offThread_exception] @ check for exception
+#if defined(WITH_JIT)
+ ldr r3, [r3] @ r3 <- gDvmJit.pProfTable
+#endif
str rFP, [r9, #offThread_curFrame] @ self->curFrame = fp
cmp r1, #0 @ null?
str r0, [r9, #offThread_jniLocal_topCookie] @ new top <- old top
+#if defined(WITH_JIT)
+ str r3, [rGLUE, #offGlue_pJitProfTable] @ refresh cached on/off switch
+#endif
bne common_exceptionThrown @ no, handle exception
FETCH_ADVANCE_INST(3) @ advance rPC, load rINST
ldr r1, [r10, #offClassObject_pDvmDex] @ r1<- method->clazz->pDvmDex
str rFP, [r3, #offThread_curFrame] @ self->curFrame = fp
#if defined(WITH_JIT)
- ldr r3, [r0, #offStackSaveArea_returnAddr] @ r3 = saveArea->returnAddr
+ ldr r10, [r0, #offStackSaveArea_returnAddr] @ r10 = saveArea->returnAddr
GET_JIT_PROF_TABLE(r0)
mov rPC, r9 @ publish new rPC
str r1, [rGLUE, #offGlue_methodClassDex]
- cmp r3, #0 @ caller is compiled code
- blxne r3
+ str r10, [r3, #offThread_inJitCodeCache] @ may return to JIT'ed land
+ cmp r10, #0 @ caller is compiled code
+ blxne r10
GET_INST_OPCODE(ip) @ extract opcode from rINST
cmp r0,#0
bne common_updateProfile
beq 1f @ no, skip ahead
mov rFP, r0 @ save relPc result in rFP
mov r0, r10 @ r0<- self
+ mov r1, r9 @ r1<- exception
 bl dvmCleanupStackOverflow @ call(self, exception)
mov r0, rFP @ restore result
1:
ldrb r1, [r10, #offThread_stackOverflowed]
cmp r1, #0 @ did we overflow earlier?
movne r0, r10 @ if yes: r0<- self
+ movne r1, r9 @ if yes: r1<- exception
 blne dvmCleanupStackOverflow @ if yes: call(self, exception)
@ may want to show "not caught locally" debug messages here