/* save/restore the PC and/or FP from the thread struct */
#define LOAD_PC_FROM_SELF() ldr rPC, [rSELF, #offThread_pc]
#define SAVE_PC_TO_SELF() str rPC, [rSELF, #offThread_pc]
-#define LOAD_FP_FROM_SELF() ldr rFP, [rSELF, #offThread_fp]
-#define SAVE_FP_TO_SELF() str rFP, [rSELF, #offThread_fp]
+#define LOAD_FP_FROM_SELF() ldr rFP, [rSELF, #offThread_curFrame]
+#define SAVE_FP_TO_SELF() str rFP, [rSELF, #offThread_curFrame]
/* NOTE(review): the ldmia/stmia forms below transfer two consecutive words
 * starting at [rSELF], so they only match the named offsets above if
 * offThread_pc == 0 and offThread_curFrame == 4 (pc first, curFrame
 * immediately after) — TODO confirm against the Thread struct layout. */
#define LOAD_PC_FP_FROM_SELF() ldmia rSELF, {rPC, rFP}
#define SAVE_PC_FP_TO_SELF() stmia rSELF, {rPC, rFP}
FETCH_ADVANCE_INST(2) @ advance rPC, load rINST
GET_VREG(r1, r2) @ r1<- fp[AA]
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r1, [r0, #offStaticField_value] @ field<- vAA
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
/* ------------------------------ */
GET_INST_OPCODE(r10) @ extract opcode from rINST
.if 0
add r2, r2, #offStaticField_value @ r2<- pointer to data
- bl dvmQuasiAtomicSwap64 @ stores r0/r1 into addr r2
+ bl dvmQuasiAtomicSwap64Sync @ stores r0/r1 into addr r2
.else
strd r0, [r2, #offStaticField_value] @ field<- vAA/vAA+1
.endif
ldr r2, [rSELF, #offThread_cardTable] @ r2<- card table base
ldr r9, [r0, #offField_clazz] @ r9<- field->clazz
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
b .LOP_SPUT_OBJECT_end
/* ------------------------------ */
FETCH_ADVANCE_INST(2) @ advance rPC, load rINST
GET_VREG(r1, r2) @ r1<- fp[AA]
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r1, [r0, #offStaticField_value] @ field<- vAA
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
FETCH_ADVANCE_INST(2) @ advance rPC, load rINST
GET_VREG(r1, r2) @ r1<- fp[AA]
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r1, [r0, #offStaticField_value] @ field<- vAA
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
FETCH_ADVANCE_INST(2) @ advance rPC, load rINST
GET_VREG(r1, r2) @ r1<- fp[AA]
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r1, [r0, #offStaticField_value] @ field<- vAA
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
FETCH_ADVANCE_INST(2) @ advance rPC, load rINST
GET_VREG(r1, r2) @ r1<- fp[AA]
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r1, [r0, #offStaticField_value] @ field<- vAA
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
FETCH_ADVANCE_INST(2) @ advance rPC, load rINST
GET_VREG(r1, r2) @ r1<- fp[AA]
GET_INST_OPCODE(ip) @ extract opcode from rINST
- SMP_DMB @ releasing store
+ SMP_DMB_ST @ releasing store
str r1, [r0, #offStaticField_value] @ field<- vAA
+ SMP_DMB
GOTO_OPCODE(ip) @ jump to next instruction
GET_INST_OPCODE(r10) @ extract opcode from rINST
.if 1
add r2, r2, #offStaticField_value @ r2<- pointer to data
- bl dvmQuasiAtomicSwap64 @ stores r0/r1 into addr r2
+ bl dvmQuasiAtomicSwap64Sync @ stores r0/r1 into addr r2
.else
strd r0, [r2, #offStaticField_value] @ field<- vAA/vAA+1
.endif
mov r0, rPC
bl dvmGetOriginalOpcode @ (rPC)
FETCH(rINST, 0) @ reload OP_BREAKPOINT + rest of inst
+ ldr r1, [rSELF, #offThread_mainHandlerTable]
and rINST, #0xff00
orr rINST, rINST, r0
- GOTO_OPCODE(r0)
+ GOTO_OPCODE_BASE(r1, r0)
/* ------------------------------ */
.balign 64
* swap if profiler/debugger active.
*/
/* [opt] execute-inline vAA, {vC, vD, vE, vF}, inline@BBBB */
- ldrb r2, [rSELF, #offThread_subMode]
+ ldrh r2, [rSELF, #offThread_subMode]
FETCH(r10, 1) @ r10<- BBBB
EXPORT_PC() @ can throw
ands r2, #kSubModeDebugProfile @ Any going on?
* us if an exception was thrown.
*/
/* [opt] execute-inline/range {vCCCC..v(CCCC+AA-1)}, inline@BBBB */
- ldrb r2, [rSELF, #offThread_subMode]
+ ldrh r2, [rSELF, #offThread_subMode]
FETCH(r10, 1) @ r10<- BBBB
EXPORT_PC() @ can throw
ands r2, #kSubModeDebugProfile @ Any going on?
tst r2, #CLASS_ISFINALIZABLE @ is this class finalizable?
bne .LOP_INVOKE_OBJECT_INIT_RANGE_setFinal @ yes, go
.LOP_INVOKE_OBJECT_INIT_RANGE_finish:
- ldrb r1, [rSELF, #offThread_subMode]
+ ldrh r1, [rSELF, #offThread_subMode]
ands r1, #kSubModeDebuggerActive @ debugger active?
bne .LOP_INVOKE_OBJECT_INIT_RANGE_debugger @ Yes - skip optimization
FETCH_ADVANCE_INST(2+1) @ advance to next instr, load rINST
ldr r2, [rSELF, #offThread_cardTable] @ r2<- card table base
ldr r9, [r0, #offField_clazz] @ r9<- field->clazz
GET_INST_OPCODE(ip) @ extract opcode from rINST
- SMP_DMB @ releasing store
+ SMP_DMB_ST @ releasing store
b .LOP_SPUT_OBJECT_VOLATILE_end
FETCH_ADVANCE_INST(4) @ advance rPC, load rINST
GET_VREG(r1, r2) @ r1<- fp[BBBB]
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r1, [r0, #offStaticField_value] @ field<- vBBBB
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
/* ------------------------------ */
GET_INST_OPCODE(r10) @ extract opcode from rINST
.if 0
add r2, r2, #offStaticField_value @ r2<- pointer to data
- bl dvmQuasiAtomicSwap64 @ stores r0/r1 into addr r2
+ bl dvmQuasiAtomicSwap64Sync @ stores r0/r1 into addr r2
.else
strd r0, [r2, #offStaticField_value] @ field<- vBBBB/vBBBB+1
.endif
ldr r2, [rSELF, #offThread_cardTable] @ r2<- card table base
ldr r9, [r0, #offField_clazz] @ r9<- field->clazz
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
b .LOP_SPUT_OBJECT_JUMBO_end
/* ------------------------------ */
FETCH_ADVANCE_INST(4) @ advance rPC, load rINST
GET_VREG(r1, r2) @ r1<- fp[BBBB]
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r1, [r0, #offStaticField_value] @ field<- vBBBB
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
FETCH_ADVANCE_INST(4) @ advance rPC, load rINST
GET_VREG(r1, r2) @ r1<- fp[BBBB]
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r1, [r0, #offStaticField_value] @ field<- vBBBB
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
FETCH_ADVANCE_INST(4) @ advance rPC, load rINST
GET_VREG(r1, r2) @ r1<- fp[BBBB]
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r1, [r0, #offStaticField_value] @ field<- vBBBB
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
FETCH_ADVANCE_INST(4) @ advance rPC, load rINST
GET_VREG(r1, r2) @ r1<- fp[BBBB]
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r1, [r0, #offStaticField_value] @ field<- vBBBB
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
tst r2, #CLASS_ISFINALIZABLE @ is this class finalizable?
bne .LOP_INVOKE_OBJECT_INIT_JUMBO_setFinal @ yes, go
.LOP_INVOKE_OBJECT_INIT_JUMBO_finish:
- ldrb r1, [rSELF, #offThread_subMode]
+ ldrh r1, [rSELF, #offThread_subMode]
ands r1, #kSubModeDebuggerActive @ debugger active?
bne .LOP_INVOKE_OBJECT_INIT_JUMBO_debugger @ Yes - skip optimization
FETCH_ADVANCE_INST(4+1) @ advance to next instr, load rINST
FETCH_ADVANCE_INST(4) @ advance rPC, load rINST
GET_VREG(r1, r2) @ r1<- fp[BBBB]
GET_INST_OPCODE(ip) @ extract opcode from rINST
- SMP_DMB @ releasing store
+ SMP_DMB_ST @ releasing store
str r1, [r0, #offStaticField_value] @ field<- vBBBB
+ SMP_DMB
GOTO_OPCODE(ip) @ jump to next instruction
GET_INST_OPCODE(r10) @ extract opcode from rINST
.if 1
add r2, r2, #offStaticField_value @ r2<- pointer to data
- bl dvmQuasiAtomicSwap64 @ stores r0/r1 into addr r2
+ bl dvmQuasiAtomicSwap64Sync @ stores r0/r1 into addr r2
.else
strd r0, [r2, #offStaticField_value] @ field<- vBBBB/vBBBB+1
.endif
ldr r2, [rSELF, #offThread_cardTable] @ r2<- card table base
ldr r9, [r0, #offField_clazz] @ r9<- field->clazz
GET_INST_OPCODE(ip) @ extract opcode from rINST
- SMP_DMB @ releasing store
+ SMP_DMB_ST @ releasing store
b .LOP_SPUT_OBJECT_VOLATILE_JUMBO_end
* The JIT needs the class to be fully resolved before it can
* include this instruction in a trace.
*/
- ldrb r1, [rSELF, #offThread_subMode]
+ ldrh r1, [rSELF, #offThread_subMode]
beq common_exceptionThrown @ yes, handle the exception
ands r1, #kSubModeJitTraceBuild @ under construction?
bne .LOP_NEW_INSTANCE_jitCheck
beq common_errNullObject @ object was null
FETCH_ADVANCE_INST(2) @ advance rPC, load rINST
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r0, [r9, r3] @ obj.field (8/16/32 bits)<- r0
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
/* continuation for OP_IPUT_WIDE */
GET_INST_OPCODE(r10) @ extract opcode from rINST
.if 0
add r2, r9, r3 @ r2<- target address
- bl dvmQuasiAtomicSwap64 @ stores r0/r1 into addr r2
+ bl dvmQuasiAtomicSwap64Sync @ stores r0/r1 into addr r2
.else
strd r0, [r9, r3] @ obj.field (64 bits, aligned)<- r0/r1
.endif
beq common_errNullObject @ object was null
FETCH_ADVANCE_INST(2) @ advance rPC, load rINST
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r0, [r9, r3] @ obj.field (32 bits)<- r0
+ @ no-op
cmp r0, #0 @ stored a null reference?
strneb r2, [r2, r9, lsr #GC_CARD_SHIFT] @ mark card if not
GOTO_OPCODE(ip) @ jump to next instruction
beq common_errNullObject @ object was null
FETCH_ADVANCE_INST(2) @ advance rPC, load rINST
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r0, [r9, r3] @ obj.field (8/16/32 bits)<- r0
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
/* continuation for OP_IPUT_BYTE */
beq common_errNullObject @ object was null
FETCH_ADVANCE_INST(2) @ advance rPC, load rINST
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r0, [r9, r3] @ obj.field (8/16/32 bits)<- r0
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
/* continuation for OP_IPUT_CHAR */
beq common_errNullObject @ object was null
FETCH_ADVANCE_INST(2) @ advance rPC, load rINST
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r0, [r9, r3] @ obj.field (8/16/32 bits)<- r0
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
/* continuation for OP_IPUT_SHORT */
beq common_errNullObject @ object was null
FETCH_ADVANCE_INST(2) @ advance rPC, load rINST
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r0, [r9, r3] @ obj.field (8/16/32 bits)<- r0
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
/* continuation for OP_SGET */
.LOP_SPUT_OBJECT_end:
str r1, [r0, #offStaticField_value] @ field<- vAA
+ @ no-op
cmp r1, #0 @ stored a null object?
strneb r2, [r2, r9, lsr #GC_CARD_SHIFT] @ mark card based on obj head
GOTO_OPCODE(ip) @ jump to next instruction
* we need to keep this instruction out of it.
* r10: &resolved_methodToCall
*/
- ldrb r2, [rSELF, #offThread_subMode]
+ ldrh r2, [rSELF, #offThread_subMode]
beq common_exceptionThrown @ null, handle exception
ands r2, #kSubModeJitTraceBuild @ trace under construction?
beq common_invokeMethodNoRange @ no (r0=method, r9="this")
* we need to keep this instruction out of it.
* r10: &resolved_methodToCall
*/
- ldrb r2, [rSELF, #offThread_subMode]
+ ldrh r2, [rSELF, #offThread_subMode]
beq common_exceptionThrown @ null, handle exception
ands r2, #kSubModeJitTraceBuild @ trace under construction?
beq common_invokeMethodRange @ no (r0=method, r9="this")
beq common_errNullObject @ object was null
FETCH_ADVANCE_INST(2) @ advance rPC, load rINST
GET_INST_OPCODE(ip) @ extract opcode from rINST
- SMP_DMB @ releasing store
+ SMP_DMB_ST @ releasing store
str r0, [r9, r3] @ obj.field (8/16/32 bits)<- r0
+ SMP_DMB
GOTO_OPCODE(ip) @ jump to next instruction
/* continuation for OP_SGET_VOLATILE */
GET_INST_OPCODE(r10) @ extract opcode from rINST
.if 1
add r2, r9, r3 @ r2<- target address
- bl dvmQuasiAtomicSwap64 @ stores r0/r1 into addr r2
+ bl dvmQuasiAtomicSwap64Sync @ stores r0/r1 into addr r2
.else
strd r0, [r9, r3] @ obj.field (64 bits, aligned)<- r0/r1
.endif
*/
.LOP_EXECUTE_INLINE_continue:
rsb r0, r0, #4 @ r0<- 4-r0
- FETCH(r9, 2) @ r9<- FEDC
+ FETCH(rINST, 2) @ rINST<- FEDC
add pc, pc, r0, lsl #3 @ computed goto, 2 instrs each
bl common_abort @ (skipped due to ARM prefetch)
-4: and ip, r9, #0xf000 @ isolate F
+4: and ip, rINST, #0xf000 @ isolate F
ldr r3, [rFP, ip, lsr #10] @ r3<- vF (shift right 12, left 2)
-3: and ip, r9, #0x0f00 @ isolate E
+3: and ip, rINST, #0x0f00 @ isolate E
ldr r2, [rFP, ip, lsr #6] @ r2<- vE
-2: and ip, r9, #0x00f0 @ isolate D
+2: and ip, rINST, #0x00f0 @ isolate D
ldr r1, [rFP, ip, lsr #2] @ r1<- vD
-1: and ip, r9, #0x000f @ isolate C
+1: and ip, rINST, #0x000f @ isolate C
ldr r0, [rFP, ip, lsl #2] @ r0<- vC
0:
- ldr r9, .LOP_EXECUTE_INLINE_table @ table of InlineOperation
- ldr pc, [r9, r10, lsl #4] @ sizeof=16, "func" is first entry
+ ldr rINST, .LOP_EXECUTE_INLINE_table @ table of InlineOperation
+ ldr pc, [rINST, r10, lsl #4] @ sizeof=16, "func" is first entry
@ (not reached)
/*
bl dvmResolveInlineNative
cmp r0, #0 @ did it resolve?
beq .LOP_EXECUTE_INLINE_resume @ no, just move on
+ mov r9, r0 @ remember method
mov r1, rSELF
bl dvmFastMethodTraceEnter @ (method, self)
add r1, rSELF, #offThread_retval@ r1<- &self->retval
mov r0, rINST, lsr #12 @ r0<- B
str r1, [sp] @ push &self->retval
bl .LOP_EXECUTE_INLINE_continue @ make call; will return after
+ mov rINST, r0 @ save result of inline
add sp, sp, #8 @ pop stack
- cmp r0, #0 @ test boolean result of inline
- beq common_exceptionThrown @ returned false, handle exception
- mov r0, r10
- bl dvmResolveInlineNative @ reload method
+ mov r0, r9 @ r0<- method
mov r1, rSELF
- bl dvmFastMethodTraceExit @ (method, self)
+ bl dvmFastNativeMethodTraceExit @ (method, self)
+ cmp rINST, #0 @ test boolean result of inline
+ beq common_exceptionThrown @ returned false, handle exception
FETCH_ADVANCE_INST(3) @ advance rPC, load rINST
GET_INST_OPCODE(ip) @ extract opcode from rINST
GOTO_OPCODE(ip) @ jump to next instruction
bl dvmResolveInlineNative
cmp r0, #0 @ did it resolve?
beq .LOP_EXECUTE_INLINE_RANGE_resume @ no, just move on
+ mov r9, r0 @ remember method
mov r1, rSELF
bl dvmFastMethodTraceEnter @ (method, self)
add r1, rSELF, #offThread_retval@ r1<- &self->retval
sub sp, sp, #8 @ make room for arg, +64 bit align
- mov r0, rINST, lsr #12 @ r0<- B
+ mov r0, rINST, lsr #8 @ r0<- B
+ mov rINST, r9 @ rINST<- method
str r1, [sp] @ push &self->retval
bl .LOP_EXECUTE_INLINE_RANGE_continue @ make call; will return after
+ mov r9, r0 @ save result of inline
add sp, sp, #8 @ pop stack
- cmp r0, #0 @ test boolean result of inline
- beq common_exceptionThrown @ returned false, handle exception
- mov r0, r10
- bl dvmResolveInlineNative @ reload method
+ mov r0, rINST @ r0<- method
mov r1, rSELF
- bl dvmFastMethodTraceExit @ (method, self)
+ bl dvmFastNativeMethodTraceExit @ (method, self)
+ cmp r9, #0 @ test boolean result of inline
+ beq common_exceptionThrown @ returned false, handle exception
FETCH_ADVANCE_INST(3) @ advance rPC, load rINST
GET_INST_OPCODE(ip) @ extract opcode from rINST
GOTO_OPCODE(ip) @ jump to next instruction
beq common_errNullObject @ object was null
FETCH_ADVANCE_INST(2) @ advance rPC, load rINST
GET_INST_OPCODE(ip) @ extract opcode from rINST
- SMP_DMB @ releasing store
+ SMP_DMB_ST @ releasing store
str r0, [r9, r3] @ obj.field (32 bits)<- r0
+ SMP_DMB
cmp r0, #0 @ stored a null reference?
strneb r2, [r2, r9, lsr #GC_CARD_SHIFT] @ mark card if not
GOTO_OPCODE(ip) @ jump to next instruction
.LOP_SPUT_OBJECT_VOLATILE_end:
str r1, [r0, #offStaticField_value] @ field<- vAA
+ SMP_DMB
cmp r1, #0 @ stored a null object?
strneb r2, [r2, r9, lsr #GC_CARD_SHIFT] @ mark card based on obj head
GOTO_OPCODE(ip) @ jump to next instruction
* The JIT needs the class to be fully resolved before it can
* include this instruction in a trace.
*/
- ldrb r1, [rSELF, #offThread_subMode]
+ ldrh r1, [rSELF, #offThread_subMode]
beq common_exceptionThrown @ yes, handle the exception
ands r1, #kSubModeJitTraceBuild @ under construction?
bne .LOP_NEW_INSTANCE_JUMBO_jitCheck
beq common_errNullObject @ object was null
FETCH_ADVANCE_INST(5) @ advance rPC, load rINST
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r0, [r9, r3] @ obj.field (8/16/32 bits)<- r0
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
/* continuation for OP_IPUT_WIDE_JUMBO */
GET_INST_OPCODE(r10) @ extract opcode from rINST
.if 0
add r2, r9, r3 @ r2<- target address
- bl dvmQuasiAtomicSwap64 @ stores r0/r1 into addr r2
+ bl dvmQuasiAtomicSwap64Sync @ stores r0/r1 into addr r2
.else
strd r0, [r9, r3] @ obj.field (64 bits, aligned)<- r0/r1
.endif
beq common_errNullObject @ object was null
FETCH_ADVANCE_INST(5) @ advance rPC, load rINST
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r0, [r9, r3] @ obj.field (32 bits)<- r0
+ @ no-op
cmp r0, #0 @ stored a null reference?
strneb r2, [r2, r9, lsr #GC_CARD_SHIFT] @ mark card if not
GOTO_OPCODE(ip) @ jump to next instruction
beq common_errNullObject @ object was null
FETCH_ADVANCE_INST(5) @ advance rPC, load rINST
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r0, [r9, r3] @ obj.field (8/16/32 bits)<- r0
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
/* continuation for OP_IPUT_BYTE_JUMBO */
beq common_errNullObject @ object was null
FETCH_ADVANCE_INST(5) @ advance rPC, load rINST
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r0, [r9, r3] @ obj.field (8/16/32 bits)<- r0
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
/* continuation for OP_IPUT_CHAR_JUMBO */
beq common_errNullObject @ object was null
FETCH_ADVANCE_INST(5) @ advance rPC, load rINST
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r0, [r9, r3] @ obj.field (8/16/32 bits)<- r0
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
/* continuation for OP_IPUT_SHORT_JUMBO */
beq common_errNullObject @ object was null
FETCH_ADVANCE_INST(5) @ advance rPC, load rINST
GET_INST_OPCODE(ip) @ extract opcode from rINST
- @ no-op @ releasing store
+ @ no-op @ releasing store
str r0, [r9, r3] @ obj.field (8/16/32 bits)<- r0
+ @ no-op
GOTO_OPCODE(ip) @ jump to next instruction
/* continuation for OP_SGET_JUMBO */
.LOP_SPUT_OBJECT_JUMBO_end:
str r1, [r0, #offStaticField_value] @ field<- vBBBB
+ @ no-op
cmp r1, #0 @ stored a null object?
strneb r2, [r2, r9, lsr #GC_CARD_SHIFT] @ mark card based on obj head
GOTO_OPCODE(ip) @ jump to next instruction
* we need to keep this instruction out of it.
* r10: &resolved_methodToCall
*/
- ldrb r2, [rSELF, #offThread_subMode]
+ ldrh r2, [rSELF, #offThread_subMode]
beq common_exceptionThrown @ null, handle exception
ands r2, #kSubModeJitTraceBuild @ trace under construction?
beq common_invokeMethodJumboNoThis @ no (r0=method, r9="this")
beq common_errNullObject @ object was null
FETCH_ADVANCE_INST(5) @ advance rPC, load rINST
GET_INST_OPCODE(ip) @ extract opcode from rINST
- SMP_DMB @ releasing store
+ SMP_DMB_ST @ releasing store
str r0, [r9, r3] @ obj.field (8/16/32 bits)<- r0
+ SMP_DMB
GOTO_OPCODE(ip) @ jump to next instruction
/* continuation for OP_IPUT_WIDE_VOLATILE_JUMBO */
GET_INST_OPCODE(r10) @ extract opcode from rINST
.if 1
add r2, r9, r3 @ r2<- target address
- bl dvmQuasiAtomicSwap64 @ stores r0/r1 into addr r2
+ bl dvmQuasiAtomicSwap64Sync @ stores r0/r1 into addr r2
.else
strd r0, [r9, r3] @ obj.field (64 bits, aligned)<- r0/r1
.endif
beq common_errNullObject @ object was null
FETCH_ADVANCE_INST(5) @ advance rPC, load rINST
GET_INST_OPCODE(ip) @ extract opcode from rINST
- SMP_DMB @ releasing store
+ SMP_DMB_ST @ releasing store
str r0, [r9, r3] @ obj.field (32 bits)<- r0
+ SMP_DMB
cmp r0, #0 @ stored a null reference?
strneb r2, [r2, r9, lsr #GC_CARD_SHIFT] @ mark card if not
GOTO_OPCODE(ip) @ jump to next instruction
.LOP_SPUT_OBJECT_VOLATILE_JUMBO_end:
str r1, [r0, #offStaticField_value] @ field<- vBBBB
+ SMP_DMB
cmp r1, #0 @ stored a null object?
strneb r2, [r2, r9, lsr #GC_CARD_SHIFT] @ mark card based on obj head
GOTO_OPCODE(ip) @ jump to next instruction
/* ------------------------------ */
.balign 64
.L_ALT_OP_DISPATCH_FF: /* 0xff */
-/* File: armv5te/alt_stub.S */
+/* File: armv5te/ALT_OP_DISPATCH_FF.S */
/*
- * Inter-instruction transfer stub. Call out to dvmCheckBefore to handle
- * any interesting requests and then jump to the real instruction
- * handler. Note that the call to dvmCheckBefore is done as a tail call.
- * rIBASE updates won't be seen until a refresh, and we can tell we have a
- * stale rIBASE if breakFlags==0. Always refresh rIBASE here, and then
- * bail to the real handler if breakFlags==0.
+ * Unlike other alt stubs, we don't want to call dvmCheckBefore() here.
+ * Instead, just treat this as a trampoline to reach the real alt
+ * handler (which will do the dvmCheckBefore() call).
*/
- ldrb r3, [rSELF, #offThread_breakFlags]
- adrl lr, dvmAsmInstructionStart + (255 * 64)
- ldr rIBASE, [rSELF, #offThread_curHandlerTable]
- cmp r3, #0
- bxeq lr @ nothing to do - jump to real handler
- EXPORT_PC()
- mov r0, rPC @ arg0
- mov r1, rFP @ arg1
- mov r2, rSELF @ arg2
- b dvmCheckBefore @ (dPC,dFP,self) tail call
+ mov ip, rINST, lsr #8 @ ip<- extended opcode
+ add ip, ip, #256 @ add offset for extended opcodes
+ GOTO_OPCODE(ip) @ go to proper extended handler
+
/* ------------------------------ */
.balign 64
mov r1, #1
str r1, [rSELF,#offThread_singleStepCount] @ just step once
mov r0, rSELF
- mov r1, #kInterpSingleStep
- mov r2, #kSubModeNormal
- mov r3, #1 @ true
- bl dvmUpdateInterpBreak @ (self, newBreak, newMode, enable)
+ mov r1, #kSubModeCountedStep
+ bl dvmEnableSubMode @ (self, newMode)
ldr rIBASE, [rSELF,#offThread_curHandlerTable]
FETCH_INST()
GET_INST_OPCODE(ip)
* r2 is jit state.
*/
common_selectTrace:
- ldrb r0,[rSELF,#offThread_breakFlags]
- ands r0,#kInterpJitBreak
+ ldrh r0,[rSELF,#offThread_subMode]
+ ands r0, #(kSubModeJitTraceBuild | kSubModeJitSV)
bne 3f @ already doing JIT work, continue
str r2,[rSELF,#offThread_jitState]
mov r0, rSELF
beq 1f
@ Set up SV single-stepping
mov r0, rSELF
- mov r1, #kInterpJitBreak
- mov r2, #kSubModeJitSV
- mov r3, #1 @ true
- bl dvmUpdateInterpBreak @ (self, newBreak, newMode, enable)
+ mov r1, #kSubModeJitSV
+ bl dvmEnableSubMode @ (self, subMode)
mov r2,#kJitSelfVerification @ ask for self verification
str r2,[rSELF,#offThread_jitState]
@ intentional fallthrough
/* On entry: r0 is "Method* methodToCall, r9 is "this" */
.LinvokeNewJumbo:
#if defined(WITH_JIT)
- ldrb r1, [rSELF, #offThread_subMode]
+ ldrh r1, [rSELF, #offThread_subMode]
ands r1, #kSubModeJitTraceBuild
blne save_callsiteinfo
#endif
common_invokeMethodRange:
.LinvokeNewRange:
#if defined(WITH_JIT)
- ldrb r1, [rSELF, #offThread_subMode]
+ ldrh r1, [rSELF, #offThread_subMode]
ands r1, #kSubModeJitTraceBuild
blne save_callsiteinfo
#endif
common_invokeMethodNoRange:
.LinvokeNewNoRange:
#if defined(WITH_JIT)
- ldrb r1, [rSELF, #offThread_subMode]
+ ldrh r1, [rSELF, #offThread_subMode]
ands r1, #kSubModeJitTraceBuild
blne save_callsiteinfo
#endif
ldr r9, [rSELF, #offThread_interpStackEnd] @ r9<- interpStackEnd
sub r3, r10, r3, lsl #2 @ r3<- bottom (newsave - outsSize)
cmp r3, r9 @ bottom < interpStackEnd?
- ldrb lr, [rSELF, #offThread_subMode]
+ ldrh lr, [rSELF, #offThread_subMode]
ldr r3, [r0, #offMethod_accessFlags] @ r3<- methodToCall->accessFlags
blo .LstackOverflow @ yes, this frame will overflow stack
mov rFP, r1 @ fp = newFp
GET_PREFETCHED_OPCODE(ip, r9) @ extract prefetched opcode from r9
mov rINST, r9 @ publish new rINST
- str r1, [rSELF, #offThread_curFrame] @ self->curFrame = newFp
+ str r1, [rSELF, #offThread_curFrame] @ curFrame = newFp
cmp r0,#0
bne common_updateProfile
GOTO_OPCODE(ip) @ jump to next instruction
mov rFP, r1 @ fp = newFp
GET_PREFETCHED_OPCODE(ip, r9) @ extract prefetched opcode from r9
mov rINST, r9 @ publish new rINST
- str r1, [rSELF, #offThread_curFrame] @ self->curFrame = newFp
+ str r1, [rSELF, #offThread_curFrame] @ curFrame = newFp
GOTO_OPCODE(ip) @ jump to next instruction
#endif
2:
@ Profiling - record method entry. r0: methodToCall
stmfd sp!, {r0-r3} @ preserve r0-r3
+ str rPC, [rSELF, #offThread_pc] @ update interpSave.pc
mov r1, r0
mov r0, rSELF
bl dvmReportInvoke @ (self, method)
.LinvokeNative:
@ Prep for the native call
@ r0=methodToCall, r1=newFp, r10=newSaveArea
- ldrb lr, [rSELF, #offThread_subMode]
+ ldrh lr, [rSELF, #offThread_subMode] @ subMode is loaded as a halfword
ldr r9, [rSELF, #offThread_jniLocal_topCookie]@r9<-thread->localRef->...
- str r1, [rSELF, #offThread_curFrame] @ self->curFrame = newFp
+ str r1, [rSELF, #offThread_curFrame] @ curFrame = newFp
str r9, [r10, #offStackSaveArea_localRefCookie] @newFp->localRefCookie=top
mov r2, r0 @ r2<- methodToCall
mov r0, r1 @ r0<- newFp (points to args)
cmp lr, #0 @ any special SubModes active?
bne 11f @ go handle them if so
- mov lr, pc @ set return addr
- ldr pc, [r2, #offMethod_nativeFunc] @ pc<- methodToCall->nativeFunc
+ ldr ip, [r2, #offMethod_nativeFunc] @ ip<- methodToCall->nativeFunc
+ blx ip @ call via ip: sets lr and is Thumb-interworking safe
7:
@ native return; r10=newSaveArea
@ equivalent to dvmPopJniLocals
ldr r0, [r10, #offStackSaveArea_localRefCookie] @ r0<- saved top
ldr r1, [rSELF, #offThread_exception] @ check for exception
- str rFP, [rSELF, #offThread_curFrame] @ self->curFrame = fp
+ str rFP, [rSELF, #offThread_curFrame] @ curFrame = fp
cmp r1, #0 @ null?
str r0, [rSELF, #offThread_jniLocal_topCookie] @ new top <- old top
bne common_exceptionThrown @ no, handle exception
11:
@ Slow path: one or more subModes active — bracket the native call with
@ the pre/post-invoke report hooks.
@ r0=newFp, r1=&retval, r2=methodToCall, r3=self, lr=subModes
stmfd sp!, {r0-r3} @ save all but subModes
- mov r0, rPC
+ mov r0, r2 @ r0<- methodToCall
mov r1, rSELF
- bl dvmReportPreNativeInvoke @ (pc, self, methodToCall)
+ mov r2, rFP
+ bl dvmReportPreNativeInvoke @ (methodToCall, self, fp)
ldmfd sp, {r0-r3} @ refresh. NOTE: no sp autoincrement
@ Call the native method
- mov lr, pc @ set return addr
- ldr pc, [r2, #offMethod_nativeFunc] @ pc<- methodToCall->nativeFunc
+ ldr ip, [r2, #offMethod_nativeFunc] @ ip<- methodToCall->nativeFunc
+ blx ip @ call via ip: sets lr and is Thumb-interworking safe
@ Restore the pre-call arguments
ldmfd sp!, {r0-r3} @ r2<- methodToCall (others unneeded)
@ Finish up any post-invoke subMode requirements
- mov r0, rPC
+ mov r0, r2 @ r0<- methodToCall
mov r1, rSELF
- bl dvmReportPostNativeInvoke @ (pc, self, methodToCall)
+ mov r2, rFP
+ bl dvmReportPostNativeInvoke @ (methodToCall, self, fp)
b 7b @ resume
.LstackOverflow: @ r0=methodToCall
*/
common_returnFromMethod:
.LreturnNew:
- ldrb lr, [rSELF, #offThread_subMode]
+ ldrh lr, [rSELF, #offThread_subMode]
SAVEAREA_FROM_FP(r0, rFP)
ldr r9, [r0, #offStackSaveArea_savedPc] @ r9 = saveArea->savedPc
cmp lr, #0 @ any special subMode handling needed?
PREFETCH_ADVANCE_INST(rINST, r9, 3) @ advance r9, update new rINST
str r2, [rSELF, #offThread_method]@ self->method = newSave->method
ldr r1, [r10, #offClassObject_pDvmDex] @ r1<- method->clazz->pDvmDex
- str rFP, [rSELF, #offThread_curFrame] @ self->curFrame = fp
+ str rFP, [rSELF, #offThread_curFrame] @ curFrame = fp
#if defined(WITH_JIT)
ldr r10, [r0, #offStackSaveArea_returnAddr] @ r10 = saveArea->returnAddr
mov rPC, r9 @ publish new rPC
19:
@ Handle special actions
@ On entry, r0: StackSaveArea
- ldr r2, [r0, #offStackSaveArea_prevFrame] @ r2<- prevFP
- mov r1, rPC
+ ldr r1, [r0, #offStackSaveArea_prevFrame] @ r1<- prevFP
+ str rPC, [rSELF, #offThread_pc] @ update interpSave.pc
+ str r1, [rSELF, #offThread_curFrame] @ update interpSave.curFrame
mov r0, rSELF
- bl dvmReportReturn @ (self, pc, prevFP)
+ bl dvmReportReturn @ (self)
SAVEAREA_FROM_FP(r0, rFP) @ restore StackSaveArea
b 14b @ continue
mov r1, rSELF @ r1<- self
mov r0, r9 @ r0<- exception
bl dvmAddTrackedAlloc @ don't let the exception be GCed
- ldrb r2, [rSELF, #offThread_subMode] @ get subMode flags
+ ldrh r2, [rSELF, #offThread_subMode] @ get subMode flags
mov r3, #0 @ r3<- NULL
str r3, [rSELF, #offThread_exception] @ self->exception = NULL
ldr r1, [rSELF, #offThread_method] @ r1<- self->method
mov r0, rSELF @ r0<- self
ldr r1, [r1, #offMethod_insns] @ r1<- method->insns
- ldrb lr, [rSELF, #offThread_subMode] @ lr<- subMode flags
+ ldrh lr, [rSELF, #offThread_subMode] @ lr<- subMode flags
mov r2, r9 @ r2<- exception
sub r1, rPC, r1 @ r1<- pc - method->insns
mov r1, r1, asr #1 @ r1<- offset in code units
add sp, sp, #8 @ restore stack
bmi .LnotCaughtLocally
- /* adjust locals to match self->curFrame and updated PC */
+ /* adjust locals to match self->interpSave.curFrame and updated PC */
SAVEAREA_FROM_FP(r1, rFP) @ r1<- new save area
ldr r1, [r1, #offStackSaveArea_method] @ r1<- new method
str r1, [rSELF, #offThread_method] @ self->method = new method
@ Manage debugger bookkeeping
7:
+ str rPC, [rSELF, #offThread_pc] @ update interpSave.pc
+ str rFP, [rSELF, #offThread_curFrame] @ update interpSave.curFrame
mov r0, rSELF @ arg0<- self
- ldr r1, [rSELF, #offThread_method] @ arg1<- curMethod
- mov r2, rPC @ arg2<- pc
- mov r3, rFP @ arg3<- fp
- bl dvmReportExceptionThrow @ (self, method, pc, fp)
+ mov r1, r9 @ arg1<- exception
+ bl dvmReportExceptionThrow @ (self, exception)
b 8b @ resume with normal handling
.LnotCaughtLocally: @ r9=exception
* r0: field pointer (must preserve)
*/
common_verifyField:
- ldrb r3, [rSELF, #offThread_subMode] @ r3 <- submode byte
+ ldrh r3, [rSELF, #offThread_subMode] @ r3 <- submode halfword
ands r3, #kSubModeJitTraceBuild
bxeq lr @ Not building trace, continue
ldr r1, [r10] @ r1<- reload resolved StaticField ptr
@ printf-style format strings used by the debug/squeak helpers
.LstrSqueak:
.asciz "<%d>"
.LstrPrintHex:
- .asciz "<0x%x>"
@ NOTE(review): %#x prints a "0x" prefix only for nonzero values, so a zero
@ argument now renders as "<0>" instead of the old "<0x0>" — confirm OK.
+ .asciz "<%#x>"
.LstrPrintLong:
.asciz "<%lld>"