switch (abi) {
case kInterpreterAbi: // Thread* is first argument (X0) in interpreter ABI.
// FIXME IPx used by VIXL - this is unsafe.
- __ Call(Arm64ManagedRegister::FromCoreRegister(X0), Offset(offset.Int32Value()),
+ __ JumpTo(Arm64ManagedRegister::FromCoreRegister(X0), Offset(offset.Int32Value()),
Arm64ManagedRegister::FromCoreRegister(IP1));
break;
Offset(JNIEnvExt::SelfOffset().Int32Value()));
// FIXME IPx used by VIXL - this is unsafe.
- __ Call(Arm64ManagedRegister::FromCoreRegister(IP1), Offset(offset.Int32Value()),
+ __ JumpTo(Arm64ManagedRegister::FromCoreRegister(IP1), Offset(offset.Int32Value()),
Arm64ManagedRegister::FromCoreRegister(IP0));
break;
case kPortableAbi: // X18 holds Thread*.
case kQuickAbi: // Fall-through.
- __ Call(Arm64ManagedRegister::FromCoreRegister(TR), Offset(offset.Int32Value()),
+ __ JumpTo(Arm64ManagedRegister::FromCoreRegister(TR), Offset(offset.Int32Value()),
Arm64ManagedRegister::FromCoreRegister(IP0));
break;
___ Blr(reg_x(scratch.AsCoreRegister()));
}
+// Branches to the code pointer stored at [base + offs].
+// Loads the target address into |scratch| and performs a register-indirect
+// Br. Unlike Call, Br does not set the link register, so control does not
+// return here (tail-call style transfer). Clobbers |scratch|.
+// Both |m_base| and |m_scratch| must be core (X) registers.
+void Arm64Assembler::JumpTo(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch) {
+ Arm64ManagedRegister base = m_base.AsArm64();
+ Arm64ManagedRegister scratch = m_scratch.AsArm64();
+ CHECK(base.IsCoreRegister()) << base;
+ CHECK(scratch.IsCoreRegister()) << scratch;
+ // Load the code pointer, then branch through it without linking.
+ LoadFromOffset(scratch.AsCoreRegister(), base.AsCoreRegister(), offs.Int32Value());
+ ___ Br(reg_x(scratch.AsCoreRegister()));
+}
+
void Arm64Assembler::Call(FrameOffset base, Offset offs, ManagedRegister m_scratch) {
Arm64ManagedRegister scratch = m_scratch.AsArm64();
CHECK(scratch.IsCoreRegister()) << scratch;
void Call(FrameOffset base, Offset offset, ManagedRegister scratch);
void Call(ThreadOffset offset, ManagedRegister scratch);
+ // Branch to the code pointer at [m_base + offs] via m_scratch.
+ // Does not set the link register (Br, not Blr), so this is a
+ // tail-call style transfer: the callee returns to our caller.
+ // Clobbers m_scratch; both registers must be core (X) registers.
+ void JumpTo(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch);
+
// Generate code to check if Thread::Current()->exception_ is non-null
// and branch to a ExceptionSlowPath if it is.
void ExceptionPoll(ManagedRegister scratch, size_t stack_adjust);
ENTRY art_quick_resolution_trampoline
SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
+ mov x19, x0 // save the called method
mov x2, xSELF
mov x3, sp
bl artQuickResolutionTrampoline // (called, receiver, Thread*, SP)
- mov x9, x0 // Remember returned code pointer in x9.
+ mov x9, x0 // Remember returned code pointer in x9.
+ mov x0, x19 // Restore the method, before x19 is restored to on-call value
RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
cbz x9, 1f
- br x0
+ br x9
1:
- RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
DELIVER_PENDING_EXCEPTION