From 1d8cdbc5202378a5f1a4b3a1fba610675ed4dcd5 Mon Sep 17 00:00:00 2001 From: Ian Rogers Date: Mon, 22 Sep 2014 22:51:09 -0700 Subject: [PATCH] Refactor quick entrypoints Remove FinishCalleeSaveFrameSetup. Assembly routines write down anchor into TLS as well as placing runtime method in callee save frame. Simplify artSet64InstanceFromCode by not computing the referrer from the stack in the C++ code. Move assembly offset tests next to constant declaration and tidy arch_test. Change-Id: Iededeebc05e54a1e2bb7bb3572b8ba012cffa1c8 --- compiler/jni/quick/jni_compiler.cc | 4 - runtime/arch/arch_test.cc | 406 ++------------ runtime/arch/arm/asm_support_arm.S | 82 ++- runtime/arch/arm/asm_support_arm.h | 11 - runtime/arch/arm/portable_entrypoints_arm.S | 2 +- runtime/arch/arm/quick_entrypoints_arm.S | 406 +++++++------- runtime/arch/arm64/asm_support_arm64.h | 22 - runtime/arch/arm64/quick_entrypoints_arm64.S | 215 ++++---- runtime/arch/mips/asm_support_mips.S | 27 +- runtime/arch/mips/asm_support_mips.h | 10 - runtime/arch/mips/jni_entrypoints_mips.S | 1 - runtime/arch/mips/memcmp16_mips.S | 2 +- runtime/arch/mips/portable_entrypoints_mips.S | 6 +- runtime/arch/mips/quick_entrypoints_mips.S | 444 +++++++--------- runtime/arch/stub_test.cc | 68 +-- runtime/arch/x86/asm_support_x86.S | 10 +- runtime/arch/x86/asm_support_x86.h | 12 - runtime/arch/x86/portable_entrypoints_x86.S | 4 +- runtime/arch/x86/quick_entrypoints_x86.S | 589 ++++++++------------- runtime/arch/x86_64/asm_support_x86_64.h | 22 - runtime/arch/x86_64/quick_entrypoints_x86_64.S | 388 +++++++------- runtime/arch/x86_64/thread_x86_64.cc | 13 - runtime/asm_support.h | 157 ++++-- runtime/entrypoints/quick/callee_save_frame.h | 51 +- .../entrypoints/quick/quick_alloc_entrypoints.cc | 42 +- .../quick/quick_deoptimization_entrypoints.cc | 5 +- .../quick/quick_dexcache_entrypoints.cc | 23 +- .../entrypoints/quick/quick_field_entrypoints.cc | 275 +++++----- .../quick/quick_fillarray_entrypoints.cc | 5 +- .../quick/quick_instrumentation_entrypoints.cc | 21 +- .../entrypoints/quick/quick_lock_entrypoints.cc | 14 +- .../entrypoints/quick/quick_thread_entrypoints.cc | 5 +- .../entrypoints/quick/quick_throw_entrypoints.cc | 47 +- .../quick/quick_trampoline_entrypoints.cc | 18 +- .../quick/quick_trampoline_entrypoints_test.cc | 6 +- runtime/exception_test.cc | 62 ++- runtime/fault_handler.cc | 2 +- runtime/instrumentation.cc | 1 + runtime/mirror/art_method.cc | 6 +- runtime/mirror/art_method.h | 4 +- runtime/mirror/object_test.cc | 20 - runtime/oat.cc | 2 +- runtime/runtime.cc | 4 +- runtime/stack.cc | 2 +- runtime/stack.h | 28 +- runtime/thread.cc | 1 - runtime/thread.h | 10 +- 47 files changed, 1490 insertions(+), 2065 deletions(-) diff --git a/compiler/jni/quick/jni_compiler.cc b/compiler/jni/quick/jni_compiler.cc index f6795ea28..3c3aa0250 100644 --- a/compiler/jni/quick/jni_compiler.cc +++ b/compiler/jni/quick/jni_compiler.cc @@ -176,12 +176,8 @@ CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver* driver, // 4. Write out the end of the quick frames. if (is_64_bit_target) { __ StoreStackPointerToThread64(Thread::TopOfManagedStackOffset<8>()); - __ StoreImmediateToThread64(Thread::TopOfManagedStackPcOffset<8>(), 0, - mr_conv->InterproceduralScratchRegister()); } else { __ StoreStackPointerToThread32(Thread::TopOfManagedStackOffset<4>()); - __ StoreImmediateToThread32(Thread::TopOfManagedStackPcOffset<4>(), 0, - mr_conv->InterproceduralScratchRegister()); } // 5. Move frame down to allow space for out going args. 
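The core of the refactoring: the work formerly done in C++ by FinishCalleeSaveFrameSetup moves into the assembly stubs, which now both place the callee-save ArtMethod* at the bottom of the new frame and record that frame (the "anchor") in the thread's top_quick_frame slot; the jni_compiler hunk above drops the separate zero-store of the managed-stack PC accordingly. The following is a rough single-file C++ model of that responsibility shift, assuming simplified stand-in types and field names (Thread::top_quick_frame_, Runtime::callee_saves_) rather than the real ART declarations.

// Rough model of the responsibility shift, not ART code: type and field names
// below are simplified stand-ins.
#include <cstdint>

struct ArtMethod {};

struct Thread {
  // "Anchor" of the managed stack: points at the newest quick frame's Method* slot.
  ArtMethod** top_quick_frame_ = nullptr;
};

struct Runtime {
  enum CalleeSaveType { kSaveAll, kRefsOnly, kRefsAndArgs, kLastCalleeSaveType };
  ArtMethod* callee_saves_[kLastCalleeSaveType] = {};
  ArtMethod* GetCalleeSaveMethod(CalleeSaveType type) const { return callee_saves_[type]; }
};

// Before: the stub only pushed registers; each C++ entrypoint then called a helper
// along these lines to publish the frame and its callee-save method.
inline void FinishCalleeSaveFrameSetupModel(Thread* self, ArtMethod** sp,
                                            Runtime* runtime,
                                            Runtime::CalleeSaveType type) {
  *sp = runtime->GetCalleeSaveMethod(type);  // Method* at the bottom of the frame.
  self->top_quick_frame_ = sp;               // Anchor the frame in TLS.
}

// After: the SETUP_*_CALLEE_SAVE_FRAME macros in this patch perform both stores in
// assembly (e.g. "str \rTemp1, [sp, #0]" and "str sp, [r9, #THREAD_TOP_QUICK_FRAME_OFFSET]"
// on ARM), so the C++ entrypoints receive an already-anchored, walkable frame.

int main() {
  Runtime runtime;
  Thread self;
  ArtMethod* frame[4] = {};  // pretend callee-save frame; slot 0 holds the Method*
  FinishCalleeSaveFrameSetupModel(&self, frame, &runtime, Runtime::kRefsOnly);
  return self.top_quick_frame_ == frame ? 0 : 1;
}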
diff --git a/runtime/arch/arch_test.cc b/runtime/arch/arch_test.cc index 5220dc3ca..42bf8fb12 100644 --- a/runtime/arch/arch_test.cc +++ b/runtime/arch/arch_test.cc @@ -43,398 +43,98 @@ class ArchTest : public CommonRuntimeTest { } }; +// Common tests are declared next to the constants. +#define ADD_TEST_EQ(x, y) EXPECT_EQ(x, y); +#include "asm_support.h" -TEST_F(ArchTest, ARM) { -#include "arch/arm/asm_support_arm.h" -#undef ART_RUNTIME_ARCH_ARM_ASM_SUPPORT_ARM_H_ - - -#ifdef FRAME_SIZE_SAVE_ALL_CALLEE_SAVE - CheckFrameSize(InstructionSet::kArm, Runtime::kSaveAll, FRAME_SIZE_SAVE_ALL_CALLEE_SAVE); -#else - LOG(WARNING) << "No frame size for SaveAll"; -#endif -#ifdef FRAME_SIZE_REFS_ONLY_CALLEE_SAVE - CheckFrameSize(InstructionSet::kArm, Runtime::kRefsOnly, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE); -#else - LOG(WARNING) << "No frame size for RefsOnly"; -#endif -#ifdef FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE - CheckFrameSize(InstructionSet::kArm, Runtime::kRefsAndArgs, FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE); -#else - LOG(WARNING) << "No frame size for RefsAndArgs"; -#endif - +TEST_F(ArchTest, CheckCommonOffsetsAndSizes) { + CheckAsmSupportOffsetsAndSizes(); +} -#ifdef RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET -#undef RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET -#endif -#ifdef RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET -#undef RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET -#endif -#ifdef RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET -#undef RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET -#endif -#ifdef THREAD_SELF_OFFSET -#undef THREAD_SELF_OFFSET -#endif -#ifdef THREAD_CARD_TABLE_OFFSET -#undef THREAD_CARD_TABLE_OFFSET -#endif -#ifdef THREAD_EXCEPTION_OFFSET -#undef THREAD_EXCEPTION_OFFSET -#endif -#ifdef THREAD_ID_OFFSET -#undef THREAD_ID_OFFSET -#endif -#ifdef FRAME_SIZE_SAVE_ALL_CALLEE_SAVE +// Grab architecture specific constants. 
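The rewritten arch_test.cc relies on a hook macro: it defines ADD_TEST_EQ as EXPECT_EQ and then includes asm_support.h, so every offset and size constant is checked right where it is declared, while the per-architecture headers are wrapped in namespaces so their FRAME_SIZE_* macros can be captured into constants and immediately #undef'd. Below is a compilable stand-alone sketch of that pattern; the constant, struct, and check are illustrative stand-ins (plain assert instead of gtest's EXPECT_EQ), not the real ART definitions.

// Single-file sketch of the "define the hook, then include the header" pattern.
#include <cassert>
#include <cstddef>

// The test defines the hook before pulling in the header (EXPECT_EQ in arch_test.cc,
// plain assert here):
#define ADD_TEST_EQ(x, y) assert((x) == (y));

// ---- miniature stand-in for asm_support.h: constant and its check side by side ----
#define MIRROR_OBJECT_CLASS_OFFSET 0

struct Object { void* klass_; };  // stand-in for mirror::Object

inline void CheckAsmSupportOffsetsAndSizes() {
  // If the C++ layout ever drifts from the constant the assembly uses, this fires.
  ADD_TEST_EQ(MIRROR_OBJECT_CLASS_OFFSET, static_cast<int>(offsetof(Object, klass_)))
}
// ------------------------------------------------------------------------------------

int main() {
  CheckAsmSupportOffsetsAndSizes();
  return 0;
}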
+namespace arm { +#include "arch/arm/asm_support_arm.h" +static constexpr size_t kFrameSizeSaveAllCalleeSave = FRAME_SIZE_SAVE_ALL_CALLEE_SAVE; #undef FRAME_SIZE_SAVE_ALL_CALLEE_SAVE -#endif -#ifdef FRAME_SIZE_REFS_ONLY_CALLEE_SAVE +static constexpr size_t kFrameSizeRefsOnlyCalleeSave = FRAME_SIZE_REFS_ONLY_CALLEE_SAVE; #undef FRAME_SIZE_REFS_ONLY_CALLEE_SAVE -#endif -#ifdef FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE +static constexpr size_t kFrameSizeRefsAndArgsCalleeSave = FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE; #undef FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE -#endif -#ifdef HEAP_REFERENCE_SIZE -#undef HEAP_REFERENCE_SIZE -#endif } - -TEST_F(ArchTest, ARM64) { +namespace arm64 { #include "arch/arm64/asm_support_arm64.h" -#undef ART_RUNTIME_ARCH_ARM64_ASM_SUPPORT_ARM64_H_ - - -#ifdef FRAME_SIZE_SAVE_ALL_CALLEE_SAVE - CheckFrameSize(InstructionSet::kArm64, Runtime::kSaveAll, FRAME_SIZE_SAVE_ALL_CALLEE_SAVE); -#else - LOG(WARNING) << "No frame size for SaveAll"; -#endif -#ifdef FRAME_SIZE_REFS_ONLY_CALLEE_SAVE - CheckFrameSize(InstructionSet::kArm64, Runtime::kRefsOnly, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE); -#else - LOG(WARNING) << "No frame size for RefsOnly"; -#endif -#ifdef FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE - CheckFrameSize(InstructionSet::kArm64, Runtime::kRefsAndArgs, FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE); -#else - LOG(WARNING) << "No frame size for RefsAndArgs"; -#endif - - -#ifdef RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET -#undef RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET -#endif -#ifdef RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET -#undef RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET -#endif -#ifdef RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET -#undef RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET -#endif -#ifdef THREAD_SELF_OFFSET -#undef THREAD_SELF_OFFSET -#endif -#ifdef THREAD_CARD_TABLE_OFFSET -#undef THREAD_CARD_TABLE_OFFSET -#endif -#ifdef THREAD_EXCEPTION_OFFSET -#undef THREAD_EXCEPTION_OFFSET -#endif -#ifdef THREAD_ID_OFFSET -#undef THREAD_ID_OFFSET -#endif -#ifdef FRAME_SIZE_SAVE_ALL_CALLEE_SAVE +static constexpr size_t kFrameSizeSaveAllCalleeSave = FRAME_SIZE_SAVE_ALL_CALLEE_SAVE; #undef FRAME_SIZE_SAVE_ALL_CALLEE_SAVE -#endif -#ifdef FRAME_SIZE_REFS_ONLY_CALLEE_SAVE +static constexpr size_t kFrameSizeRefsOnlyCalleeSave = FRAME_SIZE_REFS_ONLY_CALLEE_SAVE; #undef FRAME_SIZE_REFS_ONLY_CALLEE_SAVE -#endif -#ifdef FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE +static constexpr size_t kFrameSizeRefsAndArgsCalleeSave = FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE; #undef FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE -#endif -#ifdef HEAP_REFERENCE_SIZE -#undef HEAP_REFERENCE_SIZE -#endif } - -TEST_F(ArchTest, MIPS) { +namespace mips { #include "arch/mips/asm_support_mips.h" -#undef ART_RUNTIME_ARCH_MIPS_ASM_SUPPORT_MIPS_H_ - - -#ifdef FRAME_SIZE_SAVE_ALL_CALLEE_SAVE - CheckFrameSize(InstructionSet::kMips, Runtime::kSaveAll, FRAME_SIZE_SAVE_ALL_CALLEE_SAVE); -#else - LOG(WARNING) << "No frame size for SaveAll"; -#endif -#ifdef FRAME_SIZE_REFS_ONLY_CALLEE_SAVE - CheckFrameSize(InstructionSet::kMips, Runtime::kRefsOnly, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE); -#else - LOG(WARNING) << "No frame size for RefsOnly"; -#endif -#ifdef FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE - CheckFrameSize(InstructionSet::kMips, Runtime::kRefsAndArgs, FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE); -#else - LOG(WARNING) << "No frame size for RefsAndArgs"; -#endif - - -#ifdef RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET -#undef RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET -#endif -#ifdef RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET -#undef 
RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET -#endif -#ifdef RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET -#undef RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET -#endif -#ifdef THREAD_SELF_OFFSET -#undef THREAD_SELF_OFFSET -#endif -#ifdef THREAD_CARD_TABLE_OFFSET -#undef THREAD_CARD_TABLE_OFFSET -#endif -#ifdef THREAD_EXCEPTION_OFFSET -#undef THREAD_EXCEPTION_OFFSET -#endif -#ifdef THREAD_ID_OFFSET -#undef THREAD_ID_OFFSET -#endif -#ifdef FRAME_SIZE_SAVE_ALL_CALLEE_SAVE +static constexpr size_t kFrameSizeSaveAllCalleeSave = FRAME_SIZE_SAVE_ALL_CALLEE_SAVE; #undef FRAME_SIZE_SAVE_ALL_CALLEE_SAVE -#endif -#ifdef FRAME_SIZE_REFS_ONLY_CALLEE_SAVE +static constexpr size_t kFrameSizeRefsOnlyCalleeSave = FRAME_SIZE_REFS_ONLY_CALLEE_SAVE; #undef FRAME_SIZE_REFS_ONLY_CALLEE_SAVE -#endif -#ifdef FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE +static constexpr size_t kFrameSizeRefsAndArgsCalleeSave = FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE; #undef FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE -#endif -#ifdef HEAP_REFERENCE_SIZE -#undef HEAP_REFERENCE_SIZE -#endif } - -TEST_F(ArchTest, X86) { +namespace x86 { #include "arch/x86/asm_support_x86.h" -#undef ART_RUNTIME_ARCH_X86_ASM_SUPPORT_X86_H_ - - -#ifdef FRAME_SIZE_SAVE_ALL_CALLEE_SAVE - CheckFrameSize(InstructionSet::kX86, Runtime::kSaveAll, FRAME_SIZE_SAVE_ALL_CALLEE_SAVE); -#else - LOG(WARNING) << "No frame size for SaveAll"; -#endif -#ifdef FRAME_SIZE_REFS_ONLY_CALLEE_SAVE - CheckFrameSize(InstructionSet::kX86, Runtime::kRefsOnly, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE); -#else - LOG(WARNING) << "No frame size for RefsOnly"; -#endif -#ifdef FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE - CheckFrameSize(InstructionSet::kX86, Runtime::kRefsAndArgs, FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE); -#else - LOG(WARNING) << "No frame size for RefsAndArgs"; -#endif - - -#ifdef RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET -#undef RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET -#endif -#ifdef RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET -#undef RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET -#endif -#ifdef RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET -#undef RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET -#endif -#ifdef THREAD_SELF_OFFSET -#undef THREAD_SELF_OFFSET -#endif -#ifdef THREAD_CARD_TABLE_OFFSET -#undef THREAD_CARD_TABLE_OFFSET -#endif -#ifdef THREAD_EXCEPTION_OFFSET -#undef THREAD_EXCEPTION_OFFSET -#endif -#ifdef THREAD_ID_OFFSET -#undef THREAD_ID_OFFSET -#endif -#ifdef FRAME_SIZE_SAVE_ALL_CALLEE_SAVE +static constexpr size_t kFrameSizeSaveAllCalleeSave = FRAME_SIZE_SAVE_ALL_CALLEE_SAVE; #undef FRAME_SIZE_SAVE_ALL_CALLEE_SAVE -#endif -#ifdef FRAME_SIZE_REFS_ONLY_CALLEE_SAVE +static constexpr size_t kFrameSizeRefsOnlyCalleeSave = FRAME_SIZE_REFS_ONLY_CALLEE_SAVE; #undef FRAME_SIZE_REFS_ONLY_CALLEE_SAVE -#endif -#ifdef FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE +static constexpr size_t kFrameSizeRefsAndArgsCalleeSave = FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE; #undef FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE -#endif -#ifdef HEAP_REFERENCE_SIZE -#undef HEAP_REFERENCE_SIZE -#endif } - -TEST_F(ArchTest, X86_64) { +namespace x86_64 { #include "arch/x86_64/asm_support_x86_64.h" -#undef ART_RUNTIME_ARCH_X86_64_ASM_SUPPORT_X86_64_H_ - - -#ifdef FRAME_SIZE_SAVE_ALL_CALLEE_SAVE - CheckFrameSize(InstructionSet::kX86_64, Runtime::kSaveAll, FRAME_SIZE_SAVE_ALL_CALLEE_SAVE); -#else - LOG(WARNING) << "No frame size for SaveAll"; -#endif -#ifdef FRAME_SIZE_REFS_ONLY_CALLEE_SAVE - CheckFrameSize(InstructionSet::kX86_64, Runtime::kRefsOnly, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE); -#else - LOG(WARNING) << "No frame size for RefsOnly"; -#endif 
-#ifdef FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE - CheckFrameSize(InstructionSet::kX86_64, Runtime::kRefsAndArgs, FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE); -#else - LOG(WARNING) << "No frame size for RefsAndArgs"; -#endif - - -#ifdef RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET -#undef RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET -#endif -#ifdef RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET -#undef RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET -#endif -#ifdef RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET -#undef RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET -#endif -#ifdef THREAD_SELF_OFFSET -#undef THREAD_SELF_OFFSET -#endif -#ifdef THREAD_CARD_TABLE_OFFSET -#undef THREAD_CARD_TABLE_OFFSET -#endif -#ifdef THREAD_EXCEPTION_OFFSET -#undef THREAD_EXCEPTION_OFFSET -#endif -#ifdef THREAD_ID_OFFSET -#undef THREAD_ID_OFFSET -#endif -#ifdef FRAME_SIZE_SAVE_ALL_CALLEE_SAVE +static constexpr size_t kFrameSizeSaveAllCalleeSave = FRAME_SIZE_SAVE_ALL_CALLEE_SAVE; #undef FRAME_SIZE_SAVE_ALL_CALLEE_SAVE -#endif -#ifdef FRAME_SIZE_REFS_ONLY_CALLEE_SAVE +static constexpr size_t kFrameSizeRefsOnlyCalleeSave = FRAME_SIZE_REFS_ONLY_CALLEE_SAVE; #undef FRAME_SIZE_REFS_ONLY_CALLEE_SAVE -#endif -#ifdef FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE +static constexpr size_t kFrameSizeRefsAndArgsCalleeSave = FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE; #undef FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE -#endif -#ifdef HEAP_REFERENCE_SIZE -#undef HEAP_REFERENCE_SIZE -#endif } - -// The following tests are all for the running architecture. So we get away -// with just including it and not undefining it every time. - -#if defined(__arm__) -#include "arch/arm/asm_support_arm.h" -#elif defined(__aarch64__) -#include "arch/arm64/asm_support_arm64.h" -#elif defined(__mips__) -#include "arch/mips/asm_support_mips.h" -#elif defined(__i386__) -#include "arch/x86/asm_support_x86.h" -#elif defined(__x86_64__) -#include "arch/x86_64/asm_support_x86_64.h" -#else - // This happens for the host test. -#ifdef __LP64__ -#include "arch/x86_64/asm_support_x86_64.h" -#else -#include "arch/x86/asm_support_x86.h" -#endif -#endif - - -TEST_F(ArchTest, ThreadOffsets) { - // Ugly hack, change when possible. -#ifdef __LP64__ -#define POINTER_SIZE 8 -#else -#define POINTER_SIZE 4 -#endif - -#if defined(THREAD_SELF_OFFSET) - ThreadOffset self_offset = Thread::SelfOffset(); - EXPECT_EQ(self_offset.Int32Value(), THREAD_SELF_OFFSET); -#else - LOG(INFO) << "No Thread Self Offset found."; -#endif - -#if defined(THREAD_CARD_TABLE_OFFSET) - ThreadOffset card_offset = Thread::CardTableOffset(); - EXPECT_EQ(card_offset.Int32Value(), THREAD_CARD_TABLE_OFFSET); -#else - LOG(INFO) << "No Thread Card Table Offset found."; -#endif - -#if defined(THREAD_EXCEPTION_OFFSET) - ThreadOffset exc_offset = Thread::ExceptionOffset(); - EXPECT_EQ(exc_offset.Int32Value(), THREAD_EXCEPTION_OFFSET); -#else - LOG(INFO) << "No Thread Exception Offset found."; -#endif - -#if defined(THREAD_ID_OFFSET) - ThreadOffset id_offset = Thread::ThinLockIdOffset(); - EXPECT_EQ(id_offset.Int32Value(), THREAD_ID_OFFSET); -#else - LOG(INFO) << "No Thread ID Offset found."; -#endif +// Check architecture specific constants are sound. 
+TEST_F(ArchTest, ARM) { + CheckFrameSize(InstructionSet::kArm, Runtime::kSaveAll, arm::kFrameSizeSaveAllCalleeSave); + CheckFrameSize(InstructionSet::kArm, Runtime::kRefsOnly, arm::kFrameSizeRefsOnlyCalleeSave); + CheckFrameSize(InstructionSet::kArm, Runtime::kRefsAndArgs, arm::kFrameSizeRefsAndArgsCalleeSave); } -TEST_F(ArchTest, CalleeSaveMethodOffsets) { -#if defined(RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET) - EXPECT_EQ(Runtime::GetCalleeSaveMethodOffset(Runtime::kSaveAll), - static_cast(RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET)); -#else - LOG(INFO) << "No Runtime Save-all Offset found."; -#endif - -#if defined(RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET) - EXPECT_EQ(Runtime::GetCalleeSaveMethodOffset(Runtime::kRefsOnly), - static_cast(RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET)); -#else - LOG(INFO) << "No Runtime Refs-only Offset found."; -#endif - -#if defined(RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET) - EXPECT_EQ(Runtime::GetCalleeSaveMethodOffset(Runtime::kRefsAndArgs), - static_cast(RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET)); -#else - LOG(INFO) << "No Runtime Refs-and-Args Offset found."; -#endif +TEST_F(ArchTest, ARM64) { + CheckFrameSize(InstructionSet::kArm64, Runtime::kSaveAll, arm64::kFrameSizeSaveAllCalleeSave); + CheckFrameSize(InstructionSet::kArm64, Runtime::kRefsOnly, arm64::kFrameSizeRefsOnlyCalleeSave); + CheckFrameSize(InstructionSet::kArm64, Runtime::kRefsAndArgs, + arm64::kFrameSizeRefsAndArgsCalleeSave); } +TEST_F(ArchTest, MIPS) { + CheckFrameSize(InstructionSet::kMips, Runtime::kSaveAll, mips::kFrameSizeSaveAllCalleeSave); + CheckFrameSize(InstructionSet::kMips, Runtime::kRefsOnly, mips::kFrameSizeRefsOnlyCalleeSave); + CheckFrameSize(InstructionSet::kMips, Runtime::kRefsAndArgs, + mips::kFrameSizeRefsAndArgsCalleeSave); +} -TEST_F(ArchTest, HeapReferenceSize) { -#if defined(HEAP_REFERENCE_SIZE) - EXPECT_EQ(sizeof(mirror::HeapReference), - static_cast(HEAP_REFERENCE_SIZE)); -#else - LOG(INFO) << "No expected HeapReference Size found."; -#endif +TEST_F(ArchTest, X86) { + CheckFrameSize(InstructionSet::kX86, Runtime::kSaveAll, x86::kFrameSizeSaveAllCalleeSave); + CheckFrameSize(InstructionSet::kX86, Runtime::kRefsOnly, x86::kFrameSizeRefsOnlyCalleeSave); + CheckFrameSize(InstructionSet::kX86, Runtime::kRefsAndArgs, x86::kFrameSizeRefsAndArgsCalleeSave); } -TEST_F(ArchTest, StackReferenceSize) { -#if defined(STACK_REFERENCE_SIZE) - EXPECT_EQ(sizeof(StackReference), - static_cast(STACK_REFERENCE_SIZE)); -#else - LOG(INFO) << "No expected StackReference Size #define found."; -#endif +TEST_F(ArchTest, X86_64) { + CheckFrameSize(InstructionSet::kX86_64, Runtime::kSaveAll, x86_64::kFrameSizeSaveAllCalleeSave); + CheckFrameSize(InstructionSet::kX86_64, Runtime::kRefsOnly, x86_64::kFrameSizeRefsOnlyCalleeSave); + CheckFrameSize(InstructionSet::kX86_64, Runtime::kRefsAndArgs, + x86_64::kFrameSizeRefsAndArgsCalleeSave); } } // namespace art diff --git a/runtime/arch/arm/asm_support_arm.S b/runtime/arch/arm/asm_support_arm.S index fb6458c9d..2af636e4b 100644 --- a/runtime/arch/arm/asm_support_arm.S +++ b/runtime/arch/arm/asm_support_arm.S @@ -30,38 +30,92 @@ .arch armv7-a .thumb -.macro ENTRY name - .thumb_func +// Macro to generate the value of Runtime::Current into rDest clobbering rTemp. As it uses labels +// then the labels need to be unique. We bind these to the function name in the ENTRY macros. 
+.macro RUNTIME_CURRENT name, num, rDest, rTemp + .if .Lruntime_current\num\()_used + .error + .endif + .set .Lruntime_current\num\()_used, 1 + ldr \rDest, .Lgot_\name\()_\num @ Load offset of the GOT. + ldr \rTemp, .Lruntime_instance_\name\()_\num @ Load GOT offset of Runtime::instance_. +.Lload_got_\name\()_\num\(): + add \rDest, pc @ Fixup GOT address. + ldr \rDest, [\rDest, \rTemp] @ Load address of Runtime::instance_. + ldr \rDest, [\rDest] @ Load Runtime::instance_. +.endm + +// Common ENTRY declaration code for ARM and thumb, an ENTRY should always be paired with an END. +// Declares the RUNTIME_CURRENT[123] macros that can be used within an ENTRY and will have literals +// generated at END. +.macro DEF_ENTRY thumb_or_arm, name + \thumb_or_arm .type \name, #function .hidden \name // Hide this as a global symbol, so we do not incur plt calls. .global \name - /* Cache alignment for function entry */ + // Cache alignment for function entry. .balign 16 \name: .cfi_startproc .fnstart + // Track whether RUNTIME_CURRENT was used. + .set .Lruntime_current1_used, 0 + .set .Lruntime_current2_used, 0 + .set .Lruntime_current3_used, 0 + // The RUNTIME_CURRENT macros that are bound to the \name argument of DEF_ENTRY to ensure + // that label names are unique. + .macro RUNTIME_CURRENT1 rDest, rTemp + RUNTIME_CURRENT \name, 1, \rDest, \rTemp + .endm + .macro RUNTIME_CURRENT2 rDest, rTemp + RUNTIME_CURRENT \name, 2, \rDest, \rTemp + .endm + .macro RUNTIME_CURRENT3 rDest, rTemp + RUNTIME_CURRENT \name, 3, \rDest, \rTemp + .endm .endm +// A thumb2 style ENTRY. +.macro ENTRY name + DEF_ENTRY .thumb_func, \name +.endm + +// A ARM style ENTRY. .macro ARM_ENTRY name - .arm - .type \name, #function - .hidden \name // Hide this as a global symbol, so we do not incur plt calls. - .global \name - /* Cache alignment for function entry */ - .balign 16 -\name: - .cfi_startproc - /* Ensure we get a sane starting CFA. */ - .cfi_def_cfa sp,0 - .fnstart + DEF_ENTRY .arm, \name .endm +// Terminate an ENTRY and generate GOT references. .macro END name + // Generate offsets of GOT and Runtime::instance_ used in RUNTIME_CURRENT. + .if .Lruntime_current1_used + .Lgot_\name\()_1: + .word _GLOBAL_OFFSET_TABLE_-(.Lload_got_\name\()_1+4) + .Lruntime_instance_\name\()_1: + .word _ZN3art7Runtime9instance_E(GOT) + .endif + .if .Lruntime_current2_used + .Lgot_\name\()_2: + .word _GLOBAL_OFFSET_TABLE_-(.Lload_got_\name\()_2+4) + .Lruntime_instance_\name\()_2: + .word _ZN3art7Runtime9instance_E(GOT) + .endif + .if .Lruntime_current3_used + .Lgot_\name\()_3: + .word _GLOBAL_OFFSET_TABLE_-(.Lload_got_\name\()_3+4) + .Lruntime_instance_\name\()_3: + .word _ZN3art7Runtime9instance_E(GOT) + .endif + // Remove the RUNTIME_CURRENTx macros so they get rebound in the next function entry. + .purgem RUNTIME_CURRENT1 + .purgem RUNTIME_CURRENT2 + .purgem RUNTIME_CURRENT3 .fnend .cfi_endproc .size \name, .-\name .endm +// Declare an unimplemented ENTRY that will halt a debugger. 
.macro UNIMPLEMENTED name ENTRY \name bkpt diff --git a/runtime/arch/arm/asm_support_arm.h b/runtime/arch/arm/asm_support_arm.h index 330924ef0..5388cc0da 100644 --- a/runtime/arch/arm/asm_support_arm.h +++ b/runtime/arch/arm/asm_support_arm.h @@ -19,21 +19,10 @@ #include "asm_support.h" -// Offset of field Thread::tls32_.state_and_flags verified in InitCpu -#define THREAD_FLAGS_OFFSET 0 -// Offset of field Thread::tls32_.thin_lock_thread_id verified in InitCpu -#define THREAD_ID_OFFSET 12 -// Offset of field Thread::tlsPtr_.card_table verified in InitCpu -#define THREAD_CARD_TABLE_OFFSET 120 -// Offset of field Thread::tlsPtr_.exception verified in InitCpu -#define THREAD_EXCEPTION_OFFSET 124 - #define FRAME_SIZE_SAVE_ALL_CALLEE_SAVE 176 #define FRAME_SIZE_REFS_ONLY_CALLEE_SAVE 32 #define FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE 48 -// Expected size of a heap reference -#define HEAP_REFERENCE_SIZE 4 // Flag for enabling R4 optimization in arm runtime #define ARM_R4_SUSPEND_FLAG diff --git a/runtime/arch/arm/portable_entrypoints_arm.S b/runtime/arch/arm/portable_entrypoints_arm.S index a34db6c6c..d37e7604e 100644 --- a/runtime/arch/arm/portable_entrypoints_arm.S +++ b/runtime/arch/arm/portable_entrypoints_arm.S @@ -53,7 +53,7 @@ ENTRY art_portable_invoke_stub mov ip, #0 @ set ip to 0 str ip, [sp] @ store NULL for method* at bottom of frame add sp, #16 @ first 4 args are not passed on stack for portable - ldr ip, [r0, #METHOD_PORTABLE_CODE_OFFSET] @ get pointer to the code + ldr ip, [r0, #MIRROR_ART_METHOD_PORTABLE_CODE_OFFSET] @ get pointer to the code blx ip @ call the method mov sp, r11 @ restore the stack pointer ldr ip, [sp, #24] @ load the result pointer diff --git a/runtime/arch/arm/quick_entrypoints_arm.S b/runtime/arch/arm/quick_entrypoints_arm.S index 3d619be0c..aae0c9499 100644 --- a/runtime/arch/arm/quick_entrypoints_arm.S +++ b/runtime/arch/arm/quick_entrypoints_arm.S @@ -27,8 +27,8 @@ * Macro that sets up the callee save frame to conform with * Runtime::CreateCalleeSaveMethod(kSaveAll) */ -.macro SETUP_SAVE_ALL_CALLEE_SAVE_FRAME - push {r4-r11, lr} @ 9 words of callee saves +.macro SETUP_SAVE_ALL_CALLEE_SAVE_FRAME rTemp1, rTemp2 + push {r4-r11, lr} @ 9 words (36 bytes) of callee saves. .save {r4-r11, lr} .cfi_adjust_cfa_offset 36 .cfi_rel_offset r4, 0 @@ -40,12 +40,17 @@ .cfi_rel_offset r10, 24 .cfi_rel_offset r11, 28 .cfi_rel_offset lr, 32 - vpush {s0-s31} + vpush {s0-s31} @ 32 words (128 bytes) of floats. .pad #128 .cfi_adjust_cfa_offset 128 - sub sp, #12 @ 3 words of space, bottom word will hold Method* + sub sp, #12 @ 3 words of space, bottom word will hold Method*. .pad #12 .cfi_adjust_cfa_offset 12 + RUNTIME_CURRENT1 \rTemp1, \rTemp2 @ Load Runtime::Current into rTemp1. + THIS_LOAD_REQUIRES_READ_BARRIER + ldr \rTemp1, [\rTemp1, #RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET] @ rTemp1 is kSaveAll Method*. + str \rTemp1, [sp, #0] @ Place Method* at bottom of stack. + str sp, [r9, #THREAD_TOP_QUICK_FRAME_OFFSET] @ Place sp in Thread::Current()->top_quick_frame. // Ugly compile-time check, but we only have the preprocessor. #if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 36 + 128 + 12) @@ -57,8 +62,8 @@ * Macro that sets up the callee save frame to conform with * Runtime::CreateCalleeSaveMethod(kRefsOnly). 
*/ -.macro SETUP_REF_ONLY_CALLEE_SAVE_FRAME - push {r5-r8, r10-r11, lr} @ 7 words of callee saves +.macro SETUP_REFS_ONLY_CALLEE_SAVE_FRAME rTemp1, rTemp2 + push {r5-r8, r10-r11, lr} @ 7 words of callee saves .save {r5-r8, r10-r11, lr} .cfi_adjust_cfa_offset 28 .cfi_rel_offset r5, 0 @@ -68,9 +73,14 @@ .cfi_rel_offset r10, 16 .cfi_rel_offset r11, 20 .cfi_rel_offset lr, 24 - sub sp, #4 @ bottom word will hold Method* + sub sp, #4 @ bottom word will hold Method* .pad #4 .cfi_adjust_cfa_offset 4 + RUNTIME_CURRENT2 \rTemp1, \rTemp2 @ Load Runtime::Current into rTemp1. + THIS_LOAD_REQUIRES_READ_BARRIER + ldr \rTemp1, [\rTemp1, #RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET] @ rTemp1 is kRefsOnly Method*. + str \rTemp1, [sp, #0] @ Place Method* at bottom of stack. + str sp, [r9, #THREAD_TOP_QUICK_FRAME_OFFSET] @ Place sp in Thread::Current()->top_quick_frame. // Ugly compile-time check, but we only have the preprocessor. #if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 28 + 4) @@ -78,7 +88,7 @@ #endif .endm -.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME +.macro RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME add sp, #4 @ bottom word holds Method* pop {r5-r8, r10-r11, lr} @ 7 words of callee saves .cfi_restore r5 @@ -87,10 +97,10 @@ .cfi_restore r8 .cfi_restore r10 .cfi_restore r11 - .cfi_adjust_cfa_offset -32 + .cfi_adjust_cfa_offset -FRAME_SIZE_REFS_ONLY_CALLEE_SAVE .endm -.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN +.macro RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN add sp, #4 @ bottom word holds Method* pop {r5-r8, r10-r11, lr} @ 7 words of callee saves .cfi_restore r5 @@ -99,7 +109,7 @@ .cfi_restore r8 .cfi_restore r10 .cfi_restore r11 - .cfi_adjust_cfa_offset -32 + .cfi_adjust_cfa_offset -FRAME_SIZE_REFS_ONLY_CALLEE_SAVE bx lr @ return .endm @@ -107,8 +117,8 @@ * Macro that sets up the callee save frame to conform with * Runtime::CreateCalleeSaveMethod(kRefsAndArgs). */ -.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME - push {r1-r3, r5-r8, r10-r11, lr} @ 10 words of callee saves +.macro SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME rTemp1, rTemp2 + push {r1-r3, r5-r8, r10-r11, lr} @ 10 words of callee saves .save {r1-r3, r5-r8, r10-r11, lr} .cfi_rel_offset r1, 0 .cfi_rel_offset r2, 4 @@ -121,9 +131,15 @@ .cfi_rel_offset r11, 32 .cfi_rel_offset lr, 36 .cfi_adjust_cfa_offset 40 - sub sp, #8 @ 2 words of space, bottom word will hold Method* + sub sp, #8 @ 2 words of space, bottom word will hold Method* .pad #8 .cfi_adjust_cfa_offset 8 + RUNTIME_CURRENT3 \rTemp1, \rTemp2 @ Load Runtime::Current into rTemp1. + THIS_LOAD_REQUIRES_READ_BARRIER + @ rTemp1 is kRefsAndArgs Method*. + ldr \rTemp1, [\rTemp1, #RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET] + str \rTemp1, [sp, #0] @ Place Method* at bottom of stack. + str sp, [r9, #THREAD_TOP_QUICK_FRAME_OFFSET] @ Place sp in Thread::Current()->top_quick_frame. // Ugly compile-time check, but we only have the preprocessor. 
#if (FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE != 40 + 8) @@ -131,7 +147,29 @@ #endif .endm -.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME +.macro SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_R0 + push {r1-r3, r5-r8, r10-r11, lr} @ 10 words of callee saves + .save {r1-r3, r5-r8, r10-r11, lr} + .cfi_rel_offset r1, 0 + .cfi_rel_offset r2, 4 + .cfi_rel_offset r3, 8 + .cfi_rel_offset r5, 12 + .cfi_rel_offset r6, 16 + .cfi_rel_offset r7, 20 + .cfi_rel_offset r8, 24 + .cfi_rel_offset r10, 28 + .cfi_rel_offset r11, 32 + .cfi_rel_offset lr, 36 + .cfi_adjust_cfa_offset 40 + sub sp, #8 @ 2 words of space, bottom word will hold Method* + .pad #8 + .cfi_adjust_cfa_offset 8 + + str r0, [sp, #0] @ Store ArtMethod* to bottom of stack. + str sp, [r9, #THREAD_TOP_QUICK_FRAME_OFFSET] @ Place sp in Thread::Current()->top_quick_frame. +.endm + +.macro RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME add sp, #8 @ rewind sp pop {r1-r3, r5-r8, r10-r11, lr} @ 10 words of callee saves .cfi_restore r1 @@ -146,6 +184,7 @@ .cfi_adjust_cfa_offset -48 .endm + .macro RETURN_IF_RESULT_IS_ZERO cbnz r0, 1f @ result non-zero branch over bx lr @ return @@ -165,41 +204,35 @@ .macro DELIVER_PENDING_EXCEPTION .fnend .fnstart - SETUP_SAVE_ALL_CALLEE_SAVE_FRAME @ save callee saves for throw + SETUP_SAVE_ALL_CALLEE_SAVE_FRAME r0, r1 @ save callee saves for throw mov r0, r9 @ pass Thread::Current - mov r1, sp @ pass SP - b artDeliverPendingExceptionFromCode @ artDeliverPendingExceptionFromCode(Thread*, SP) + b artDeliverPendingExceptionFromCode @ artDeliverPendingExceptionFromCode(Thread*) .endm .macro NO_ARG_RUNTIME_EXCEPTION c_name, cxx_name .extern \cxx_name ENTRY \c_name - SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context + SETUP_SAVE_ALL_CALLEE_SAVE_FRAME r0, r1 // save all registers as basis for long jump context mov r0, r9 @ pass Thread::Current - mov r1, sp @ pass SP - b \cxx_name @ \cxx_name(Thread*, SP) + b \cxx_name @ \cxx_name(Thread*) END \c_name .endm .macro ONE_ARG_RUNTIME_EXCEPTION c_name, cxx_name .extern \cxx_name ENTRY \c_name - SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context + SETUP_SAVE_ALL_CALLEE_SAVE_FRAME r1, r2 // save all registers as basis for long jump context mov r1, r9 @ pass Thread::Current - mov r2, sp @ pass SP - b \cxx_name @ \cxx_name(Thread*, SP) - bkpt + b \cxx_name @ \cxx_name(Thread*) END \c_name .endm .macro TWO_ARG_RUNTIME_EXCEPTION c_name, cxx_name .extern \cxx_name ENTRY \c_name - SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context + SETUP_SAVE_ALL_CALLEE_SAVE_FRAME r2, r3 // save all registers as basis for long jump context mov r2, r9 @ pass Thread::Current - mov r3, sp @ pass SP - b \cxx_name @ \cxx_name(Thread*, SP) - bkpt + b \cxx_name @ \cxx_name(Thread*) END \c_name .endm @@ -224,12 +257,11 @@ END \c_name .macro ONE_ARG_REF_DOWNCALL name, entrypoint, return .extern \entrypoint ENTRY \name - SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC - ldr r1, [sp, #32] @ pass referrer + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME r1, r2 @ save callee saves in case of GC + ldr r1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] @ pass referrer mov r2, r9 @ pass Thread::Current - mov r3, sp @ pass SP - bl \entrypoint @ (uint32_t field_idx, const Method* referrer, Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + bl \entrypoint @ (uint32_t field_idx, const Method* referrer, Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME \return END \name .endm @@ -237,17 +269,11 @@ END \name .macro 
TWO_ARG_REF_DOWNCALL name, entrypoint, return .extern \entrypoint ENTRY \name - SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC - ldr r2, [sp, #32] @ pass referrer + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME r2, r3 @ save callee saves in case of GC + ldr r2, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] @ pass referrer mov r3, r9 @ pass Thread::Current - mov r12, sp - str r12, [sp, #-16]! @ expand the frame and pass SP - .pad #16 - .cfi_adjust_cfa_offset 16 - bl \entrypoint @ (field_idx, Object*, referrer, Thread*, SP) - add sp, #16 @ strip the extra frame - .cfi_adjust_cfa_offset -16 - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + bl \entrypoint @ (field_idx, Object*, referrer, Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME \return END \name .endm @@ -255,21 +281,15 @@ END \name .macro THREE_ARG_REF_DOWNCALL name, entrypoint, return .extern \entrypoint ENTRY \name - SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC - ldr r3, [sp, #32] @ pass referrer - mov r12, sp @ save SP - sub sp, #8 @ grow frame for alignment with stack args - .pad #8 - .cfi_adjust_cfa_offset 8 - push {r9, r12} @ pass Thread::Current and SP - .save {r9, r12} - .cfi_adjust_cfa_offset 8 - .cfi_rel_offset r9, 0 - .cfi_rel_offset r12, 4 - bl \entrypoint @ (field_idx, Object*, new_val, referrer, Thread*, SP) + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME r3, r12 @ save callee saves in case of GC + ldr r3, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] @ pass referrer + str r9, [sp, #-16]! @ expand the frame and pass Thread::Current + .pad #16 + .cfi_adjust_cfa_offset 16 + bl \entrypoint @ (field_idx, Object*, new_val, referrer, Thread*) add sp, #16 @ release out args .cfi_adjust_cfa_offset -16 - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME @ TODO: we can clearly save an add here + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME @ TODO: we can clearly save an add here \return END \name .endm @@ -325,8 +345,8 @@ ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFr .macro INVOKE_TRAMPOLINE c_name, cxx_name .extern \cxx_name ENTRY \c_name - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME @ save callee saves in case allocation triggers GC - ldr r2, [sp, #48] @ pass caller Method* + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME r2, r3 @ save callee saves in case allocation triggers GC + ldr r2, [sp, #FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE] @ pass caller Method* mov r3, r9 @ pass Thread::Current mov r12, sp str r12, [sp, #-16]! @ expand the frame and pass SP @@ -336,7 +356,7 @@ ENTRY \c_name add sp, #16 @ strip the extra frame .cfi_adjust_cfa_offset -16 mov r12, r1 @ save Method*->code_ - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME cbz r0, 1f @ did we find the target? 
if not go to exception delivery bx r12 @ tail call to target 1: @@ -393,7 +413,7 @@ ENTRY art_quick_invoke_stub ldr r3, [sp, #12] @ copy arg value for r3 mov ip, #0 @ set ip to 0 str ip, [sp] @ store NULL for method* at bottom of frame - ldr ip, [r0, #METHOD_QUICK_CODE_OFFSET] @ get pointer to the code + ldr ip, [r0, #MIRROR_ART_METHOD_QUICK_CODE_OFFSET] @ get pointer to the code blx ip @ call the method mov sp, r11 @ restore the stack pointer ldr ip, [sp, #24] @ load the result pointer @@ -437,10 +457,10 @@ ENTRY art_quick_lock_object cbz r0, .Lslow_lock .Lretry_lock: ldr r2, [r9, #THREAD_ID_OFFSET] - ldrex r1, [r0, #LOCK_WORD_OFFSET] + ldrex r1, [r0, #MIRROR_OBJECT_LOCK_WORD_OFFSET] cbnz r1, .Lnot_unlocked @ already thin locked @ unlocked case - r2 holds thread id with count of 0 - strex r3, r2, [r0, #LOCK_WORD_OFFSET] + strex r3, r2, [r0, #MIRROR_OBJECT_LOCK_WORD_OFFSET] cbnz r3, .Lstrex_fail @ store failed, retry dmb ish @ full (LoadLoad|LoadStore) memory barrier bx lr @@ -456,14 +476,13 @@ ENTRY art_quick_lock_object add r2, r1, #65536 @ increment count in lock word placing in r2 for storing lsr r1, r2, 30 @ if either of the top two bits are set, we overflowed. cbnz r1, .Lslow_lock @ if we overflow the count go slow path - str r2, [r0, #LOCK_WORD_OFFSET] @ no need for strex as we hold the lock + str r2, [r0, #MIRROR_OBJECT_LOCK_WORD_OFFSET] @ no need for strex as we hold the lock bx lr .Lslow_lock: - SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case we block + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME r1, r2 @ save callee saves in case we block mov r1, r9 @ pass Thread::Current - mov r2, sp @ pass SP - bl artLockObjectFromCode @ (Object* obj, Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + bl artLockObjectFromCode @ (Object* obj, Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME RETURN_IF_RESULT_IS_ZERO DELIVER_PENDING_EXCEPTION END art_quick_lock_object @@ -475,7 +494,7 @@ END art_quick_lock_object .extern artUnlockObjectFromCode ENTRY art_quick_unlock_object cbz r0, .Lslow_unlock - ldr r1, [r0, #LOCK_WORD_OFFSET] + ldr r1, [r0, #MIRROR_OBJECT_LOCK_WORD_OFFSET] lsr r2, r1, 30 cbnz r2, .Lslow_unlock @ if either of the top two bits are set, go slow path ldr r2, [r9, #THREAD_ID_OFFSET] @@ -486,18 +505,18 @@ ENTRY art_quick_unlock_object bpl .Lrecursive_thin_unlock @ transition to unlocked, r3 holds 0 dmb ish @ full (LoadStore|StoreStore) memory barrier - str r3, [r0, #LOCK_WORD_OFFSET] + str r3, [r0, #MIRROR_OBJECT_LOCK_WORD_OFFSET] bx lr .Lrecursive_thin_unlock: sub r1, r1, #65536 - str r1, [r0, #LOCK_WORD_OFFSET] + str r1, [r0, #MIRROR_OBJECT_LOCK_WORD_OFFSET] bx lr .Lslow_unlock: - SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case exception allocation triggers GC + @ save callee saves in case exception allocation triggers GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME r1, r2 mov r1, r9 @ pass Thread::Current - mov r2, sp @ pass SP - bl artUnlockObjectFromCode @ (Object* obj, Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + bl artUnlockObjectFromCode @ (Object* obj, Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME RETURN_IF_RESULT_IS_ZERO DELIVER_PENDING_EXCEPTION END art_quick_unlock_object @@ -528,10 +547,9 @@ ENTRY art_quick_check_cast pop {r0-r1, lr} .cfi_restore r0 .cfi_restore r1 - SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context + SETUP_SAVE_ALL_CALLEE_SAVE_FRAME r2, r3 // save all registers as basis for long jump context mov r2, r9 @ pass Thread::Current - mov r3, sp @ pass SP - b artThrowClassCastException @ (Class*, Class*, 
Thread*, SP) + b artThrowClassCastException @ (Class*, Class*, Thread*) bkpt END art_quick_check_cast @@ -548,7 +566,7 @@ END art_quick_aput_obj_with_null_and_bound_check .hidden art_quick_aput_obj_with_bound_check ENTRY art_quick_aput_obj_with_bound_check - ldr r3, [r0, #ARRAY_LENGTH_OFFSET] + ldr r3, [r0, #MIRROR_ARRAY_LENGTH_OFFSET] cmp r3, r1 bhi art_quick_aput_obj mov r0, r1 @@ -559,20 +577,20 @@ END art_quick_aput_obj_with_bound_check .hidden art_quick_aput_obj ENTRY art_quick_aput_obj cbz r2, .Ldo_aput_null - ldr r3, [r0, #CLASS_OFFSET] - ldr ip, [r2, #CLASS_OFFSET] - ldr r3, [r3, #CLASS_COMPONENT_TYPE_OFFSET] + ldr r3, [r0, #MIRROR_OBJECT_CLASS_OFFSET] + ldr ip, [r2, #MIRROR_OBJECT_CLASS_OFFSET] + ldr r3, [r3, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET] cmp r3, ip @ value's type == array's component type - trivial assignability bne .Lcheck_assignability .Ldo_aput: - add r3, r0, #OBJECT_ARRAY_DATA_OFFSET + add r3, r0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET str r2, [r3, r1, lsl #2] ldr r3, [r9, #THREAD_CARD_TABLE_OFFSET] lsr r0, r0, #7 strb r3, [r3, r0] blx lr .Ldo_aput_null: - add r3, r0, #OBJECT_ARRAY_DATA_OFFSET + add r3, r0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET str r2, [r3, r1, lsl #2] blx lr .Lcheck_assignability: @@ -593,7 +611,7 @@ ENTRY art_quick_aput_obj .cfi_restore r2 .cfi_restore lr .cfi_adjust_cfa_offset -16 - add r3, r0, #OBJECT_ARRAY_DATA_OFFSET + add r3, r0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET str r2, [r3, r1, lsl #2] ldr r3, [r9, #THREAD_CARD_TABLE_OFFSET] lsr r0, r0, #7 @@ -606,12 +624,11 @@ ENTRY art_quick_aput_obj .cfi_restore r2 .cfi_restore lr .cfi_adjust_cfa_offset -16 - SETUP_SAVE_ALL_CALLEE_SAVE_FRAME + SETUP_SAVE_ALL_CALLEE_SAVE_FRAME r3, ip mov r1, r2 - mov r2, r9 @ pass Thread::Current - mov r3, sp @ pass SP - b artThrowArrayStoreException @ (Class*, Class*, Thread*, SP) - bkpt @ unreached + mov r2, r9 @ pass Thread::Current + b artThrowArrayStoreException @ (Class*, Class*, Thread*) + bkpt @ unreached END art_quick_aput_obj /* @@ -621,12 +638,11 @@ END art_quick_aput_obj */ .extern artInitializeStaticStorageFromCode ENTRY art_quick_initialize_static_storage - SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME r2, r3 @ save callee saves in case of GC mov r2, r9 @ pass Thread::Current - mov r3, sp @ pass SP - @ artInitializeStaticStorageFromCode(uint32_t type_idx, Method* referrer, Thread*, SP) + @ artInitializeStaticStorageFromCode(uint32_t type_idx, Method* referrer, Thread*) bl artInitializeStaticStorageFromCode - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME RETURN_IF_RESULT_IS_NON_ZERO DELIVER_PENDING_EXCEPTION END art_quick_initialize_static_storage @@ -636,12 +652,11 @@ END art_quick_initialize_static_storage */ .extern artInitializeTypeFromCode ENTRY art_quick_initialize_type - SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME r2, r3 @ save callee saves in case of GC mov r2, r9 @ pass Thread::Current - mov r3, sp @ pass SP - @ artInitializeTypeFromCode(uint32_t type_idx, Method* referrer, Thread*, SP) + @ artInitializeTypeFromCode(uint32_t type_idx, Method* referrer, Thread*) bl artInitializeTypeFromCode - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME RETURN_IF_RESULT_IS_NON_ZERO DELIVER_PENDING_EXCEPTION END art_quick_initialize_type @@ -652,12 +667,11 @@ END art_quick_initialize_type */ .extern artInitializeTypeAndVerifyAccessFromCode ENTRY art_quick_initialize_type_and_verify_access - 
SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME r2, r3 @ save callee saves in case of GC mov r2, r9 @ pass Thread::Current - mov r3, sp @ pass SP - @ artInitializeTypeAndVerifyAccessFromCode(uint32_t type_idx, Method* referrer, Thread*, SP) + @ artInitializeTypeAndVerifyAccessFromCode(uint32_t type_idx, Method* referrer, Thread*) bl artInitializeTypeAndVerifyAccessFromCode - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME RETURN_IF_RESULT_IS_NON_ZERO DELIVER_PENDING_EXCEPTION END art_quick_initialize_type_and_verify_access @@ -676,13 +690,12 @@ ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_O */ .extern artGet64StaticFromCode ENTRY art_quick_get64_static - SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC - ldr r1, [sp, #32] @ pass referrer + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME r2, r3 @ save callee saves in case of GC + ldr r1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] @ pass referrer mov r2, r9 @ pass Thread::Current - mov r3, sp @ pass SP - bl artGet64StaticFromCode @ (uint32_t field_idx, const Method* referrer, Thread*, SP) + bl artGet64StaticFromCode @ (uint32_t field_idx, const Method* referrer, Thread*) ldr r2, [r9, #THREAD_EXCEPTION_OFFSET] @ load Thread::Current()->exception_ - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME cbnz r2, 1f @ success if no exception pending bx lr @ return on success 1: @@ -703,18 +716,12 @@ TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETU */ .extern artGet64InstanceFromCode ENTRY art_quick_get64_instance - SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC - ldr r2, [sp, #32] @ pass referrer + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME r2, r3 @ save callee saves in case of GC + ldr r2, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] @ pass referrer mov r3, r9 @ pass Thread::Current - mov r12, sp - str r12, [sp, #-16]! @ expand the frame and pass SP - .pad #16 - .cfi_adjust_cfa_offset 16 - bl artGet64InstanceFromCode @ (field_idx, Object*, referrer, Thread*, SP) - add sp, #16 @ strip the extra frame - .cfi_adjust_cfa_offset -16 + bl artGet64InstanceFromCode @ (field_idx, Object*, referrer, Thread*) ldr r2, [r9, #THREAD_EXCEPTION_OFFSET] @ load Thread::Current()->exception_ - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME cbnz r2, 1f @ success if no exception pending bx lr @ return on success 1: @@ -734,22 +741,17 @@ TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCode, RETURN_I */ .extern artSet64StaticFromCode ENTRY art_quick_set64_static - SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME r3, r12 @ save callee saves in case of GC mov r3, r2 @ pass one half of wide argument mov r2, r1 @ pass other half of wide argument - ldr r1, [sp, #32] @ pass referrer - mov r12, sp @ save SP - sub sp, #8 @ grow frame for alignment with stack args - .pad #8 - .cfi_adjust_cfa_offset 8 - push {r9, r12} @ pass Thread::Current and SP - .save {r9, r12} - .cfi_adjust_cfa_offset 8 - .cfi_rel_offset r9, 0 - bl artSet64StaticFromCode @ (field_idx, referrer, new_val, Thread*, SP) + ldr r1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] @ pass referrer + str r9, [sp, #-16]! 
@ expand the frame and pass Thread::Current + .pad #16 + .cfi_adjust_cfa_offset 16 + bl artSet64StaticFromCode @ (field_idx, referrer, new_val, Thread*) add sp, #16 @ release out args .cfi_adjust_cfa_offset -16 - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME @ TODO: we can clearly save an add here + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME @ TODO: we can clearly save an add here RETURN_IF_RESULT_IS_ZERO DELIVER_PENDING_EXCEPTION END art_quick_set64_static @@ -766,19 +768,18 @@ THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RE */ .extern artSet64InstanceFromCode ENTRY art_quick_set64_instance - SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC - mov r12, sp @ save SP - sub sp, #8 @ grow frame for alignment with stack args - .pad #8 - .cfi_adjust_cfa_offset 8 - push {r9, r12} @ pass Thread::Current and SP - .save {r9, r12} - .cfi_adjust_cfa_offset 8 - .cfi_rel_offset r9, 0 - bl artSet64InstanceFromCode @ (field_idx, Object*, new_val, Thread*, SP) + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME r12, lr @ save callee saves in case of GC + ldr r12, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] @ pass referrer + str r9, [sp, #-12]! @ expand the frame and pass Thread::Current + .pad #12 + .cfi_adjust_cfa_offset 12 + str r12, [sp, #-4]! @ expand the frame and pass the referrer + .pad #4 + .cfi_adjust_cfa_offset 4 + bl artSet64InstanceFromCode @ (field_idx, Object*, new_val, Method* referrer, Thread*) add sp, #16 @ release out args .cfi_adjust_cfa_offset -16 - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME @ TODO: we can clearly save an add here + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME @ TODO: we can clearly save an add here RETURN_IF_RESULT_IS_ZERO DELIVER_PENDING_EXCEPTION END art_quick_set64_instance @@ -791,12 +792,11 @@ END art_quick_set64_instance */ .extern artResolveStringFromCode ENTRY art_quick_resolve_string - SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME r2, r3 @ save callee saves in case of GC mov r2, r9 @ pass Thread::Current - mov r3, sp @ pass SP - @ artResolveStringFromCode(Method* referrer, uint32_t string_idx, Thread*, SP) + @ artResolveStringFromCode(Method* referrer, uint32_t string_idx, Thread*) bl artResolveStringFromCode - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME RETURN_IF_RESULT_IS_NON_ZERO DELIVER_PENDING_EXCEPTION END art_quick_resolve_string @@ -805,11 +805,10 @@ END art_quick_resolve_string .macro TWO_ARG_DOWNCALL name, entrypoint, return .extern \entrypoint ENTRY \name - SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME r2, r3 @ save callee saves in case of GC mov r2, r9 @ pass Thread::Current - mov r3, sp @ pass SP - bl \entrypoint @ (uint32_t type_idx, Method* method, Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + bl \entrypoint @ (uint32_t type_idx, Method* method, Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME \return DELIVER_PENDING_EXCEPTION END \name @@ -819,17 +818,11 @@ END \name .macro THREE_ARG_DOWNCALL name, entrypoint, return .extern \entrypoint ENTRY \name - SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME r3, r12 @ save callee saves in case of GC mov r3, r9 @ pass Thread::Current - mov r12, sp - str r12, [sp, #-16]! 
@ expand the frame and pass SP - .pad #16 - .cfi_adjust_cfa_offset 16 - @ (uint32_t type_idx, Method* method, int32_t component_count, Thread*, SP) + @ (uint32_t type_idx, Method* method, int32_t component_count, Thread*) bl \entrypoint - add sp, #16 @ strip the extra frame - .cfi_adjust_cfa_offset -16 - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME \return DELIVER_PENDING_EXCEPTION END \name @@ -844,25 +837,24 @@ GENERATE_ALL_ALLOC_ENTRYPOINTS .extern artTestSuspendFromCode ENTRY art_quick_test_suspend #ifdef ARM_R4_SUSPEND_FLAG - ldrh r0, [rSELF, #THREAD_FLAGS_OFFSET] + ldrh r0, [rSELF, #THREAD_FLAGS_OFFSET] mov rSUSPEND, #SUSPEND_CHECK_INTERVAL @ reset rSUSPEND to SUSPEND_CHECK_INTERVAL cbnz r0, 1f @ check Thread::Current()->suspend_count_ == 0 bx lr @ return if suspend_count_ == 0 1: #endif mov r0, rSELF - SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves for stack crawl - mov r1, sp - bl artTestSuspendFromCode @ (Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME r1, r2 @ save callee saves for GC stack crawl + @ TODO: save FPRs to enable access in the debugger? + bl artTestSuspendFromCode @ (Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN END art_quick_test_suspend ENTRY art_quick_implicit_suspend mov r0, rSELF - SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves for stack crawl - mov r1, sp - bl artTestSuspendFromCode @ (Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME r1, r2 @ save callee saves for stack crawl + bl artTestSuspendFromCode @ (Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN END art_quick_implicit_suspend /* @@ -872,8 +864,7 @@ END art_quick_implicit_suspend */ .extern artQuickProxyInvokeHandler ENTRY art_quick_proxy_invoke_handler - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME - str r0, [sp, #0] @ place proxy method at bottom of frame + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_R0 mov r2, r9 @ pass Thread::Current mov r3, sp @ pass SP blx artQuickProxyInvokeHandler @ (Method* proxy method, receiver, Thread*, SP) @@ -881,10 +872,10 @@ ENTRY art_quick_proxy_invoke_handler add sp, #16 @ skip r1-r3, 4 bytes padding. .cfi_adjust_cfa_offset -16 cbnz r2, 1f @ success if no exception is pending - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME bx lr @ return on success 1: - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME DELIVER_PENDING_EXCEPTION END art_quick_proxy_invoke_handler @@ -894,25 +885,25 @@ END art_quick_proxy_invoke_handler */ ENTRY art_quick_imt_conflict_trampoline ldr r0, [sp, #0] @ load caller Method* - ldr r0, [r0, #METHOD_DEX_CACHE_METHODS_OFFSET] @ load dex_cache_resolved_methods - add r0, #OBJECT_ARRAY_DATA_OFFSET @ get starting address of data + ldr r0, [r0, #MIRROR_ART_METHOD_DEX_CACHE_METHODS_OFFSET] @ load dex_cache_resolved_methods + add r0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET @ get starting address of data ldr r0, [r0, r12, lsl 2] @ load the target method b art_quick_invoke_interface_trampoline END art_quick_imt_conflict_trampoline .extern artQuickResolutionTrampoline ENTRY art_quick_resolution_trampoline - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME r2, r3 mov r2, r9 @ pass Thread::Current mov r3, sp @ pass SP blx artQuickResolutionTrampoline @ (Method* called, receiver, Thread*, SP) cbz r0, 1f @ is code pointer null? 
goto exception mov r12, r0 ldr r0, [sp, #0] @ load resolved method in r0 - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME bx r12 @ tail-call into actual code 1: - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME DELIVER_PENDING_EXCEPTION END art_quick_resolution_trampoline @@ -920,8 +911,7 @@ END art_quick_resolution_trampoline * Called to do a generic JNI down-call */ ENTRY art_quick_generic_jni_trampoline - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME - str r0, [sp, #0] // Store native ArtMethod* to bottom of stack. + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_R0 // Save rSELF mov r11, rSELF @@ -1008,21 +998,21 @@ ENTRY art_quick_generic_jni_trampoline .cfi_def_cfa_register sp mov r9, r11 .Lexception_in_native: - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME DELIVER_PENDING_EXCEPTION END art_quick_generic_jni_trampoline .extern artQuickToInterpreterBridge ENTRY art_quick_to_interpreter_bridge - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME r1, r2 mov r1, r9 @ pass Thread::Current mov r2, sp @ pass SP blx artQuickToInterpreterBridge @ (Method* method, Thread*, SP) ldr r2, [r9, #THREAD_EXCEPTION_OFFSET] @ load Thread::Current()->exception_ add sp, #16 @ skip r1-r3, 4 bytes padding. .cfi_adjust_cfa_offset -16 - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME cbnz r2, 1f @ success if no exception is pending bx lr @ return on success 1: @@ -1035,30 +1025,23 @@ END art_quick_to_interpreter_bridge .extern artInstrumentationMethodEntryFromCode .extern artInstrumentationMethodExitFromCode ENTRY art_quick_instrumentation_entry - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME - str r0, [sp, #4] @ preserve r0 - mov r12, sp @ remember sp - str lr, [sp, #-16]! @ expand the frame and pass LR - .pad #16 - .cfi_adjust_cfa_offset 16 - .cfi_rel_offset lr, 0 + @ Make stack crawlable and clobber r2 and r3 (post saving) + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME r2, r3 + @ preserve r0 (not normally an arg) knowing there is a spare slot in kRefsAndArgs. + str r0, [sp, #4] mov r2, r9 @ pass Thread::Current - mov r3, r12 @ pass SP - blx artInstrumentationMethodEntryFromCode @ (Method*, Object*, Thread*, SP, LR) - add sp, #16 @ remove out argument and padding from stack - .cfi_adjust_cfa_offset -16 + mov r3, lr @ pass LR + blx artInstrumentationMethodEntryFromCode @ (Method*, Object*, Thread*, LR) mov r12, r0 @ r12 holds reference to code ldr r0, [sp, #4] @ restore r0 - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME blx r12 @ call method with lr set to art_quick_instrumentation_exit -END art_quick_instrumentation_entry +@ Deliberate fall-through into art_quick_instrumentation_exit. .type art_quick_instrumentation_exit, #function .global art_quick_instrumentation_exit art_quick_instrumentation_exit: - .cfi_startproc - .fnstart mov lr, #0 @ link register is to here, so clobber with 0 for later checks - SETUP_REF_ONLY_CALLEE_SAVE_FRAME + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME r2, r3 @ set up frame knowing r2 and r3 must be dead on exit mov r12, sp @ remember bottom of caller's frame push {r0-r1} @ save return value .save {r0-r1} @@ -1085,7 +1068,7 @@ art_quick_instrumentation_exit: add sp, #32 @ remove callee save frame .cfi_adjust_cfa_offset -32 bx r2 @ return -END art_quick_instrumentation_exit +END art_quick_instrumentation_entry /* * Instrumentation has requested that we deoptimize into the interpreter. 
The deoptimization @@ -1093,10 +1076,9 @@ END art_quick_instrumentation_exit */ .extern artDeoptimize ENTRY art_quick_deoptimize - SETUP_SAVE_ALL_CALLEE_SAVE_FRAME + SETUP_SAVE_ALL_CALLEE_SAVE_FRAME r0, r1 mov r0, r9 @ Set up args. - mov r1, sp - blx artDeoptimize @ artDeoptimize(Thread*, SP) + blx artDeoptimize @ artDeoptimize(Thread*) END art_quick_deoptimize /* @@ -1219,9 +1201,9 @@ ENTRY art_quick_indexof .cfi_rel_offset r10, 4 .cfi_rel_offset r11, 8 .cfi_rel_offset lr, 12 - ldr r3, [r0, #STRING_COUNT_OFFSET] - ldr r12, [r0, #STRING_OFFSET_OFFSET] - ldr r0, [r0, #STRING_VALUE_OFFSET] + ldr r3, [r0, #MIRROR_STRING_COUNT_OFFSET] + ldr r12, [r0, #MIRROR_STRING_OFFSET_OFFSET] + ldr r0, [r0, #MIRROR_STRING_VALUE_OFFSET] /* Clamp start to [0..count] */ cmp r2, #0 @@ -1232,7 +1214,7 @@ ENTRY art_quick_indexof movgt r2, r3 /* Build a pointer to the start of string data */ - add r0, #STRING_DATA_OFFSET + add r0, #MIRROR_CHAR_ARRAY_DATA_OFFSET add r0, r0, r12, lsl #1 /* Save a copy in r12 to later compute result */ @@ -1341,12 +1323,12 @@ ENTRY art_quick_string_compareto .cfi_rel_offset r12, 24 .cfi_rel_offset lr, 28 - ldr r4, [r2, #STRING_OFFSET_OFFSET] - ldr r9, [r1, #STRING_OFFSET_OFFSET] - ldr r7, [r2, #STRING_COUNT_OFFSET] - ldr r10, [r1, #STRING_COUNT_OFFSET] - ldr r2, [r2, #STRING_VALUE_OFFSET] - ldr r1, [r1, #STRING_VALUE_OFFSET] + ldr r4, [r2, #MIRROR_STRING_OFFSET_OFFSET] + ldr r9, [r1, #MIRROR_STRING_OFFSET_OFFSET] + ldr r7, [r2, #MIRROR_STRING_COUNT_OFFSET] + ldr r10, [r1, #MIRROR_STRING_COUNT_OFFSET] + ldr r2, [r2, #MIRROR_STRING_VALUE_OFFSET] + ldr r1, [r1, #MIRROR_STRING_VALUE_OFFSET] /* * At this point, we have: @@ -1368,8 +1350,8 @@ ENTRY art_quick_string_compareto * Note: data pointers point to previous element so we can use pre-index * mode with base writeback. */ - add r2, #STRING_DATA_OFFSET-2 @ offset to contents[-1] - add r1, #STRING_DATA_OFFSET-2 @ offset to contents[-1] + add r2, #MIRROR_CHAR_ARRAY_DATA_OFFSET-2 @ offset to contents[-1] + add r1, #MIRROR_CHAR_ARRAY_DATA_OFFSET-2 @ offset to contents[-1] /* * At this point we have: diff --git a/runtime/arch/arm64/asm_support_arm64.h b/runtime/arch/arm64/asm_support_arm64.h index a92644996..989ecc6c5 100644 --- a/runtime/arch/arm64/asm_support_arm64.h +++ b/runtime/arch/arm64/asm_support_arm64.h @@ -19,30 +19,8 @@ #include "asm_support.h" -// Note: these callee save methods loads require read barriers. 
-// Offset of field Runtime::callee_save_methods_[kSaveAll] verified in InitCpu -#define RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET 0 -// Offset of field Runtime::callee_save_methods_[kRefsOnly] verified in InitCpu -#define RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET 8 -// Offset of field Runtime::callee_save_methods_[kRefsAndArgs] verified in InitCpu -#define RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET 16 - -// Offset of field Thread::suspend_count_ -#define THREAD_FLAGS_OFFSET 0 -// Offset of field Thread::card_table_ -#define THREAD_CARD_TABLE_OFFSET 120 -// Offset of field Thread::exception_ -#define THREAD_EXCEPTION_OFFSET 128 -// Offset of field Thread::thin_lock_thread_id_ -#define THREAD_ID_OFFSET 12 - #define FRAME_SIZE_SAVE_ALL_CALLEE_SAVE 176 #define FRAME_SIZE_REFS_ONLY_CALLEE_SAVE 96 #define FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE 224 -// Expected size of a heap reference -#define HEAP_REFERENCE_SIZE 4 -// Expected size of a stack reference -#define STACK_REFERENCE_SIZE 4 - #endif // ART_RUNTIME_ARCH_ARM64_ASM_SUPPORT_ARM64_H_ diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S index ab9bf2d34..0fb96d7a9 100644 --- a/runtime/arch/arm64/quick_entrypoints_arm64.S +++ b/runtime/arch/arm64/quick_entrypoints_arm64.S @@ -79,13 +79,16 @@ // Loads appropriate callee-save-method str xIP0, [sp] // Store ArtMethod* Runtime::callee_save_methods_[kRefsAndArgs] + // Place sp in Thread::Current()->top_quick_frame. + mov xIP0, sp + str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET] .endm /* * Macro that sets up the callee save frame to conform with * Runtime::CreateCalleeSaveMethod(kRefsOnly). */ -.macro SETUP_REF_ONLY_CALLEE_SAVE_FRAME +.macro SETUP_REFS_ONLY_CALLEE_SAVE_FRAME adrp xIP0, :got:_ZN3art7Runtime9instance_E ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E] @@ -133,11 +136,14 @@ mov xETR, xSELF // Loads appropriate callee-save-method - str xIP0, [sp] // Store ArtMethod* Runtime::callee_save_methods_[kRefsAndArgs] + str xIP0, [sp] // Store ArtMethod* Runtime::callee_save_methods_[kRefsOnly] + // Place sp in Thread::Current()->top_quick_frame. + mov xIP0, sp + str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET] .endm // TODO: Probably no need to restore registers preserved by aapcs64. -.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME +.macro RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // Restore xSELF. mov xSELF, xETR @@ -170,7 +176,7 @@ .cfi_adjust_cfa_offset -96 .endm -.macro POP_REF_ONLY_CALLEE_SAVE_FRAME +.macro POP_REFS_ONLY_CALLEE_SAVE_FRAME // Restore xSELF as it might be scratched. mov xSELF, xETR // ETR @@ -181,13 +187,13 @@ .cfi_adjust_cfa_offset -96 .endm -.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME +.macro RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME ret .endm -.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL +.macro SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL sub sp, sp, #224 .cfi_adjust_cfa_offset 224 @@ -251,7 +257,7 @@ * * TODO This is probably too conservative - saving FP & LR. */ -.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME +.macro SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME adrp xIP0, :got:_ZN3art7Runtime9instance_E ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E] @@ -260,15 +266,26 @@ // xIP0 = (ArtMethod*) Runtime.instance_.callee_save_methods[kRefAndArgs] . 
THIS_LOAD_REQUIRES_READ_BARRIER - ldr xIP0, [xIP0, RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET ] + ldr xIP0, [xIP0, RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET ] - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL str xIP0, [sp] // Store ArtMethod* Runtime::callee_save_methods_[kRefsAndArgs] + // Place sp in Thread::Current()->top_quick_frame. + mov xIP0, sp + str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET] +.endm + +.macro SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_X0 + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL + str x0, [sp, #0] // Store ArtMethod* to bottom of stack. + // Place sp in Thread::Current()->top_quick_frame. + mov xIP0, sp + str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET] .endm // TODO: Probably no need to restore registers preserved by aapcs64. -.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME +.macro RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME // Restore xSELF. mov xSELF, xETR @@ -340,10 +357,9 @@ .macro DELIVER_PENDING_EXCEPTION SETUP_SAVE_ALL_CALLEE_SAVE_FRAME mov x0, xSELF - mov x1, sp // Point of no return. - b artDeliverPendingExceptionFromCode // artDeliverPendingExceptionFromCode(Thread*, SP) + b artDeliverPendingExceptionFromCode // artDeliverPendingExceptionFromCode(Thread*) brk 0 // Unreached .endm @@ -376,8 +392,7 @@ ENTRY \c_name SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context mov x0, xSELF // pass Thread::Current - mov x1, sp // pass SP - b \cxx_name // \cxx_name(Thread*, SP) + b \cxx_name // \cxx_name(Thread*) END \c_name .endm @@ -386,8 +401,7 @@ END \c_name ENTRY \c_name SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context. mov x1, xSELF // pass Thread::Current. - mov x2, sp // pass SP. - b \cxx_name // \cxx_name(arg, Thread*, SP). + b \cxx_name // \cxx_name(arg, Thread*). brk 0 END \c_name .endm @@ -397,8 +411,7 @@ END \c_name ENTRY \c_name SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context mov x2, xSELF // pass Thread::Current - mov x3, sp // pass SP - b \cxx_name // \cxx_name(arg1, arg2, Thread*, SP) + b \cxx_name // \cxx_name(arg1, arg2, Thread*) brk 0 END \c_name .endm @@ -458,7 +471,7 @@ ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFr .macro INVOKE_TRAMPOLINE c_name, cxx_name .extern \cxx_name ENTRY \c_name - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME // save callee saves in case allocation triggers GC + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME // save callee saves in case allocation triggers GC // Helper signature is always // (method_idx, *this_object, *caller_method, *self, sp) @@ -467,7 +480,7 @@ ENTRY \c_name mov x4, sp bl \cxx_name // (method_idx, this, caller, Thread*, SP) mov xIP0, x1 // save Method*->code_ - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME cbz x0, 1f // did we find the target? if not go to exception delivery br xIP0 // tail call to target 1: @@ -551,7 +564,7 @@ SAVE_SIZE_AND_METHOD=SAVE_SIZE+STACK_REFERENCE_SIZE .macro INVOKE_STUB_CALL_AND_RETURN // load method-> METHOD_QUICK_CODE_OFFSET - ldr x9, [x0 , #METHOD_QUICK_CODE_OFFSET] + ldr x9, [x0 , #MIRROR_ART_METHOD_QUICK_CODE_OFFSET] // Branch to method. 
blr x9 @@ -945,7 +958,7 @@ END art_quick_do_long_jump .extern artLockObjectFromCode ENTRY art_quick_lock_object cbz w0, .Lslow_lock - add x4, x0, #LOCK_WORD_OFFSET // exclusive load/store had no immediate anymore + add x4, x0, #MIRROR_OBJECT_LOCK_WORD_OFFSET // exclusive load/store has no immediate anymore .Lretry_lock: ldr w2, [xSELF, #THREAD_ID_OFFSET] // TODO: Can the thread ID really change during the loop? ldxr w1, [x4] @@ -966,14 +979,13 @@ ENTRY art_quick_lock_object add w2, w1, #65536 // increment count in lock word placing in w2 for storing lsr w1, w2, 30 // if either of the top two bits are set, we overflowed. cbnz w1, .Lslow_lock // if we overflow the count go slow path - str w2, [x0, #LOCK_WORD_OFFSET]// no need for stxr as we hold the lock + str w2, [x0, #MIRROR_OBJECT_LOCK_WORD_OFFSET] // no need for stxr as we hold the lock ret .Lslow_lock: - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save callee saves in case we block + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case we block mov x1, xSELF // pass Thread::Current - mov x2, sp // pass SP - bl artLockObjectFromCode // (Object* obj, Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + bl artLockObjectFromCode // (Object* obj, Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME RETURN_IF_W0_IS_ZERO_OR_DELIVER END art_quick_lock_object @@ -986,7 +998,7 @@ END art_quick_lock_object .extern artUnlockObjectFromCode ENTRY art_quick_unlock_object cbz x0, .Lslow_unlock - ldr w1, [x0, #LOCK_WORD_OFFSET] + ldr w1, [x0, #MIRROR_OBJECT_LOCK_WORD_OFFSET] lsr w2, w1, 30 cbnz w2, .Lslow_unlock // if either of the top two bits are set, go slow path ldr w2, [xSELF, #THREAD_ID_OFFSET] @@ -997,18 +1009,17 @@ ENTRY art_quick_unlock_object bpl .Lrecursive_thin_unlock // transition to unlocked, w3 holds 0 dmb ish // full (LoadStore|StoreStore) memory barrier - str w3, [x0, #LOCK_WORD_OFFSET] + str w3, [x0, #MIRROR_OBJECT_LOCK_WORD_OFFSET] ret .Lrecursive_thin_unlock: sub w1, w1, #65536 - str w1, [x0, #LOCK_WORD_OFFSET] + str w1, [x0, #MIRROR_OBJECT_LOCK_WORD_OFFSET] ret .Lslow_unlock: - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save callee saves in case exception allocation triggers GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case exception allocation triggers GC mov x1, xSELF // pass Thread::Current - mov x2, sp // pass SP - bl artUnlockObjectFromCode // (Object* obj, Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + bl artUnlockObjectFromCode // (Object* obj, Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME RETURN_IF_W0_IS_ZERO_OR_DELIVER END art_quick_unlock_object @@ -1058,8 +1069,7 @@ ENTRY art_quick_check_cast SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context mov x2, xSELF // pass Thread::Current - mov x3, sp // pass SP - b artThrowClassCastException // (Class*, Class*, Thread*, SP) + b artThrowClassCastException // (Class*, Class*, Thread*) brk 0 // We should not return here... 
 END art_quick_check_cast
@@ -1082,7 +1092,7 @@ ENTRY art_quick_aput_obj_with_null_and_bound_check
 END art_quick_aput_obj_with_null_and_bound_check

 ENTRY art_quick_aput_obj_with_bound_check
-    ldr w3, [x0, #ARRAY_LENGTH_OFFSET]
+    ldr w3, [x0, #MIRROR_ARRAY_LENGTH_OFFSET]
     cmp w3, w1
     bhi art_quick_aput_obj
     mov x0, x1
@@ -1092,16 +1102,16 @@ END art_quick_aput_obj_with_bound_check

 ENTRY art_quick_aput_obj
     cbz x2, .Ldo_aput_null
-    ldr w3, [x0, #CLASS_OFFSET]                          // Heap reference = 32b
+    ldr w3, [x0, #MIRROR_OBJECT_CLASS_OFFSET]            // Heap reference = 32b
                                                          // This also zero-extends to x3
-    ldr w4, [x2, #CLASS_OFFSET]                          // Heap reference = 32b
+    ldr w4, [x2, #MIRROR_OBJECT_CLASS_OFFSET]            // Heap reference = 32b
                                                          // This also zero-extends to x4
-    ldr w3, [x3, #CLASS_COMPONENT_TYPE_OFFSET]           // Heap reference = 32b
+    ldr w3, [x3, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET]    // Heap reference = 32b
                                                          // This also zero-extends to x3
     cmp w3, w4  // value's type == array's component type - trivial assignability
     bne .Lcheck_assignability
 .Ldo_aput:
-    add x3, x0, #OBJECT_ARRAY_DATA_OFFSET
+    add x3, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET
                                                          // "Compress" = do nothing
     str w2, [x3, x1, lsl #2]                             // Heap reference = 32b
     ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET]
@@ -1109,7 +1119,7 @@ ENTRY art_quick_aput_obj
     strb w3, [x3, x0]
     ret
 .Ldo_aput_null:
-    add x3, x0, #OBJECT_ARRAY_DATA_OFFSET
+    add x3, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET
                                                          // "Compress" = do nothing
     str w2, [x3, x1, lsl #2]                             // Heap reference = 32b
     ret
@@ -1146,7 +1156,7 @@ ENTRY art_quick_aput_obj
     add sp, sp, #48
     .cfi_adjust_cfa_offset -48

-    add x3, x0, #OBJECT_ARRAY_DATA_OFFSET
+    add x3, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET
                                                          // "Compress" = do nothing
     str w2, [x3, x1, lsl #2]                             // Heap reference = 32b
     ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET]
@@ -1168,8 +1178,7 @@ ENTRY art_quick_aput_obj
     SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
     mov x1, x2                    // Pass value.
     mov x2, xSELF                 // Pass Thread::Current.
-    mov x3, sp                    // Pass SP.
-    b artThrowArrayStoreException // (Object*, Object*, Thread*, SP).
+    b artThrowArrayStoreException // (Object*, Object*, Thread*).
     brk 0                         // Unreached.
END art_quick_aput_obj @@ -1177,11 +1186,10 @@ END art_quick_aput_obj .macro TWO_ARG_DOWNCALL name, entrypoint, return .extern \entrypoint ENTRY \name - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC mov x2, xSELF // pass Thread::Current - mov x3, sp // pass SP - bl \entrypoint // (uint32_t type_idx, Method* method, Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + bl \entrypoint // (uint32_t type_idx, Method* method, Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME \return DELIVER_PENDING_EXCEPTION END \name @@ -1191,11 +1199,10 @@ END \name .macro THREE_ARG_DOWNCALL name, entrypoint, return .extern \entrypoint ENTRY \name - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC mov x3, xSELF // pass Thread::Current - mov x4, sp // pass SP bl \entrypoint - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME \return DELIVER_PENDING_EXCEPTION END \name @@ -1205,12 +1212,11 @@ END \name .macro ONE_ARG_REF_DOWNCALL name, entrypoint, return .extern \entrypoint ENTRY \name - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC ldr w1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer mov x2, xSELF // pass Thread::Current - mov x3, sp // pass SP bl \entrypoint // (uint32_t type_idx, Method* method, Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME \return END \name .endm @@ -1218,12 +1224,11 @@ END \name .macro TWO_ARG_REF_DOWNCALL name, entrypoint, return .extern \entrypoint ENTRY \name - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC ldr w2, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer mov x3, xSELF // pass Thread::Current - mov x4, sp // pass SP bl \entrypoint - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME \return END \name .endm @@ -1231,12 +1236,11 @@ END \name .macro THREE_ARG_REF_DOWNCALL name, entrypoint, return .extern \entrypoint ENTRY \name - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC ldr w3, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer mov x4, xSELF // pass Thread::Current - mov x5, sp // pass SP bl \entrypoint - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME \return END \name .endm @@ -1287,14 +1291,13 @@ THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RE // This is separated out as the argument order is different. 
.extern artSet64StaticFromCode ENTRY art_quick_set64_static - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC mov x3, x1 // Store value ldr w1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer mov x2, x3 // Put value param mov x3, xSELF // pass Thread::Current - mov x4, sp // pass SP bl artSet64StaticFromCode - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME RETURN_IF_W0_IS_ZERO_OR_DELIVER END art_quick_set64_static @@ -1320,18 +1323,16 @@ ENTRY art_quick_test_suspend ret // return if flags == 0 .Lneed_suspend: mov x0, xSELF - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save callee saves for stack crawl - mov x1, sp - bl artTestSuspendFromCode // (Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves for stack crawl + bl artTestSuspendFromCode // (Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN END art_quick_test_suspend ENTRY art_quick_implicit_suspend mov x0, xSELF - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save callee saves for stack crawl - mov x1, sp - bl artTestSuspendFromCode // (Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves for stack crawl + bl artTestSuspendFromCode // (Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN END art_quick_implicit_suspend /* @@ -1341,19 +1342,18 @@ END art_quick_implicit_suspend */ .extern artQuickProxyInvokeHandler ENTRY art_quick_proxy_invoke_handler - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME - str x0, [sp, #0] // place proxy method at bottom of frame + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_X0 mov x2, xSELF // pass Thread::Current mov x3, sp // pass SP bl artQuickProxyInvokeHandler // (Method* proxy method, receiver, Thread*, SP) // Use xETR as xSELF might be scratched by native function above. ldr x2, [xETR, THREAD_EXCEPTION_OFFSET] cbnz x2, .Lexception_in_proxy // success if no exception is pending - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME // Restore frame + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME // Restore frame fmov d0, x0 // Store result in d0 in case it was float or double ret // return on success .Lexception_in_proxy: - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME DELIVER_PENDING_EXCEPTION END art_quick_proxy_invoke_handler @@ -1363,24 +1363,24 @@ END art_quick_proxy_invoke_handler */ ENTRY art_quick_imt_conflict_trampoline ldr w0, [sp, #0] // load caller Method* - ldr w0, [x0, #METHOD_DEX_CACHE_METHODS_OFFSET] // load dex_cache_resolved_methods - add x0, x0, #OBJECT_ARRAY_DATA_OFFSET // get starting address of data + ldr w0, [x0, #MIRROR_ART_METHOD_DEX_CACHE_METHODS_OFFSET] // load dex_cache_resolved_methods + add x0, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET // get starting address of data ldr w0, [x0, xIP1, lsl 2] // load the target method b art_quick_invoke_interface_trampoline END art_quick_imt_conflict_trampoline ENTRY art_quick_resolution_trampoline - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME mov x2, xSELF mov x3, sp bl artQuickResolutionTrampoline // (called, receiver, Thread*, SP) cbz x0, 1f mov xIP0, x0 // Remember returned code pointer in xIP0. ldr w0, [sp, #0] // artQuickResolutionTrampoline puts called method in *SP. 
- RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME br xIP0 1: - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME DELIVER_PENDING_EXCEPTION END art_quick_resolution_trampoline @@ -1439,8 +1439,7 @@ END art_quick_resolution_trampoline * Called to do a generic JNI down-call */ ENTRY art_quick_generic_jni_trampoline - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL - str x0, [sp, #0] // Store native ArtMethod* to bottom of stack. + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_X0 // Save SP , so we can have static CFI info. mov x28, sp @@ -1513,7 +1512,7 @@ ENTRY art_quick_generic_jni_trampoline cbnz x1, .Lexception_in_native // Tear down the callee-save frame. - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME // store into fpr, for when it's a fpr return... fmov d0, x0 @@ -1523,7 +1522,7 @@ ENTRY art_quick_generic_jni_trampoline mov sp, x28 .cfi_def_cfa_register sp .Lexception_in_native: - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME DELIVER_PENDING_EXCEPTION END art_quick_generic_jni_trampoline @@ -1535,7 +1534,7 @@ END art_quick_generic_jni_trampoline * x1..x7, d0..d7 = arguments to that method. */ ENTRY art_quick_to_interpreter_bridge - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME // Set up frame and save arguments. + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME // Set up frame and save arguments. // x0 will contain mirror::ArtMethod* method. mov x1, xSELF // How to get Thread::Current() ??? @@ -1545,7 +1544,7 @@ ENTRY art_quick_to_interpreter_bridge // mirror::ArtMethod** sp) bl artQuickToInterpreterBridge - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME // TODO: no need to restore arguments in this case. + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME // TODO: no need to restore arguments in this case. fmov d0, x0 @@ -1558,19 +1557,18 @@ END art_quick_to_interpreter_bridge // .extern artInstrumentationMethodEntryFromCode ENTRY art_quick_instrumentation_entry - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME mov x20, x0 // Preserve method reference in a callee-save. mov x2, xSELF - mov x3, sp - mov x4, xLR - bl artInstrumentationMethodEntryFromCode // (Method*, Object*, Thread*, SP, LR) + mov x3, xLR + bl artInstrumentationMethodEntryFromCode // (Method*, Object*, Thread*, LR) mov xIP0, x0 // x0 = result of call. mov x0, x20 // Reload method reference. - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME // Note: will restore xSELF + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME // Note: will restore xSELF adr xLR, art_quick_instrumentation_exit br xIP0 // Tail-call method with lr set to art_quick_instrumentation_exit. END art_quick_instrumentation_entry @@ -1579,7 +1577,7 @@ END art_quick_instrumentation_entry ENTRY art_quick_instrumentation_exit mov xLR, #0 // Clobber LR for later checks. - SETUP_REF_ONLY_CALLEE_SAVE_FRAME + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // We need to save x0 and d0. We could use a callee-save from SETUP_REF_ONLY, but then // we would need to fully restore it. As there are a lot of callee-save registers, it seems @@ -1602,7 +1600,7 @@ ENTRY art_quick_instrumentation_exit ldr x0, [sp], 16 // Restore integer result, and drop stack area. .cfi_adjust_cfa_offset 16 - POP_REF_ONLY_CALLEE_SAVE_FRAME + POP_REFS_ONLY_CALLEE_SAVE_FRAME br xIP0 // Tail-call out. END art_quick_instrumentation_exit @@ -1615,8 +1613,7 @@ END art_quick_instrumentation_exit ENTRY art_quick_deoptimize SETUP_SAVE_ALL_CALLEE_SAVE_FRAME mov x0, xSELF // Pass thread. 
- mov x1, sp // Pass SP. - bl artDeoptimize // artDeoptimize(Thread*, SP) + bl artDeoptimize // artDeoptimize(Thread*) brk 0 END art_quick_deoptimize @@ -1631,9 +1628,9 @@ END art_quick_deoptimize * w2: Starting offset in string data */ ENTRY art_quick_indexof - ldr w3, [x0, #STRING_COUNT_OFFSET] - ldr w4, [x0, #STRING_OFFSET_OFFSET] - ldr w0, [x0, #STRING_VALUE_OFFSET] // x0 ? + ldr w3, [x0, #MIRROR_STRING_COUNT_OFFSET] + ldr w4, [x0, #MIRROR_STRING_OFFSET_OFFSET] + ldr w0, [x0, #MIRROR_STRING_VALUE_OFFSET] // x0 ? /* Clamp start to [0..count] */ cmp w2, #0 @@ -1642,7 +1639,7 @@ ENTRY art_quick_indexof csel w2, w3, w2, gt /* Build a pointer to the start of the string data */ - add x0, x0, #STRING_DATA_OFFSET + add x0, x0, #MIRROR_CHAR_ARRAY_DATA_OFFSET add x0, x0, x4, lsl #1 /* Save a copy to compute result */ @@ -1736,12 +1733,12 @@ ENTRY art_quick_string_compareto ret 1: // Different string objects. - ldr w6, [x2, #STRING_OFFSET_OFFSET] - ldr w5, [x1, #STRING_OFFSET_OFFSET] - ldr w4, [x2, #STRING_COUNT_OFFSET] - ldr w3, [x1, #STRING_COUNT_OFFSET] - ldr w2, [x2, #STRING_VALUE_OFFSET] - ldr w1, [x1, #STRING_VALUE_OFFSET] + ldr w6, [x2, #MIRROR_STRING_OFFSET_OFFSET] + ldr w5, [x1, #MIRROR_STRING_OFFSET_OFFSET] + ldr w4, [x2, #MIRROR_STRING_COUNT_OFFSET] + ldr w3, [x1, #MIRROR_STRING_COUNT_OFFSET] + ldr w2, [x2, #MIRROR_STRING_VALUE_OFFSET] + ldr w1, [x1, #MIRROR_STRING_VALUE_OFFSET] /* * Now: CharArray* Offset Count @@ -1761,8 +1758,8 @@ ENTRY art_quick_string_compareto add x1, x1, w5, sxtw #1 // Add offset in CharArray to array. - add x2, x2, #STRING_DATA_OFFSET - add x1, x1, #STRING_DATA_OFFSET + add x2, x2, #MIRROR_CHAR_ARRAY_DATA_OFFSET + add x1, x1, #MIRROR_CHAR_ARRAY_DATA_OFFSET // TODO: Tune this value. // Check for long string, do memcmp16 for them. diff --git a/runtime/arch/mips/asm_support_mips.S b/runtime/arch/mips/asm_support_mips.S index d8ec9cd6c..0d18f1a9b 100644 --- a/runtime/arch/mips/asm_support_mips.S +++ b/runtime/arch/mips/asm_support_mips.S @@ -26,15 +26,31 @@ // Register holding Thread::Current(). #define rSELF $s1 - - /* Cache alignment for function entry */ + // Declare a function called name, sets up $gp. .macro ENTRY name .type \name, %function .global \name + // Cache alignment for function entry. .balign 16 \name: .cfi_startproc - /* Ensure we get a sane starting CFA. */ + // Ensure we get a sane starting CFA. + .cfi_def_cfa $sp,0 + // Load $gp. We expect that ".set noreorder" is in effect. + .cpload $t9 + // Declare a local convenience label to be branched to when $gp is already set up. +.L\name\()_gp_set: +.endm + + // Declare a function called name, doesn't set up $gp. +.macro ENTRY_NO_GP name + .type \name, %function + .global \name + // Cache alignment for function entry. + .balign 16 +\name: + .cfi_startproc + // Ensure we get a sane starting CFA. 
.cfi_def_cfa $sp,0 .endm @@ -43,11 +59,6 @@ .size \name, .-\name .endm - /* Generates $gp for function calls */ -.macro GENERATE_GLOBAL_POINTER - .cpload $t9 -.endm - .macro UNIMPLEMENTED name ENTRY \name break diff --git a/runtime/arch/mips/asm_support_mips.h b/runtime/arch/mips/asm_support_mips.h index 6add93b40..5bece18ea 100644 --- a/runtime/arch/mips/asm_support_mips.h +++ b/runtime/arch/mips/asm_support_mips.h @@ -19,18 +19,8 @@ #include "asm_support.h" -// Offset of field Thread::tls32_.state_and_flags verified in InitCpu -#define THREAD_FLAGS_OFFSET 0 -// Offset of field Thread::tlsPtr_.card_table verified in InitCpu -#define THREAD_CARD_TABLE_OFFSET 120 -// Offset of field Thread::tlsPtr_.exception verified in InitCpu -#define THREAD_EXCEPTION_OFFSET 124 - #define FRAME_SIZE_SAVE_ALL_CALLEE_SAVE 64 #define FRAME_SIZE_REFS_ONLY_CALLEE_SAVE 64 #define FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE 64 -// Expected size of a heap reference -#define HEAP_REFERENCE_SIZE 4 - #endif // ART_RUNTIME_ARCH_MIPS_ASM_SUPPORT_MIPS_H_ diff --git a/runtime/arch/mips/jni_entrypoints_mips.S b/runtime/arch/mips/jni_entrypoints_mips.S index e5f4a7923..9a79467ed 100644 --- a/runtime/arch/mips/jni_entrypoints_mips.S +++ b/runtime/arch/mips/jni_entrypoints_mips.S @@ -24,7 +24,6 @@ */ .extern artFindNativeMethod ENTRY art_jni_dlsym_lookup_stub - GENERATE_GLOBAL_POINTER addiu $sp, $sp, -32 # leave room for $a0, $a1, $a2, $a3, and $ra .cfi_adjust_cfa_offset 32 sw $ra, 16($sp) diff --git a/runtime/arch/mips/memcmp16_mips.S b/runtime/arch/mips/memcmp16_mips.S index 0196edc2c..aef81afec 100644 --- a/runtime/arch/mips/memcmp16_mips.S +++ b/runtime/arch/mips/memcmp16_mips.S @@ -20,7 +20,7 @@ #include "asm_support_mips.S" // u4 __memcmp16(const u2*, const u2*, size_t); -ENTRY __memcmp16 +ENTRY_NO_GP __memcmp16 li $t0,0 li $t1,0 beqz $a2,done /* 0 length string */ diff --git a/runtime/arch/mips/portable_entrypoints_mips.S b/runtime/arch/mips/portable_entrypoints_mips.S index a171a1d6c..d7e7a8e96 100644 --- a/runtime/arch/mips/portable_entrypoints_mips.S +++ b/runtime/arch/mips/portable_entrypoints_mips.S @@ -21,7 +21,6 @@ .extern artPortableProxyInvokeHandler ENTRY art_portable_proxy_invoke_handler - GENERATE_GLOBAL_POINTER # Fake callee save ref and args frame set up, note portable doesn't use callee save frames. # TODO: just save the registers that are needed in artPortableProxyInvokeHandler. addiu $sp, $sp, -64 @@ -72,7 +71,6 @@ END art_portable_proxy_invoke_handler * [sp + 20] = result type char */ ENTRY art_portable_invoke_stub - GENERATE_GLOBAL_POINTER sw $a0, 0($sp) # save out a0 addiu $sp, $sp, -16 # spill s0, s1, fp, ra .cfi_adjust_cfa_offset 16 @@ -87,7 +85,7 @@ ENTRY art_portable_invoke_stub move $fp, $sp # save sp in fp .cfi_def_cfa_register 30 move $s1, $a3 # move managed thread pointer into s1 - addiu $s0, $zero, SUSPEND_CHECK_INTERVAL # reset s0 to suspend check interval + addiu $s0, $zero, SUSPEND_CHECK_INTERVAL # reset s0 to suspend check interval. TODO: unused? 
addiu $t0, $a2, 16 # create space for method pointer in frame srl $t0, $t0, 3 # shift the frame size right 3 sll $t0, $t0, 3 # shift the frame size left 3 to align to 16 bytes @@ -100,7 +98,7 @@ ENTRY art_portable_invoke_stub lw $a1, 4($sp) # copy arg value for a1 lw $a2, 8($sp) # copy arg value for a2 lw $a3, 12($sp) # copy arg value for a3 - lw $t9, METHOD_PORTABLE_CODE_OFFSET($a0) # get pointer to the code + lw $t9, MIRROR_ART_METHOD_PORTABLE_CODE_OFFSET($a0) # get pointer to the code jalr $t9 # call the method sw $zero, 0($sp) # store NULL for method* at bottom of frame move $sp, $fp # restore the stack diff --git a/runtime/arch/mips/quick_entrypoints_mips.S b/runtime/arch/mips/quick_entrypoints_mips.S index 609c65a38..905b8676b 100644 --- a/runtime/arch/mips/quick_entrypoints_mips.S +++ b/runtime/arch/mips/quick_entrypoints_mips.S @@ -29,7 +29,8 @@ /* * Macro that sets up the callee save frame to conform with * Runtime::CreateCalleeSaveMethod(kSaveAll) - * callee-save: $s0-$s8 + $gp + $ra, 11 total + 1 word padding + 4 open words for args + * Callee-save: $s0-$s8 + $gp + $ra, 11 total + 1 word padding + 4 open words for args + * Clobbers $t0 and $gp */ .macro SETUP_SAVE_ALL_CALLEE_SAVE_FRAME addiu $sp, $sp, -64 @@ -63,6 +64,12 @@ sw $s0, 20($sp) .cfi_rel_offset 16, 20 # 1 word for alignment, 4 open words for args $a0-$a3, bottom will hold Method* + + ld $t0, _ZN3art7Runtime9instance_E + THIS_LOAD_REQUIRES_READ_BARRIER + ld $t0, RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET($t0) + sw $t0, 0($sp) # Place Method* at bottom of stack. + sw $sp, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF) # Place sp in Thread::Current()->top_quick_frame. .endm /* @@ -71,7 +78,7 @@ * Does not include rSUSPEND or rSELF * callee-save: $s2-$s8 + $gp + $ra, 9 total + 3 words padding + 4 open words for args */ -.macro SETUP_REF_ONLY_CALLEE_SAVE_FRAME +.macro SETUP_REFS_ONLY_CALLEE_SAVE_FRAME addiu $sp, $sp, -64 .cfi_adjust_cfa_offset 64 @@ -99,9 +106,15 @@ sw $s2, 28($sp) .cfi_rel_offset 18, 28 # 3 words for alignment and extra args, 4 open words for args $a0-$a3, bottom will hold Method* + + ld $t0, _ZN3art7Runtime9instance_E + THIS_LOAD_REQUIRES_READ_BARRIER + ld $t0, RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET($t0) + sw $t0, 0($sp) # Place Method* at bottom of stack. + sw $sp, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF) # Place sp in Thread::Current()->top_quick_frame. .endm -.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME +.macro RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME lw $ra, 60($sp) .cfi_restore 31 lw $s8, 56($sp) @@ -124,7 +137,7 @@ .cfi_adjust_cfa_offset -64 .endm -.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN +.macro RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN lw $ra, 60($sp) .cfi_restore 31 lw $s8, 56($sp) @@ -153,7 +166,7 @@ * Runtime::CreateCalleeSaveMethod(kRefsAndArgs). Restoration assumes non-moving GC. * callee-save: $a1-$a3, $s2-$s8 + $gp + $ra, 12 total + 3 words padding + method* */ -.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME +.macro SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME addiu $sp, $sp, -64 .cfi_adjust_cfa_offset 64 @@ -187,9 +200,15 @@ sw $a1, 4($sp) .cfi_rel_offset 5, 4 # bottom will hold Method* + + ld $t0, _ZN3art7Runtime9instance_E + THIS_LOAD_REQUIRES_READ_BARRIER + ld $t0, RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET($t0) + sw $t0, 0($sp) # Place Method* at bottom of stack. + sw $sp, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF) # Place sp in Thread::Current()->top_quick_frame. 
.endm -.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME +.macro RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME lw $ra, 60($sp) .cfi_restore 31 lw $s8, 56($sp) @@ -224,15 +243,14 @@ */ .macro DELIVER_PENDING_EXCEPTION SETUP_SAVE_ALL_CALLEE_SAVE_FRAME # save callee saves for throw - move $a0, rSELF # pass Thread::Current la $t9, artDeliverPendingExceptionFromCode - jr $t9 # artDeliverPendingExceptionFromCode(Thread*, $sp) - move $a1, $sp # pass $sp + jr $t9 # artDeliverPendingExceptionFromCode(Thread*) + move $a0, rSELF # pass Thread::Current .endm .macro RETURN_IF_NO_EXCEPTION lw $t0, THREAD_EXCEPTION_OFFSET(rSELF) # load Thread::Current()->exception_ - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME bnez $t0, 1f # success if no exception is pending nop jr $ra @@ -242,7 +260,7 @@ .endm .macro RETURN_IF_ZERO - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME bnez $v0, 1f # success? nop jr $ra # return on success @@ -252,7 +270,7 @@ .endm .macro RETURN_IF_RESULT_IS_NON_ZERO - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME beqz $v0, 1f # success? nop jr $ra # return on success @@ -342,12 +360,10 @@ END art_quick_do_long_jump * the bottom of the thread. On entry r0 holds Throwable* */ ENTRY art_quick_deliver_exception - GENERATE_GLOBAL_POINTER SETUP_SAVE_ALL_CALLEE_SAVE_FRAME - move $a1, rSELF # pass Thread::Current la $t9, artDeliverExceptionFromCode - jr $t9 # artDeliverExceptionFromCode(Throwable*, Thread*, $sp) - move $a2, $sp # pass $sp + jr $t9 # artDeliverExceptionFromCode(Throwable*, Thread*) + move $a1, rSELF # pass Thread::Current END art_quick_deliver_exception /* @@ -355,13 +371,10 @@ END art_quick_deliver_exception */ .extern artThrowNullPointerExceptionFromCode ENTRY art_quick_throw_null_pointer_exception - GENERATE_GLOBAL_POINTER -.Lart_quick_throw_null_pointer_exception_gp_set: SETUP_SAVE_ALL_CALLEE_SAVE_FRAME - move $a0, rSELF # pass Thread::Current la $t9, artThrowNullPointerExceptionFromCode - jr $t9 # artThrowNullPointerExceptionFromCode(Thread*, $sp) - move $a1, $sp # pass $sp + jr $t9 # artThrowNullPointerExceptionFromCode(Thread*) + move $a0, rSELF # pass Thread::Current END art_quick_throw_null_pointer_exception /* @@ -369,12 +382,10 @@ END art_quick_throw_null_pointer_exception */ .extern artThrowDivZeroFromCode ENTRY art_quick_throw_div_zero - GENERATE_GLOBAL_POINTER SETUP_SAVE_ALL_CALLEE_SAVE_FRAME - move $a0, rSELF # pass Thread::Current la $t9, artThrowDivZeroFromCode - jr $t9 # artThrowDivZeroFromCode(Thread*, $sp) - move $a1, $sp # pass $sp + jr $t9 # artThrowDivZeroFromCode(Thread*) + move $a0, rSELF # pass Thread::Current END art_quick_throw_div_zero /* @@ -382,13 +393,10 @@ END art_quick_throw_div_zero */ .extern artThrowArrayBoundsFromCode ENTRY art_quick_throw_array_bounds - GENERATE_GLOBAL_POINTER -.Lart_quick_throw_array_bounds_gp_set: SETUP_SAVE_ALL_CALLEE_SAVE_FRAME - move $a2, rSELF # pass Thread::Current la $t9, artThrowArrayBoundsFromCode - jr $t9 # artThrowArrayBoundsFromCode(index, limit, Thread*, $sp) - move $a3, $sp # pass $sp + jr $t9 # artThrowArrayBoundsFromCode(index, limit, Thread*) + move $a2, rSELF # pass Thread::Current END art_quick_throw_array_bounds /* @@ -396,12 +404,10 @@ END art_quick_throw_array_bounds */ .extern artThrowStackOverflowFromCode ENTRY art_quick_throw_stack_overflow - GENERATE_GLOBAL_POINTER SETUP_SAVE_ALL_CALLEE_SAVE_FRAME - move $a0, rSELF # pass Thread::Current la $t9, artThrowStackOverflowFromCode - jr $t9 # artThrowStackOverflowFromCode(Thread*, $sp) - 
move $a1, $sp # pass $sp + jr $t9 # artThrowStackOverflowFromCode(Thread*) + move $a0, rSELF # pass Thread::Current END art_quick_throw_stack_overflow /* @@ -409,12 +415,10 @@ END art_quick_throw_stack_overflow */ .extern artThrowNoSuchMethodFromCode ENTRY art_quick_throw_no_such_method - GENERATE_GLOBAL_POINTER SETUP_SAVE_ALL_CALLEE_SAVE_FRAME - move $a1, rSELF # pass Thread::Current la $t9, artThrowNoSuchMethodFromCode - jr $t9 # artThrowNoSuchMethodFromCode(method_idx, Thread*, $sp) - move $a2, $sp # pass $sp + jr $t9 # artThrowNoSuchMethodFromCode(method_idx, Thread*) + move $a1, rSELF # pass Thread::Current END art_quick_throw_no_such_method /* @@ -436,9 +440,8 @@ END art_quick_throw_no_such_method .macro INVOKE_TRAMPOLINE c_name, cxx_name .extern \cxx_name ENTRY \c_name - GENERATE_GLOBAL_POINTER - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME # save callee saves in case allocation triggers GC - lw $a2, 64($sp) # pass caller Method* + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME # save callee saves in case allocation triggers GC + lw $a2, FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE($sp) # pass caller Method* move $t0, $sp # save $sp addiu $sp, $sp, -32 # make space for extra args .cfi_adjust_cfa_offset 32 @@ -450,7 +453,7 @@ ENTRY \c_name .cfi_adjust_cfa_offset -32 move $a0, $v0 # save target Method* move $t9, $v1 # save $v0->code_ - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME beqz $v0, 1f nop jr $t9 @@ -479,7 +482,6 @@ INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvo * [sp + 20] = shorty */ ENTRY art_quick_invoke_stub - GENERATE_GLOBAL_POINTER sw $a0, 0($sp) # save out a0 addiu $sp, $sp, -16 # spill s0, s1, fp, ra .cfi_adjust_cfa_offset 16 @@ -507,7 +509,7 @@ ENTRY art_quick_invoke_stub lw $a1, 4($sp) # copy arg value for a1 lw $a2, 8($sp) # copy arg value for a2 lw $a3, 12($sp) # copy arg value for a3 - lw $t9, METHOD_QUICK_CODE_OFFSET($a0) # get pointer to the code + lw $t9, MIRROR_ART_METHOD_QUICK_CODE_OFFSET($a0) # get pointer to the code jalr $t9 # call the method sw $zero, 0($sp) # store NULL for method* at bottom of frame move $sp, $fp # restore the stack @@ -543,12 +545,10 @@ END art_quick_invoke_stub */ .extern artHandleFillArrayDataFromCode ENTRY art_quick_handle_fill_data - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case exception allocation triggers GC - lw $a2, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case exception allocation triggers GC + lw $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artHandleFillArrayDataFromCode # (payload offset, Array*, method, Thread*) move $a3, rSELF # pass Thread::Current - jal artHandleFillArrayDataFromCode # (payload offset, Array*, method, Thread*, $sp) - sw $sp, 16($sp) # pass $sp RETURN_IF_ZERO END art_quick_handle_fill_data @@ -557,13 +557,11 @@ END art_quick_handle_fill_data */ .extern artLockObjectFromCode ENTRY art_quick_lock_object - GENERATE_GLOBAL_POINTER beqz $a0, .Lart_quick_throw_null_pointer_exception_gp_set nop - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case we block + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case we block + jal artLockObjectFromCode # (Object* obj, Thread*) move $a1, rSELF # pass Thread::Current - jal artLockObjectFromCode # (Object* obj, Thread*, $sp) - move $a2, $sp # pass $sp RETURN_IF_ZERO END art_quick_lock_object @@ -572,13 +570,11 @@ END art_quick_lock_object */ .extern artUnlockObjectFromCode ENTRY 
art_quick_unlock_object - GENERATE_GLOBAL_POINTER beqz $a0, .Lart_quick_throw_null_pointer_exception_gp_set nop - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case exception allocation triggers GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case exception allocation triggers GC + jal artUnlockObjectFromCode # (Object* obj, Thread*) move $a1, rSELF # pass Thread::Current - jal artUnlockObjectFromCode # (Object* obj, Thread*, $sp) - move $a2, $sp # pass $sp RETURN_IF_ZERO END art_quick_unlock_object @@ -587,7 +583,6 @@ END art_quick_unlock_object */ .extern artThrowClassCastException ENTRY art_quick_check_cast - GENERATE_GLOBAL_POINTER addiu $sp, $sp, -16 .cfi_adjust_cfa_offset 16 sw $ra, 12($sp) @@ -609,10 +604,9 @@ ENTRY art_quick_check_cast addiu $sp, $sp, 16 .cfi_adjust_cfa_offset -16 SETUP_SAVE_ALL_CALLEE_SAVE_FRAME - move $a2, rSELF # pass Thread::Current la $t9, artThrowClassCastException - jr $t9 # artThrowClassCastException (Class*, Class*, Thread*, SP) - move $a3, $sp # pass $sp + jr $t9 # artThrowClassCastException (Class*, Class*, Thread*) + move $a2, rSELF # pass Thread::Current END art_quick_check_cast /* @@ -621,7 +615,6 @@ END art_quick_check_cast * a0 = array, a1 = index, a2 = value */ ENTRY art_quick_aput_obj_with_null_and_bound_check - GENERATE_GLOBAL_POINTER bnez $a0, .Lart_quick_aput_obj_with_bound_check_gp_set nop b .Lart_quick_throw_null_pointer_exception_gp_set @@ -629,9 +622,7 @@ ENTRY art_quick_aput_obj_with_null_and_bound_check END art_quick_aput_obj_with_null_and_bound_check ENTRY art_quick_aput_obj_with_bound_check - GENERATE_GLOBAL_POINTER -.Lart_quick_aput_obj_with_bound_check_gp_set: - lw $t0, ARRAY_LENGTH_OFFSET($a0) + lw $t0, MIRROR_ARRAY_LENGTH_OFFSET($a0) sltu $t1, $a1, $t0 bnez $t1, .Lart_quick_aput_obj_gp_set nop @@ -641,19 +632,17 @@ ENTRY art_quick_aput_obj_with_bound_check END art_quick_aput_obj_with_bound_check ENTRY art_quick_aput_obj - GENERATE_GLOBAL_POINTER -.Lart_quick_aput_obj_gp_set: beqz $a2, .Ldo_aput_null nop - lw $t0, CLASS_OFFSET($a0) - lw $t1, CLASS_OFFSET($a2) - lw $t0, CLASS_COMPONENT_TYPE_OFFSET($t0) + lw $t0, MIRROR_OBJECT_CLASS_OFFSET($a0) + lw $t1, MIRROR_OBJECT_CLASS_OFFSET($a2) + lw $t0, MIRROR_CLASS_COMPONENT_TYPE_OFFSET($t0) bne $t1, $t0, .Lcheck_assignability # value's type == array's component type - trivial assignability nop .Ldo_aput: sll $a1, $a1, 2 add $t0, $a0, $a1 - sw $a2, OBJECT_ARRAY_DATA_OFFSET($t0) + sw $a2, MIRROR_OBJECT_ARRAY_DATA_OFFSET($t0) lw $t0, THREAD_CARD_TABLE_OFFSET(rSELF) srl $t1, $a0, 7 add $t1, $t1, $t0 @@ -663,7 +652,7 @@ ENTRY art_quick_aput_obj .Ldo_aput_null: sll $a1, $a1, 2 add $t0, $a0, $a1 - sw $a2, OBJECT_ARRAY_DATA_OFFSET($t0) + sw $a2, MIRROR_OBJECT_ARRAY_DATA_OFFSET($t0) jr $ra nop .Lcheck_assignability: @@ -690,10 +679,9 @@ ENTRY art_quick_aput_obj nop SETUP_SAVE_ALL_CALLEE_SAVE_FRAME move $a1, $a2 - move $a2, rSELF # pass Thread::Current la $t9, artThrowArrayStoreException - jr $t9 # artThrowArrayStoreException(Class*, Class*, Thread*, SP) - move $a3, $sp # pass $sp + jr $t9 # artThrowArrayStoreException(Class*, Class*, Thread*) + move $a2, rSELF # pass Thread::Current END art_quick_aput_obj /* @@ -703,12 +691,10 @@ END art_quick_aput_obj */ .extern artInitializeStaticStorageFromCode ENTRY art_quick_initialize_static_storage - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - move $a2, rSELF # pass Thread::Current - # artInitializeStaticStorageFromCode(uint32_t type_idx, Method* referrer, Thread*, $sp) + 
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + # artInitializeStaticStorageFromCode(uint32_t type_idx, Method* referrer, Thread*) jal artInitializeStaticStorageFromCode - move $a3, $sp # pass $sp + move $a2, rSELF # pass Thread::Current RETURN_IF_RESULT_IS_NON_ZERO END art_quick_initialize_static_storage @@ -717,12 +703,10 @@ END art_quick_initialize_static_storage */ .extern artInitializeTypeFromCode ENTRY art_quick_initialize_type - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - move $a2, rSELF # pass Thread::Current - # artInitializeTypeFromCode(uint32_t type_idx, Method* referrer, Thread*, $sp) + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + # artInitializeTypeFromCode(uint32_t type_idx, Method* referrer, Thread*) jal artInitializeTypeFromCode - move $a3, $sp # pass $sp + move $a2, rSELF # pass Thread::Current RETURN_IF_RESULT_IS_NON_ZERO END art_quick_initialize_type @@ -732,12 +716,10 @@ END art_quick_initialize_type */ .extern artInitializeTypeAndVerifyAccessFromCode ENTRY art_quick_initialize_type_and_verify_access - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - move $a2, rSELF # pass Thread::Current - # artInitializeTypeFromCode(uint32_t type_idx, Method* referrer, Thread*, $sp) + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + # artInitializeTypeFromCode(uint32_t type_idx, Method* referrer, Thread*) jal artInitializeTypeAndVerifyAccessFromCode - move $a3, $sp # pass $sp + move $a2, rSELF # pass Thread::Current RETURN_IF_RESULT_IS_NON_ZERO END art_quick_initialize_type_and_verify_access /* @@ -745,12 +727,10 @@ END art_quick_initialize_type_and_verify_access */ .extern artGetBooleanStaticFromCode ENTRY art_quick_get_boolean_static - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a1, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artGetBooleanStaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*) move $a2, rSELF # pass Thread::Current - jal artGetBooleanStaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*, $sp) - move $a3, $sp # pass $sp RETURN_IF_NO_EXCEPTION END art_quick_get_boolean_static /* @@ -758,12 +738,10 @@ END art_quick_get_boolean_static */ .extern artGetByteStaticFromCode ENTRY art_quick_get_byte_static - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a1, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artGetByteStaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*) move $a2, rSELF # pass Thread::Current - jal artGetByteStaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*, $sp) - move $a3, $sp # pass $sp RETURN_IF_NO_EXCEPTION END art_quick_get_byte_static @@ -772,12 +750,10 @@ END art_quick_get_byte_static */ .extern artGetCharStaticFromCode ENTRY art_quick_get_char_static - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a1, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal 
artGetCharStaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*) move $a2, rSELF # pass Thread::Current - jal artGetCharStaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*, $sp) - move $a3, $sp # pass $sp RETURN_IF_NO_EXCEPTION END art_quick_get_char_static /* @@ -785,12 +761,10 @@ END art_quick_get_char_static */ .extern artGetShortStaticFromCode ENTRY art_quick_get_short_static - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a1, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artGetShortStaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*) move $a2, rSELF # pass Thread::Current - jal artGetShortStaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*, $sp) - move $a3, $sp # pass $sp RETURN_IF_NO_EXCEPTION END art_quick_get_short_static @@ -799,12 +773,10 @@ END art_quick_get_short_static */ .extern artGet32StaticFromCode ENTRY art_quick_get32_static - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a1, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artGet32StaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*) move $a2, rSELF # pass Thread::Current - jal artGet32StaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*, $sp) - move $a3, $sp # pass $sp RETURN_IF_NO_EXCEPTION END art_quick_get32_static @@ -813,12 +785,10 @@ END art_quick_get32_static */ .extern artGet64StaticFromCode ENTRY art_quick_get64_static - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a1, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artGet64StaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*) move $a2, rSELF # pass Thread::Current - jal artGet64StaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*, $sp) - move $a3, $sp # pass $sp RETURN_IF_NO_EXCEPTION END art_quick_get64_static @@ -827,12 +797,10 @@ END art_quick_get64_static */ .extern artGetObjStaticFromCode ENTRY art_quick_get_obj_static - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a1, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artGetObjStaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*) move $a2, rSELF # pass Thread::Current - jal artGetObjStaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*, $sp) - move $a3, $sp # pass $sp RETURN_IF_NO_EXCEPTION END art_quick_get_obj_static @@ -841,12 +809,10 @@ END art_quick_get_obj_static */ .extern artGetBooleanInstanceFromCode ENTRY art_quick_get_boolean_instance - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a2, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artGetBooleanInstanceFromCode # (field_idx, Object*, referrer, 
Thread*) move $a3, rSELF # pass Thread::Current - jal artGetBooleanInstanceFromCode # (field_idx, Object*, referrer, Thread*, $sp) - sw $sp, 16($sp) # pass $sp RETURN_IF_NO_EXCEPTION END art_quick_get_boolean_instance /* @@ -854,12 +820,10 @@ END art_quick_get_boolean_instance */ .extern artGetByteInstanceFromCode ENTRY art_quick_get_byte_instance - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a2, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artGetByteInstanceFromCode # (field_idx, Object*, referrer, Thread*) move $a3, rSELF # pass Thread::Current - jal artGetByteInstanceFromCode # (field_idx, Object*, referrer, Thread*, $sp) - sw $sp, 16($sp) # pass $sp RETURN_IF_NO_EXCEPTION END art_quick_get_byte_instance @@ -868,12 +832,10 @@ END art_quick_get_byte_instance */ .extern artGetCharInstanceFromCode ENTRY art_quick_get_char_instance - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a2, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artGetCharInstanceFromCode # (field_idx, Object*, referrer, Thread*) move $a3, rSELF # pass Thread::Current - jal artGetCharInstanceFromCode # (field_idx, Object*, referrer, Thread*, $sp) - sw $sp, 16($sp) # pass $sp RETURN_IF_NO_EXCEPTION END art_quick_get_char_instance /* @@ -881,12 +843,10 @@ END art_quick_get_char_instance */ .extern artGetShortInstanceFromCode ENTRY art_quick_get_short_instance - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a2, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artGetShortInstanceFromCode # (field_idx, Object*, referrer, Thread*) move $a3, rSELF # pass Thread::Current - jal artGetShortInstanceFromCode # (field_idx, Object*, referrer, Thread*, $sp) - sw $sp, 16($sp) # pass $sp RETURN_IF_NO_EXCEPTION END art_quick_get_short_instance @@ -895,9 +855,8 @@ END art_quick_get_short_instance */ .extern artGet32InstanceFromCode ENTRY art_quick_get32_instance - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a2, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* move $a3, rSELF # pass Thread::Current jal artGet32InstanceFromCode # (field_idx, Object*, referrer, Thread*, $sp) sw $sp, 16($sp) # pass $sp @@ -909,9 +868,8 @@ END art_quick_get32_instance */ .extern artGet64InstanceFromCode ENTRY art_quick_get64_instance - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a2, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* move $a3, rSELF # pass Thread::Current jal artGet64InstanceFromCode # (field_idx, Object*, referrer, Thread*, $sp) sw $sp, 16($sp) # pass $sp @@ -923,12 +881,10 @@ END art_quick_get64_instance */ .extern artGetObjInstanceFromCode ENTRY art_quick_get_obj_instance - GENERATE_GLOBAL_POINTER - 
SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a2, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artGetObjInstanceFromCode # (field_idx, Object*, referrer, Thread*) move $a3, rSELF # pass Thread::Current - jal artGetObjInstanceFromCode # (field_idx, Object*, referrer, Thread*, $sp) - sw $sp, 16($sp) # pass $sp RETURN_IF_NO_EXCEPTION END art_quick_get_obj_instance @@ -937,12 +893,10 @@ END art_quick_get_obj_instance */ .extern artSet8StaticFromCode ENTRY art_quick_set8_static - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a2, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artSet8StaticFromCode # (field_idx, new_val, referrer, Thread*) move $a3, rSELF # pass Thread::Current - jal artSet8StaticFromCode # (field_idx, new_val, referrer, Thread*, $sp) - sw $sp, 16($sp) # pass $sp RETURN_IF_ZERO END art_quick_set8_static @@ -951,12 +905,10 @@ END art_quick_set8_static */ .extern artSet16StaticFromCode ENTRY art_quick_set16_static - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a2, 64($sp) # pass referrer's Method* - move $a3, rSELF # pass Thread::Current + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* jal artSet16StaticFromCode # (field_idx, new_val, referrer, Thread*, $sp) - sw $sp, 16($sp) # pass $sp + move $a3, rSELF # pass Thread::Current RETURN_IF_ZERO END art_quick_set16_static @@ -965,12 +917,10 @@ END art_quick_set16_static */ .extern artSet32StaticFromCode ENTRY art_quick_set32_static - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a2, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artSet32StaticFromCode # (field_idx, new_val, referrer, Thread*) move $a3, rSELF # pass Thread::Current - jal artSet32StaticFromCode # (field_idx, new_val, referrer, Thread*, $sp) - sw $sp, 16($sp) # pass $sp RETURN_IF_ZERO END art_quick_set32_static @@ -979,12 +929,10 @@ END art_quick_set32_static */ .extern artSet64StaticFromCode ENTRY art_quick_set64_static - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a1, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artSet64StaticFromCode # (field_idx, referrer, new_val, Thread*) sw rSELF, 16($sp) # pass Thread::Current - jal artSet64StaticFromCode # (field_idx, referrer, new_val, Thread*, $sp) - sw $sp, 20($sp) # pass $sp RETURN_IF_ZERO END art_quick_set64_static @@ -993,12 +941,10 @@ END art_quick_set64_static */ .extern artSetObjStaticFromCode ENTRY art_quick_set_obj_static - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a2, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artSetObjStaticFromCode # (field_idx, 
new_val, referrer, Thread*) move $a3, rSELF # pass Thread::Current - jal artSetObjStaticFromCode # (field_idx, new_val, referrer, Thread*, $sp) - sw $sp, 16($sp) # pass $sp RETURN_IF_ZERO END art_quick_set_obj_static @@ -1007,12 +953,10 @@ END art_quick_set_obj_static */ .extern artSet8InstanceFromCode ENTRY art_quick_set8_instance - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a3, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a3, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artSet8InstanceFromCode # (field_idx, Object*, new_val, referrer, Thread*) sw rSELF, 16($sp) # pass Thread::Current - jal artSet8InstanceFromCode # (field_idx, Object*, new_val, referrer, Thread*, $sp) - sw $sp, 20($sp) # pass $sp RETURN_IF_ZERO END art_quick_set8_instance @@ -1021,12 +965,10 @@ END art_quick_set8_instance */ .extern artSet16InstanceFromCode ENTRY art_quick_set16_instance - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a3, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a3, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artSet16InstanceFromCode # (field_idx, Object*, new_val, referrer, Thread*) sw rSELF, 16($sp) # pass Thread::Current - jal artSet16InstanceFromCode # (field_idx, Object*, new_val, referrer, Thread*, $sp) - sw $sp, 20($sp) # pass $sp RETURN_IF_ZERO END art_quick_set16_instance @@ -1035,12 +977,10 @@ END art_quick_set16_instance */ .extern artSet32InstanceFromCode ENTRY art_quick_set32_instance - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a3, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a3, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artSet32InstanceFromCode # (field_idx, Object*, new_val, referrer, Thread*) sw rSELF, 16($sp) # pass Thread::Current - jal artSet32InstanceFromCode # (field_idx, Object*, new_val, referrer, Thread*, $sp) - sw $sp, 20($sp) # pass $sp RETURN_IF_ZERO END art_quick_set32_instance @@ -1049,11 +989,11 @@ END art_quick_set32_instance */ .extern artSet64InstanceFromCode ENTRY art_quick_set64_instance - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - sw rSELF, 16($sp) # pass Thread::Current - jal artSet64InstanceFromCode # (field_idx, Object*, new_val, Thread*, $sp) - sw $sp, 20($sp) # pass $sp + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $t0, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # load referrer's Method* + sw rSELF, 20($sp) # pass Thread::Current + jal artSet64InstanceFromCode # (field_idx, Object*, new_val, referrer, Thread*) + sw $t0, 16($sp) # pass referrer's Method* RETURN_IF_ZERO END art_quick_set64_instance @@ -1062,12 +1002,10 @@ END art_quick_set64_instance */ .extern artSetObjInstanceFromCode ENTRY art_quick_set_obj_instance - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - lw $a3, 64($sp) # pass referrer's Method* + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + lw $a3, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method* + jal artSetObjInstanceFromCode # (field_idx, Object*, new_val, referrer, Thread*) sw rSELF, 16($sp) # pass Thread::Current - jal artSetObjInstanceFromCode # 
(field_idx, Object*, new_val, referrer, Thread*, $sp) - sw $sp, 20($sp) # pass $sp RETURN_IF_ZERO END art_quick_set_obj_instance @@ -1079,12 +1017,10 @@ END art_quick_set_obj_instance */ .extern artResolveStringFromCode ENTRY art_quick_resolve_string - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - move $a2, rSELF # pass Thread::Current - # artResolveStringFromCode(Method* referrer, uint32_t string_idx, Thread*, $sp) + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC + # artResolveStringFromCode(Method* referrer, uint32_t string_idx, Thread*) jal artResolveStringFromCode - move $a3, $sp # pass $sp + move $a2, rSELF # pass Thread::Current RETURN_IF_RESULT_IS_NON_ZERO END art_quick_resolve_string @@ -1093,11 +1029,9 @@ END art_quick_resolve_string .macro TWO_ARG_DOWNCALL name, entrypoint, return .extern \entrypoint ENTRY \name - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - move $a2, rSELF # pass Thread::Current + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC jal \entrypoint - move $a3, $sp # pass $sp + move $a2, rSELF # pass Thread::Current \return END \name .endm @@ -1105,11 +1039,9 @@ END \name .macro THREE_ARG_DOWNCALL name, entrypoint, return .extern \entrypoint ENTRY \name - GENERATE_GLOBAL_POINTER - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC - move $a3, rSELF # pass Thread::Current + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC jal \entrypoint - sw $sp, 16($sp) # pass $sp + move $a3, rSELF # pass Thread::Current \return END \name .endm @@ -1122,18 +1054,16 @@ GENERATE_ALL_ALLOC_ENTRYPOINTS */ .extern artTestSuspendFromCode ENTRY art_quick_test_suspend - GENERATE_GLOBAL_POINTER lh $a0, THREAD_FLAGS_OFFSET(rSELF) bnez $a0, 1f addi rSUSPEND, $zero, SUSPEND_CHECK_INTERVAL # reset rSUSPEND to SUSPEND_CHECK_INTERVAL jr $ra nop 1: + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves for stack crawl + jal artTestSuspendFromCode # (Thread*) move $a0, rSELF - SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves for stack crawl - jal artTestSuspendFromCode # (Thread*, $sp) - move $a1, $sp - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN END art_quick_test_suspend /* @@ -1142,14 +1072,13 @@ END art_quick_test_suspend */ .extern artQuickProxyInvokeHandler ENTRY art_quick_proxy_invoke_handler - GENERATE_GLOBAL_POINTER - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME sw $a0, 0($sp) # place proxy method at bottom of frame move $a2, rSELF # pass Thread::Current jal artQuickProxyInvokeHandler # (Method* proxy method, receiver, Thread*, SP) move $a3, $sp # pass $sp lw $t0, THREAD_EXCEPTION_OFFSET(rSELF) # load Thread::Current()->exception_ - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME bnez $t0, 1f mtc1 $v0, $f0 # place return value to FP return value jr $ra @@ -1162,32 +1091,30 @@ END art_quick_proxy_invoke_handler * Called to resolve an imt conflict. t0 is a hidden argument that holds the target method's * dex method index. 
*/ -ENTRY art_quick_imt_conflict_trampoline - GENERATE_GLOBAL_POINTER +ENTRY_NO_GP art_quick_imt_conflict_trampoline lw $a0, 0($sp) # load caller Method* - lw $a0, METHOD_DEX_CACHE_METHODS_OFFSET($a0) # load dex_cache_resolved_methods + lw $a0, MIRROR_ART_METHOD_DEX_CACHE_METHODS_OFFSET($a0) # load dex_cache_resolved_methods sll $t0, 2 # convert target method offset to bytes add $a0, $t0 # get address of target method - lw $a0, OBJECT_ARRAY_DATA_OFFSET($a0) # load the target method + lw $a0, MIRROR_OBJECT_ARRAY_DATA_OFFSET($a0) # load the target method la $t9, art_quick_invoke_interface_trampoline jr $t9 END art_quick_imt_conflict_trampoline .extern artQuickResolutionTrampoline ENTRY art_quick_resolution_trampoline - GENERATE_GLOBAL_POINTER - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME move $a2, rSELF # pass Thread::Current jal artQuickResolutionTrampoline # (Method* called, receiver, Thread*, SP) move $a3, $sp # pass $sp beqz $v0, 1f lw $a0, 0($sp) # load resolved method to $a0 - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME move $t9, $v0 # code pointer must be in $t9 to generate the global pointer jr $v0 # tail call to method nop 1: - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME DELIVER_PENDING_EXCEPTION END art_quick_resolution_trampoline @@ -1195,13 +1122,12 @@ UNIMPLEMENTED art_quick_generic_jni_trampoline .extern artQuickToInterpreterBridge ENTRY art_quick_to_interpreter_bridge - GENERATE_GLOBAL_POINTER - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME move $a1, rSELF # pass Thread::Current jal artQuickToInterpreterBridge # (Method* method, Thread*, SP) move $a2, $sp # pass $sp lw $t0, THREAD_EXCEPTION_OFFSET(rSELF) # load Thread::Current()->exception_ - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME bnez $t0, 1f mtc1 $v0, $f0 # place return value to FP return value jr $ra @@ -1216,21 +1142,19 @@ END art_quick_to_interpreter_bridge .extern artInstrumentationMethodEntryFromCode .extern artInstrumentationMethodExitFromCode ENTRY art_quick_instrumentation_entry - GENERATE_GLOBAL_POINTER - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME move $t0, $sp # remember bottom of caller's frame - addiu $sp, $sp, -32 # space for args, pad (3 words), arguments (5 words) + addiu $sp, $sp, -32 # space for saved a0, pad (2 words), arguments (4 words) .cfi_adjust_cfa_offset 32 sw $a0, 28($sp) # save arg0 - sw $ra, 16($sp) # pass $ra - move $a3, $t0 # pass $sp - jal artInstrumentationMethodEntryFromCode # (Method*, Object*, Thread*, SP, LR) + move $a3, $ra # pass $ra + jal artInstrumentationMethodEntryFromCode # (Method*, Object*, Thread*, LR) move $a2, rSELF # pass Thread::Current move $t9, $v0 # $t9 holds reference to code lw $a0, 28($sp) # restore arg0 addiu $sp, $sp, 32 # remove args .cfi_adjust_cfa_offset -32 - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME jalr $t9 # call method nop END art_quick_instrumentation_entry @@ -1239,9 +1163,9 @@ END art_quick_instrumentation_entry art_quick_instrumentation_exit: .cfi_startproc addiu $t9, $ra, 4 # put current address into $t9 to rebuild $gp - GENERATE_GLOBAL_POINTER + .cpload $t9 move $ra, $zero # link register is to here, so clobber with 0 for later checks - SETUP_REF_ONLY_CALLEE_SAVE_FRAME + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME move $t0, $sp # remember bottom of caller's frame addiu $sp, $sp, -48 # save return values and set up args 
.cfi_adjust_cfa_offset 48 @@ -1274,14 +1198,11 @@ END art_quick_instrumentation_exit * will long jump to the upcall with a special exception of -1. */ .extern artDeoptimize - .extern artEnterInterpreterFromDeoptimize ENTRY art_quick_deoptimize - GENERATE_GLOBAL_POINTER SETUP_SAVE_ALL_CALLEE_SAVE_FRAME - move $a0, rSELF # pass Thread::current - jal artDeoptimize # artDeoptimize(Thread*, SP) + jal artDeoptimize # artDeoptimize(Thread*) # Returns caller method's frame size. - move $a1, $sp # pass $sp + move $a0, rSELF # pass Thread::current END art_quick_deoptimize /* @@ -1294,7 +1215,7 @@ END art_quick_deoptimize * $a1: high word * $a2: shift count */ -ENTRY art_quick_shl_long +ENTRY_NO_GP art_quick_shl_long /* shl-long vAA, vBB, vCC */ sll $v0, $a0, $a2 # rlo<- alo << (shift&31) not $v1, $a2 # rhi<- 31-shift (shift is 5b) @@ -1318,8 +1239,7 @@ END art_quick_shl_long * $a1: high word * $a2: shift count */ - .global art_quick_shr_long -ENTRY art_quick_shr_long +ENTRY_NO_GP art_quick_shr_long sra $v1, $a1, $a2 # rhi<- ahi >> (shift&31) srl $v0, $a0, $a2 # rlo<- alo >> (shift&31) sra $a3, $a1, 31 # $a3<- sign(ah) @@ -1344,8 +1264,7 @@ END art_quick_shr_long * r2: shift count */ /* ushr-long vAA, vBB, vCC */ - .global art_quick_ushr_long -ENTRY art_quick_ushr_long +ENTRY_NO_GP art_quick_ushr_long srl $v1, $a1, $a2 # rhi<- ahi >> (shift&31) srl $v0, $a0, $a2 # rlo<- alo >> (shift&31) not $a0, $a2 # alo<- 31-shift (shift is 5b) @@ -1358,12 +1277,5 @@ ENTRY art_quick_ushr_long movn $v1, $zero, $a2 # rhi<- 0 (if shift&0x20) END art_quick_ushr_long -ENTRY art_quick_indexof - jr $ra - nop -END art_quick_indexof - -ENTRY art_quick_string_compareto - jr $ra - nop -END art_quick_string_compareto +UNIMPLEMENTED art_quick_indexof +UNIMPLEMENTED art_quick_string_compareto diff --git a/runtime/arch/stub_test.cc b/runtime/arch/stub_test.cc index c9b9f04d6..ea586b854 100644 --- a/runtime/arch/stub_test.cc +++ b/runtime/arch/stub_test.cc @@ -1221,13 +1221,12 @@ TEST_F(StubTest, StringCompareTo) { // Use array so we can index into it and use a matrix for expected results // Setup: The first half is standard. The second half uses a non-zero offset. // TODO: Shared backing arrays. - static constexpr size_t kBaseStringCount = 8; - const char* c[kBaseStringCount] = { "", "", "a", "aa", "ab", + const char* c[] = { "", "", "a", "aa", "ab", "aacaacaacaacaacaac", // This one's under the default limit to go to __memcmp16. "aacaacaacaacaacaacaacaacaacaacaacaac", // This one's over. "aacaacaacaacaacaacaacaacaacaacaacaaca" }; // As is this one. We need a separate one to // defeat object-equal optimizations. 
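The long-shift helpers earlier in this file (art_quick_shl_long and friends) operate on a 64-bit value held as a 32-bit register pair, with only the low six bits of the shift count significant. A minimal C++ model of the shl-long case, written with explicit branches for clarity (the MIPS code computes the same result branchlessly in the register pair); the struct and function names are illustrative, not runtime declarations:

    #include <cassert>
    #include <cstdint>

    // 64-bit value held as two 32-bit halves, as the MIPS code keeps it in a register pair.
    struct LongPair {
      uint32_t lo;
      uint32_t hi;
    };

    // Semantics of shl-long vAA, vBB, vCC: shift the pair left by (shift & 63).
    LongPair ShlLongPair(LongPair in, uint32_t shift) {
      shift &= 63;
      LongPair out;
      if (shift & 0x20) {               // Shifting by 32..63: the low word becomes the high word.
        out.hi = in.lo << (shift & 31);
        out.lo = 0;
      } else if (shift == 0) {          // Avoid the undefined 32-bit shift by 32 below.
        out = in;
      } else {
        out.lo = in.lo << shift;
        out.hi = (in.hi << shift) | (in.lo >> (32 - shift));
      }
      return out;
    }

    int main() {
      LongPair v = {0x89abcdefu, 0x01234567u};   // Represents 0x0123456789abcdef.
      for (uint32_t s = 0; s < 64; ++s) {
        uint64_t wide = (uint64_t{v.hi} << 32) | v.lo;
        uint64_t expect = wide << s;
        LongPair r = ShlLongPair(v, s);
        assert(r.lo == static_cast<uint32_t>(expect));
        assert(r.hi == static_cast<uint32_t>(expect >> 32));
      }
      return 0;
    }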
- + static constexpr size_t kBaseStringCount = arraysize(c); static constexpr size_t kStringCount = 2 * kBaseStringCount; StackHandleScope hs(self); @@ -1337,10 +1336,9 @@ static void GetSetByteStatic(Handle* obj, HandleInvoke3WithReferrer(static_cast((*f)->GetDexFieldIndex()), static_cast(values[i]), 0U, @@ -1367,10 +1365,9 @@ static void GetSetBooleanInstance(Handle* obj, HandleInvoke3WithReferrer(static_cast((*f)->GetDexFieldIndex()), reinterpret_cast(obj->Get()), static_cast(values[i]), @@ -1401,10 +1398,9 @@ static void GetSetByteInstance(Handle* obj, HandleInvoke3WithReferrer(static_cast((*f)->GetDexFieldIndex()), reinterpret_cast(obj->Get()), static_cast(values[i]), @@ -1435,10 +1431,9 @@ static void GetSetCharStatic(Handle* obj, HandleInvoke3WithReferrer(static_cast((*f)->GetDexFieldIndex()), static_cast(values[i]), 0U, @@ -1464,10 +1459,9 @@ static void GetSetShortStatic(Handle* obj, HandleInvoke3WithReferrer(static_cast((*f)->GetDexFieldIndex()), static_cast(values[i]), 0U, @@ -1494,10 +1488,9 @@ static void GetSetCharInstance(Handle* obj, HandleInvoke3WithReferrer(static_cast((*f)->GetDexFieldIndex()), reinterpret_cast(obj->Get()), static_cast(values[i]), @@ -1527,10 +1520,9 @@ static void GetSetShortInstance(Handle* obj, HandleInvoke3WithReferrer(static_cast((*f)->GetDexFieldIndex()), reinterpret_cast(obj->Get()), static_cast(values[i]), @@ -1561,10 +1553,9 @@ static void GetSet32Static(Handle* obj, Handle mirror::ArtMethod* referrer, StubTest* test) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { #if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__)) - constexpr size_t num_values = 7; - uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF }; + uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF }; - for (size_t i = 0; i < num_values; ++i) { + for (size_t i = 0; i < arraysize(values); ++i) { test->Invoke3WithReferrer(static_cast((*f)->GetDexFieldIndex()), static_cast(values[i]), 0U, @@ -1592,10 +1583,9 @@ static void GetSet32Instance(Handle* obj, HandleInvoke3WithReferrer(static_cast((*f)->GetDexFieldIndex()), reinterpret_cast(obj->Get()), static_cast(values[i]), @@ -1716,10 +1706,9 @@ static void GetSet64Static(Handle* obj, Handle mirror::ArtMethod* referrer, StubTest* test) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { #if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__) - constexpr size_t num_values = 8; - uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF }; + uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF }; - for (size_t i = 0; i < num_values; ++i) { + for (size_t i = 0; i < arraysize(values); ++i) { test->Invoke3UWithReferrer(static_cast((*f)->GetDexFieldIndex()), values[i], StubTest::GetEntrypoint(self, kQuickSet64Static), @@ -1746,10 +1735,9 @@ static void GetSet64Instance(Handle* obj, HandleInvoke3WithReferrer(static_cast((*f)->GetDexFieldIndex()), reinterpret_cast(obj->Get()), static_cast(values[i]), @@ -2109,10 +2097,10 @@ TEST_F(StubTest, StringIndexOf) { // Use array so we can index into it and use a matrix for expected results // Setup: The first half is standard. The second half uses a non-zero offset. // TODO: Shared backing arrays. 
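These test changes replace hand-maintained element counts with arraysize(...) so the counts can never drift from the initializer lists. A self-contained sketch of how such a compile-time helper is commonly written (an illustrative version, not necessarily the exact macro defined in the ART headers):

    #include <cstddef>

    // Declares a function returning a reference to a char array whose length equals the
    // length of the argument array; sizeof() on the result yields the element count at
    // compile time. No definition is needed because it is only used inside sizeof().
    template <typename T, std::size_t N>
    char (&ArraySizeHelper(T (&array)[N]))[N];

    #define arraysize(array) (sizeof(ArraySizeHelper(array)))

    int main() {
      const char* c[] = { "", "", "a", "aa", "ab" };
      static_assert(arraysize(c) == 5, "count tracks the initializer list");
      // Unlike sizeof(c) / sizeof(c[0]), this fails to compile if 'c' decays to a pointer.
      return 0;
    }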
- static constexpr size_t kStringCount = 7; - const char* c_str[kStringCount] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" }; - static constexpr size_t kCharCount = 5; - const char c_char[kCharCount] = { 'a', 'b', 'c', 'd', 'e' }; + const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" }; + static constexpr size_t kStringCount = arraysize(c_str); + const char c_char[] = { 'a', 'b', 'c', 'd', 'e' }; + static constexpr size_t kCharCount = arraysize(c_char); StackHandleScope hs(self); Handle s[kStringCount]; diff --git a/runtime/arch/x86/asm_support_x86.S b/runtime/arch/x86/asm_support_x86.S index 78b97e5cb..fea16da9f 100644 --- a/runtime/arch/x86/asm_support_x86.S +++ b/runtime/arch/x86/asm_support_x86.S @@ -164,10 +164,14 @@ VAR(name, 0): SIZE(\name, 0) END_MACRO -MACRO0(SETUP_GOT_NOSAVE) +MACRO1(SETUP_GOT_NOSAVE, got_reg) #ifndef __APPLE__ - call __x86.get_pc_thunk.bx - addl $_GLOBAL_OFFSET_TABLE_, %ebx + .ifc RAW_VAR(got_reg, 0), ebx + call __x86.get_pc_thunk.bx + addl $_GLOBAL_OFFSET_TABLE_, %ebx + .else + .error "Unknown GOT register \got_reg" + .endif #endif END_MACRO diff --git a/runtime/arch/x86/asm_support_x86.h b/runtime/arch/x86/asm_support_x86.h index c9f5a25ee..5a88f8012 100644 --- a/runtime/arch/x86/asm_support_x86.h +++ b/runtime/arch/x86/asm_support_x86.h @@ -19,20 +19,8 @@ #include "asm_support.h" -// Offset of field Thread::self_ verified in InitCpu -#define THREAD_SELF_OFFSET 156 -// Offset of field Thread::card_table_ verified in InitCpu -#define THREAD_CARD_TABLE_OFFSET 120 -// Offset of field Thread::exception_ verified in InitCpu -#define THREAD_EXCEPTION_OFFSET 124 -// Offset of field Thread::thin_lock_thread_id_ verified in InitCpu -#define THREAD_ID_OFFSET 12 - #define FRAME_SIZE_SAVE_ALL_CALLEE_SAVE 32 #define FRAME_SIZE_REFS_ONLY_CALLEE_SAVE 32 #define FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE 32 -// Expected size of a heap reference -#define HEAP_REFERENCE_SIZE 4 - #endif // ART_RUNTIME_ARCH_X86_ASM_SUPPORT_X86_H_ diff --git a/runtime/arch/x86/portable_entrypoints_x86.S b/runtime/arch/x86/portable_entrypoints_x86.S index 70c0ae2d9..a7c4124a4 100644 --- a/runtime/arch/x86/portable_entrypoints_x86.S +++ b/runtime/arch/x86/portable_entrypoints_x86.S @@ -37,7 +37,7 @@ DEFINE_FUNCTION art_portable_invoke_stub andl LITERAL(0xFFFFFFF0), %ebx // align frame size to 16 bytes subl LITERAL(12), %ebx // remove space for return address, ebx, and ebp subl %ebx, %esp // reserve stack space for argument array - SETUP_GOT_NOSAVE // reset ebx to GOT table + SETUP_GOT_NOSAVE ebx // reset ebx to GOT table lea 4(%esp), %eax // use stack pointer + method ptr as dest for memcpy pushl 20(%ebp) // push size of region to memcpy pushl 16(%ebp) // push arg array as source of memcpy @@ -46,7 +46,7 @@ DEFINE_FUNCTION art_portable_invoke_stub addl LITERAL(12), %esp // pop arguments to memcpy mov 12(%ebp), %eax // move method pointer into eax mov %eax, (%esp) // push method pointer onto stack - call *METHOD_PORTABLE_CODE_OFFSET(%eax) // call the method + call *MIRROR_ART_METHOD_PORTABLE_CODE_OFFSET(%eax) // call the method mov %ebp, %esp // restore stack pointer POP ebx // pop ebx POP ebp // pop ebp diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S index a158e6db1..a1fdcf17e 100644 --- a/runtime/arch/x86/quick_entrypoints_x86.S +++ b/runtime/arch/x86/quick_entrypoints_x86.S @@ -22,12 +22,21 @@ * Macro that sets up the callee save frame to conform with * Runtime::CreateCalleeSaveMethod(kSaveAll) */ 
-MACRO0(SETUP_SAVE_ALL_CALLEE_SAVE_FRAME)
+MACRO2(SETUP_SAVE_ALL_CALLEE_SAVE_FRAME, got_reg, temp_reg)
  PUSH edi // Save callee saves (ebx is saved/restored by the upcall)
  PUSH esi
  PUSH ebp
- subl MACRO_LITERAL(16), %esp // Grow stack by 4 words, bottom word will hold Method*
- CFI_ADJUST_CFA_OFFSET(16)
+ subl MACRO_LITERAL(12), %esp // Grow stack by 3 words.
+ CFI_ADJUST_CFA_OFFSET(12)
+ SETUP_GOT_NOSAVE RAW_VAR(got_reg, 0)
+ // Load Runtime::instance_ from GOT.
+ movl _ZN3art7Runtime9instance_E@GOT(REG_VAR(got_reg, 0)), REG_VAR(temp_reg, 1)
+ movl (REG_VAR(temp_reg, 1)), REG_VAR(temp_reg, 1)
+ // Push save all callee-save method.
+ pushl RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET(REG_VAR(temp_reg, 1))
+ CFI_ADJUST_CFA_OFFSET(4)
+ // Store esp as the top quick frame.
+ movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
  // Ugly compile-time check, but we only have the preprocessor.
  // Last +4: implicit return address pushed on stack when caller made call.
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 3*4 + 16 + 4)
@@ -39,12 +48,21 @@ END_MACRO
 * Macro that sets up the callee save frame to conform with
 * Runtime::CreateCalleeSaveMethod(kRefsOnly)
 */
-MACRO0(SETUP_REF_ONLY_CALLEE_SAVE_FRAME)
+MACRO2(SETUP_REFS_ONLY_CALLEE_SAVE_FRAME, got_reg, temp_reg)
  PUSH edi // Save callee saves (ebx is saved/restored by the upcall)
  PUSH esi
  PUSH ebp
- subl MACRO_LITERAL(16), %esp // Grow stack by 4 words, bottom word will hold Method*
- CFI_ADJUST_CFA_OFFSET(16)
+ subl MACRO_LITERAL(12), %esp // Grow stack by 3 words.
+ CFI_ADJUST_CFA_OFFSET(12)
+ SETUP_GOT_NOSAVE VAR(got_reg, 0)
+ // Load Runtime::instance_ from GOT.
+ movl _ZN3art7Runtime9instance_E@GOT(REG_VAR(got_reg, 0)), REG_VAR(temp_reg, 1)
+ movl (REG_VAR(temp_reg, 1)), REG_VAR(temp_reg, 1)
+ // Push refs only callee-save method.
+ pushl RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET(REG_VAR(temp_reg, 1))
+ CFI_ADJUST_CFA_OFFSET(4)
+ // Store esp as the top quick frame.
+ movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
  // Ugly compile-time check, but we only have the preprocessor.
  // Last +4: implicit return address pushed on stack when caller made call.
@@ -53,7 +71,7 @@ MACRO0(SETUP_REF_ONLY_CALLEE_SAVE_FRAME)
 #endif
 END_MACRO
-MACRO0(RESTORE_REF_ONLY_CALLEE_SAVE_FRAME)
+MACRO0(RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME)
  addl MACRO_LITERAL(16), %esp // Unwind stack up to saved values
  CFI_ADJUST_CFA_OFFSET(-16)
  POP ebp // Restore callee saves (ebx is saved/restored by the upcall)
@@ -65,14 +83,22 @@ END_MACRO
 * Macro that sets up the callee save frame to conform with
 * Runtime::CreateCalleeSaveMethod(kRefsAndArgs)
 */
-MACRO0(SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME)
+MACRO2(SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME, got_reg, temp_reg)
  PUSH edi // Save callee saves
  PUSH esi
  PUSH ebp
  PUSH ebx // Save args
  PUSH edx
  PUSH ecx
- PUSH eax // Align stack, eax will be clobbered by Method*
+ SETUP_GOT_NOSAVE VAR(got_reg, 0)
+ // Load Runtime::instance_ from GOT.
+ movl _ZN3art7Runtime9instance_E@GOT(REG_VAR(got_reg, 0)), REG_VAR(temp_reg, 1)
+ movl (REG_VAR(temp_reg, 1)), REG_VAR(temp_reg, 1)
+ // Push refs and args callee-save method.
+ pushl RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET(REG_VAR(temp_reg, 1))
+ CFI_ADJUST_CFA_OFFSET(4)
+ // Store esp as the top quick frame.
+ movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
  // Ugly compile-time check, but we only have the preprocessor.
  // Last +4: implicit return address pushed on stack when caller made call.
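The rewritten macros above no longer leave an empty word for the callee-save Method*: they load the Runtime singleton through the GOT, push the matching callee-save method from it, and publish the resulting stack pointer through the thread as the top quick frame. A rough C++ picture of the state being set up, with deliberately simplified, illustrative type and field names rather than the real runtime declarations:

    #include <cstddef>

    // Hypothetical, simplified shapes; the real types are Runtime, Thread and
    // StackReference<mirror::ArtMethod>, with different layouts.
    struct ArtMethodRef {};                     // Stand-in for the callee-save method reference.

    struct FakeRuntime {
      // Indexed like the RUNTIME_*_CALLEE_SAVE_FRAME_OFFSET constants:
      // kSaveAll, kRefsOnly, kRefsAndArgs.
      ArtMethodRef* callee_save_methods[3];
      static FakeRuntime* instance;             // What _ZN3art7Runtime9instance_E resolves to.
    };
    FakeRuntime* FakeRuntime::instance = nullptr;

    struct FakeThread {
      // What the %fs:THREAD_TOP_QUICK_FRAME_OFFSET store publishes: the anchor used to
      // walk the managed stack.
      ArtMethodRef** top_quick_frame;
    };

    // The C++ view of SETUP_*_CALLEE_SAVE_FRAME: fill the newly pushed slot with the
    // runtime's callee-save method and record that slot as the thread's top quick frame.
    void SetUpCalleeSaveFrame(FakeThread* self, ArtMethodRef** frame_slot, int frame_kind) {
      *frame_slot = FakeRuntime::instance->callee_save_methods[frame_kind];
      self->top_quick_frame = frame_slot;
    }

    // Frame accounting behind the #if check above: three saved registers, 12 bytes of
    // alignment padding, the pushed method slot, and the caller's return address.
    static_assert(3 * 4 + 12 + 4 + 4 == 32, "x86 save-all callee-save frame stays 32 bytes");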
@@ -81,7 +107,23 @@ MACRO0(SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME) #endif END_MACRO -MACRO0(RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME) + /* + * Macro that sets up the callee save frame to conform with + * Runtime::CreateCalleeSaveMethod(kRefsAndArgs) where the method is passed in EAX. + */ +MACRO0(SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_EAX) + PUSH edi // Save callee saves + PUSH esi + PUSH ebp + PUSH ebx // Save args + PUSH edx + PUSH ecx + PUSH eax // Store the ArtMethod reference at the bottom of the stack. + // Store esp as the stop quick frame. + movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET +END_MACRO + +MACRO0(RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME) addl MACRO_LITERAL(4), %esp // Remove padding CFI_ADJUST_CFA_OFFSET(-4) POP ecx // Restore args except eax @@ -97,59 +139,54 @@ END_MACRO * exception is Thread::Current()->exception_. */ MACRO0(DELIVER_PENDING_EXCEPTION) - SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save callee saves for throw - mov %esp, %ecx + SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx // save callee saves for throw // Outgoing argument set up - subl MACRO_LITERAL(8), %esp // Alignment padding - CFI_ADJUST_CFA_OFFSET(8) - PUSH ecx // pass SP - pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() + subl MACRO_LITERAL(12), %esp // Alignment padding + CFI_ADJUST_CFA_OFFSET(12) + pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() CFI_ADJUST_CFA_OFFSET(4) - call SYMBOL(artDeliverPendingExceptionFromCode) // artDeliverPendingExceptionFromCode(Thread*, SP) - int3 // unreached + call SYMBOL(artDeliverPendingExceptionFromCode) // artDeliverPendingExceptionFromCode(Thread*) + int3 // unreached END_MACRO MACRO2(NO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name) DEFINE_FUNCTION RAW_VAR(c_name, 0) - SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context - mov %esp, %ecx + SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx // save all registers as basis for long jump context // Outgoing argument set up - subl MACRO_LITERAL(8), %esp // alignment padding - CFI_ADJUST_CFA_OFFSET(8) - PUSH ecx // pass SP + subl MACRO_LITERAL(12), %esp // alignment padding + CFI_ADJUST_CFA_OFFSET(12) pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() CFI_ADJUST_CFA_OFFSET(4) - call VAR(cxx_name, 1) // cxx_name(Thread*, SP) + call VAR(cxx_name, 1) // cxx_name(Thread*) int3 // unreached END_FUNCTION RAW_VAR(c_name, 0) END_MACRO MACRO2(ONE_ARG_RUNTIME_EXCEPTION, c_name, cxx_name) DEFINE_FUNCTION RAW_VAR(c_name, 0) - SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context + SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx // save all registers as basis for long jump context mov %esp, %ecx // Outgoing argument set up - PUSH eax // alignment padding - PUSH ecx // pass SP + subl MACRO_LITERAL(8), %esp // alignment padding + CFI_ADJUST_CFA_OFFSET(8) pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() CFI_ADJUST_CFA_OFFSET(4) PUSH eax // pass arg1 - call VAR(cxx_name, 1) // cxx_name(arg1, Thread*, SP) + call VAR(cxx_name, 1) // cxx_name(arg1, Thread*) int3 // unreached END_FUNCTION RAW_VAR(c_name, 0) END_MACRO MACRO2(TWO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name) DEFINE_FUNCTION RAW_VAR(c_name, 0) - SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context - mov %esp, %edx + SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx // save all registers as basis for long jump context // Outgoing argument set up - PUSH edx // pass SP + PUSH eax // alignment padding pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() CFI_ADJUST_CFA_OFFSET(4) PUSH ecx // 
pass arg2 PUSH eax // pass arg1 - call VAR(cxx_name, 1) // cxx_name(arg1, arg2, Thread*, SP) + call VAR(cxx_name, 1) // cxx_name(arg1, arg2, Thread*) int3 // unreached END_FUNCTION RAW_VAR(c_name, 0) END_MACRO @@ -204,15 +241,7 @@ TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromC */ MACRO2(INVOKE_TRAMPOLINE, c_name, cxx_name) DEFINE_FUNCTION RAW_VAR(c_name, 0) - // Set up the callee save frame to conform with Runtime::CreateCalleeSaveMethod(kRefsAndArgs) - // return address - PUSH edi - PUSH esi - PUSH ebp - PUSH ebx // Save args - PUSH edx - PUSH ecx - PUSH eax // <-- callee save Method* to go here + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME ebx, ebx movl %esp, %edx // remember SP // Outgoing argument set up subl MACRO_LITERAL(12), %esp // alignment padding @@ -224,7 +253,7 @@ MACRO2(INVOKE_TRAMPOLINE, c_name, cxx_name) CFI_ADJUST_CFA_OFFSET(4) PUSH ecx // pass arg2 PUSH eax // pass arg1 - call VAR(cxx_name, 1) // cxx_name(arg1, arg2, arg3, Thread*, SP) + call VAR(cxx_name, 1) // cxx_name(arg1, arg2, arg3, Thread*, SP) movl %edx, %edi // save code pointer in EDI addl MACRO_LITERAL(36), %esp // Pop arguments skip eax CFI_ADJUST_CFA_OFFSET(-36) @@ -275,7 +304,7 @@ DEFINE_FUNCTION art_quick_invoke_stub andl LITERAL(0xFFFFFFF0), %ebx // align frame size to 16 bytes subl LITERAL(12), %ebx // remove space for return address, ebx, and ebp subl %ebx, %esp // reserve stack space for argument array - SETUP_GOT_NOSAVE // clobbers ebx (harmless here) + SETUP_GOT_NOSAVE ebx // clobbers ebx (harmless here) lea 4(%esp), %eax // use stack pointer + method ptr as dest for memcpy pushl 20(%ebp) // push size of region to memcpy pushl 16(%ebp) // push arg array as source of memcpy @@ -287,7 +316,7 @@ DEFINE_FUNCTION art_quick_invoke_stub mov 4(%esp), %ecx // copy arg1 into ecx mov 8(%esp), %edx // copy arg2 into edx mov 12(%esp), %ebx // copy arg3 into ebx - call *METHOD_QUICK_CODE_OFFSET(%eax) // call the method + call *MIRROR_ART_METHOD_QUICK_CODE_OFFSET(%eax) // call the method mov %ebp, %esp // restore stack pointer CFI_DEF_CFA_REGISTER(esp) POP ebx // pop ebx @@ -311,120 +340,127 @@ END_FUNCTION art_quick_invoke_stub MACRO3(NO_ARG_DOWNCALL, c_name, cxx_name, return_macro) DEFINE_FUNCTION RAW_VAR(c_name, 0) - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC - mov %esp, %edx // remember SP + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx // save ref containing registers for GC // Outgoing argument set up - subl MACRO_LITERAL(8), %esp // push padding - CFI_ADJUST_CFA_OFFSET(8) - PUSH edx // pass SP + subl MACRO_LITERAL(12), %esp // push padding + CFI_ADJUST_CFA_OFFSET(12) pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() CFI_ADJUST_CFA_OFFSET(4) - call VAR(cxx_name, 1) // cxx_name(Thread*, SP) + call VAR(cxx_name, 1) // cxx_name(Thread*) addl MACRO_LITERAL(16), %esp // pop arguments CFI_ADJUST_CFA_OFFSET(-16) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address CALL_MACRO(return_macro, 2) // return or deliver exception END_FUNCTION RAW_VAR(c_name, 0) END_MACRO MACRO3(ONE_ARG_DOWNCALL, c_name, cxx_name, return_macro) DEFINE_FUNCTION RAW_VAR(c_name, 0) - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC - mov %esp, %edx // remember SP + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx // save ref containing registers for GC // Outgoing argument set up - PUSH eax // push padding - PUSH edx // pass SP + subl MACRO_LITERAL(8), %esp // push padding 
+ CFI_ADJUST_CFA_OFFSET(8) pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() CFI_ADJUST_CFA_OFFSET(4) PUSH eax // pass arg1 - call VAR(cxx_name, 1) // cxx_name(arg1, Thread*, SP) + call VAR(cxx_name, 1) // cxx_name(arg1, Thread*) addl MACRO_LITERAL(16), %esp // pop arguments CFI_ADJUST_CFA_OFFSET(-16) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address CALL_MACRO(return_macro, 2) // return or deliver exception END_FUNCTION RAW_VAR(c_name, 0) END_MACRO MACRO3(TWO_ARG_DOWNCALL, c_name, cxx_name, return_macro) DEFINE_FUNCTION RAW_VAR(c_name, 0) - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC - mov %esp, %edx // remember SP + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx // save ref containing registers for GC // Outgoing argument set up - PUSH edx // pass SP + PUSH eax // push padding pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() CFI_ADJUST_CFA_OFFSET(4) PUSH ecx // pass arg2 PUSH eax // pass arg1 - call VAR(cxx_name, 1) // cxx_name(arg1, arg2, Thread*, SP) + call VAR(cxx_name, 1) // cxx_name(arg1, arg2, Thread*) addl MACRO_LITERAL(16), %esp // pop arguments CFI_ADJUST_CFA_OFFSET(-16) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address CALL_MACRO(return_macro, 2) // return or deliver exception END_FUNCTION RAW_VAR(c_name, 0) END_MACRO MACRO3(THREE_ARG_DOWNCALL, c_name, cxx_name, return_macro) DEFINE_FUNCTION RAW_VAR(c_name, 0) - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC - mov %esp, %ebx // remember SP + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx // save ref containing registers for GC // Outgoing argument set up - subl MACRO_LITERAL(12), %esp // alignment padding - CFI_ADJUST_CFA_OFFSET(12) - PUSH ebx // pass SP pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() CFI_ADJUST_CFA_OFFSET(4) PUSH edx // pass arg3 PUSH ecx // pass arg2 PUSH eax // pass arg1 - call VAR(cxx_name, 1) // cxx_name(arg1, arg2, arg3, Thread*, SP) - addl MACRO_LITERAL(32), %esp // pop arguments - CFI_ADJUST_CFA_OFFSET(-32) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + call VAR(cxx_name, 1) // cxx_name(arg1, arg2, arg3, Thread*) + addl MACRO_LITERAL(16), %esp // pop arguments + CFI_ADJUST_CFA_OFFSET(-16) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address CALL_MACRO(return_macro, 2) // return or deliver exception END_FUNCTION RAW_VAR(c_name, 0) END_MACRO MACRO3(ONE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro) DEFINE_FUNCTION RAW_VAR(c_name, 0) - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC - mov %esp, %edx // remember SP - mov 32(%esp), %ecx // get referrer + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx // save ref containing registers for GC // Outgoing argument set up - PUSH edx // pass SP + mov FRAME_SIZE_REFS_ONLY_CALLEE_SAVE(%esp), %ecx // get referrer + PUSH eax // push padding pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() CFI_ADJUST_CFA_OFFSET(4) PUSH ecx // pass referrer PUSH eax // pass arg1 - call VAR(cxx_name, 1) // cxx_name(arg1, referrer, Thread*, SP) + call VAR(cxx_name, 1) // cxx_name(arg1, referrer, Thread*) addl MACRO_LITERAL(16), %esp // pop arguments CFI_ADJUST_CFA_OFFSET(-16) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address 
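The *_REF_DOWNCALL macros no longer recover the referrer from a caller-pushed SP argument: because the callee-save frame has a fixed size, the referrer's Method* slot in the calling frame sits at a constant offset above the new stack pointer. A small illustrative sketch of that addressing (types and layout simplified, not the real runtime structures):

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Hypothetical stand-in for a StackReference<mirror::ArtMethod> slot (a 32-bit value).
    using MethodSlot = uint32_t;

    // x86 value from asm_support_x86.h.
    constexpr std::size_t kFrameSizeRefsOnlyCalleeSave = 32;

    // After SETUP_REFS_ONLY_CALLEE_SAVE_FRAME, esp points at the frame's own method slot.
    // The referrer's slot lies just above the whole callee-save frame, which is why the
    // stubs simply do: mov FRAME_SIZE_REFS_ONLY_CALLEE_SAVE(%esp), %ecx.
    MethodSlot LoadReferrer(const uint8_t* sp_after_setup) {
      MethodSlot referrer;
      std::memcpy(&referrer, sp_after_setup + kFrameSizeRefsOnlyCalleeSave, sizeof(referrer));
      return referrer;
    }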
CALL_MACRO(return_macro, 2) // return or deliver exception END_FUNCTION RAW_VAR(c_name, 0) END_MACRO MACRO3(TWO_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro) DEFINE_FUNCTION RAW_VAR(c_name, 0) - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC - mov %esp, %ebx // remember SP - mov 32(%esp), %edx // get referrer - subl MACRO_LITERAL(12), %esp // alignment padding - CFI_ADJUST_CFA_OFFSET(12) - PUSH ebx // pass SP + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx // save ref containing registers for GC + // Outgoing argument set up + mov FRAME_SIZE_REFS_ONLY_CALLEE_SAVE(%esp), %edx // get referrer pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() CFI_ADJUST_CFA_OFFSET(4) - // Outgoing argument set up PUSH edx // pass referrer PUSH ecx // pass arg2 PUSH eax // pass arg1 - call VAR(cxx_name, 1) // cxx_name(arg1, arg2, referrer, Thread*, SP) - addl MACRO_LITERAL(32), %esp // pop arguments - CFI_ADJUST_CFA_OFFSET(-32) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + call VAR(cxx_name, 1) // cxx_name(arg1, arg2, referrer, Thread*) + addl MACRO_LITERAL(16), %esp // pop arguments + CFI_ADJUST_CFA_OFFSET(-16) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address CALL_MACRO(return_macro, 2) // return or deliver exception END_FUNCTION RAW_VAR(c_name, 0) END_MACRO +MACRO3(THREE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro) + DEFINE_FUNCTION RAW_VAR(c_name, 0) + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx // save ref containing registers for GC + // Outgoing argument set up + mov FRAME_SIZE_REFS_ONLY_CALLEE_SAVE(%esp), %ebx // get referrer + subl LITERAL(12), %esp // alignment padding + CFI_ADJUST_CFA_OFFSET(12) + pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() + CFI_ADJUST_CFA_OFFSET(4) + PUSH ebx // pass referrer + PUSH edx // pass arg3 + PUSH ecx // pass arg2 + PUSH eax // pass arg1 + call VAR(cxx_name, 1) // cxx_name(arg1, arg2, arg3, referrer, Thread*) + addl LITERAL(32), %esp // pop arguments + CFI_ADJUST_CFA_OFFSET(-32) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + CALL_MACRO(return_macro, 2) // return or deliver exception + END_FUNCTION RAW_VAR(c_name, 0) +END_MACRO MACRO0(RETURN_IF_RESULT_IS_NON_ZERO) testl %eax, %eax // eax == 0 ? @@ -443,9 +479,8 @@ MACRO0(RETURN_IF_EAX_ZERO) END_MACRO MACRO0(RETURN_OR_DELIVER_PENDING_EXCEPTION) - mov %fs:THREAD_EXCEPTION_OFFSET, %ebx // get exception field - testl %ebx, %ebx // ebx == 0 ? - jnz 1f // if ebx != 0 goto 1 + cmpl MACRO_LITERAL(0),%fs:THREAD_EXCEPTION_OFFSET // exception field == 0 ? + jne 1f // if exception field != 0 goto 1 ret // return 1: // deliver exception on current thread DELIVER_PENDING_EXCEPTION @@ -566,7 +601,7 @@ DEFINE_FUNCTION art_quick_lock_object testl %eax, %eax // null check object/eax jz .Lslow_lock .Lretry_lock: - movl LOCK_WORD_OFFSET(%eax), %ecx // ecx := lock word + movl MIRROR_OBJECT_LOCK_WORD_OFFSET(%eax), %ecx // ecx := lock word test LITERAL(0xC0000000), %ecx // test the 2 high bits. jne .Lslow_lock // slow path if either of the two high bits are set. 
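The lock-word tests in this fast path encode a thin lock: the two high bits must be clear, the owning thread id lives in the low 16 bits, and the recursion count sits in the bits above it (hence the +65536 increment and the overflow test). A compact, simplified C++ model of that fast path under those assumptions; the real LockWord encoding has more states than shown here, and the assembly retries the CAS where this sketch just falls back to the slow path:

    #include <atomic>
    #include <cstdint>

    // Simplified thin-lock word: [2 state bits][recursion count][16-bit owner id].
    constexpr uint32_t kStateMask     = 0xC0000000u;  // Non-zero => not a thin lock, go slow.
    constexpr uint32_t kThinCountUnit = 1u << 16;     // One recursion step (the 65536 above).
    constexpr uint32_t kOwnerMask     = 0xFFFFu;

    // Returns true if the lock was taken on the fast path; false means "take the slow path".
    bool ThinLockFastPath(std::atomic<uint32_t>& lock_word, uint16_t thread_id) {
      uint32_t cur = lock_word.load(std::memory_order_relaxed);
      if ((cur & kStateMask) != 0) {
        return false;                              // Inflated or otherwise not thin: slow path.
      }
      if (cur == 0) {
        uint32_t expected = 0;                     // Unlocked: claim with our id, count 0.
        return lock_word.compare_exchange_strong(expected, thread_id,
                                                 std::memory_order_acquire);
      }
      if ((cur & kOwnerMask) == thread_id) {       // Recursive acquire by the owner.
        uint32_t next = cur + kThinCountUnit;
        if ((next & kStateMask) != 0) {
          return false;                            // Count would overflow: slow path.
        }
        lock_word.store(next, std::memory_order_relaxed);  // We hold the lock; no CAS needed.
        return true;
      }
      return false;                                // Owned by another thread: slow path.
    }

    int main() {
      std::atomic<uint32_t> lock_word{0};
      bool first  = ThinLockFastPath(lock_word, /*thread_id=*/7);   // Acquire.
      bool second = ThinLockFastPath(lock_word, /*thread_id=*/7);   // Recursive acquire.
      bool other  = ThinLockFastPath(lock_word, /*thread_id=*/9);   // Contended: slow path.
      return (first && second && !other) ? 0 : 1;
    }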
movl %fs:THREAD_ID_OFFSET, %edx // edx := thread id @@ -575,11 +610,11 @@ DEFINE_FUNCTION art_quick_lock_object // unlocked case - %edx holds thread id with count of 0 movl %eax, %ecx // remember object in case of retry xor %eax, %eax // eax == 0 for comparison with lock word in cmpxchg - lock cmpxchg %edx, LOCK_WORD_OFFSET(%ecx) + lock cmpxchg %edx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%ecx) jnz .Lcmpxchg_fail // cmpxchg failed retry ret .Lcmpxchg_fail: - movl %ecx, %eax // restore eax + movl %ecx, %eax // restore eax jmp .Lretry_lock .Lalready_thin: cmpw %cx, %dx // do we hold the lock already? @@ -587,28 +622,28 @@ DEFINE_FUNCTION art_quick_lock_object addl LITERAL(65536), %ecx // increment recursion count test LITERAL(0xC0000000), %ecx // overflowed if either of top two bits are set jne .Lslow_lock // count overflowed so go slow - movl %ecx, LOCK_WORD_OFFSET(%eax) // update lockword, cmpxchg not necessary as we hold lock + // update lockword, cmpxchg not necessary as we hold lock + movl %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%eax) ret .Lslow_lock: - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC - mov %esp, %edx // remember SP + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx // save ref containing registers for GC // Outgoing argument set up - PUSH eax // push padding - PUSH edx // pass SP + subl LITERAL(8), %esp // alignment padding + CFI_ADJUST_CFA_OFFSET(8) pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() CFI_ADJUST_CFA_OFFSET(4) PUSH eax // pass object - call SYMBOL(artLockObjectFromCode) // artLockObjectFromCode(object, Thread*, SP) + call SYMBOL(artLockObjectFromCode) // artLockObjectFromCode(object, Thread*) addl LITERAL(16), %esp // pop arguments CFI_ADJUST_CFA_OFFSET(-16) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address RETURN_IF_EAX_ZERO END_FUNCTION art_quick_lock_object DEFINE_FUNCTION art_quick_unlock_object testl %eax, %eax // null check object/eax jz .Lslow_unlock - movl LOCK_WORD_OFFSET(%eax), %ecx // ecx := lock word + movl MIRROR_OBJECT_LOCK_WORD_OFFSET(%eax), %ecx // ecx := lock word movl %fs:THREAD_ID_OFFSET, %edx // edx := thread id test LITERAL(0xC0000000), %ecx jnz .Lslow_unlock // lock word contains a monitor @@ -616,25 +651,24 @@ DEFINE_FUNCTION art_quick_unlock_object jne .Lslow_unlock cmpl LITERAL(65536), %ecx jae .Lrecursive_thin_unlock - movl LITERAL(0), LOCK_WORD_OFFSET(%eax) + movl LITERAL(0), MIRROR_OBJECT_LOCK_WORD_OFFSET(%eax) ret .Lrecursive_thin_unlock: subl LITERAL(65536), %ecx - mov %ecx, LOCK_WORD_OFFSET(%eax) + mov %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%eax) ret .Lslow_unlock: - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC - mov %esp, %edx // remember SP + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx // save ref containing registers for GC // Outgoing argument set up - PUSH eax // push padding - PUSH edx // pass SP + subl LITERAL(8), %esp // alignment padding + CFI_ADJUST_CFA_OFFSET(8) pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() CFI_ADJUST_CFA_OFFSET(4) PUSH eax // pass object - call SYMBOL(artUnlockObjectFromCode) // artUnlockObjectFromCode(object, Thread*, SP) + call SYMBOL(artUnlockObjectFromCode) // artUnlockObjectFromCode(object, Thread*) addl LITERAL(16), %esp // pop arguments CFI_ADJUST_CFA_OFFSET(-16) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address RETURN_IF_EAX_ZERO END_FUNCTION 
art_quick_unlock_object @@ -663,15 +697,14 @@ DEFINE_FUNCTION art_quick_check_cast POP ecx addl LITERAL(4), %esp CFI_ADJUST_CFA_OFFSET(-12) - SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context - mov %esp, %edx + SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx // save all registers as basis for long jump context // Outgoing argument set up - PUSH edx // pass SP + PUSH eax // alignment padding pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() CFI_ADJUST_CFA_OFFSET(4) PUSH ecx // pass arg2 PUSH eax // pass arg1 - call SYMBOL(artThrowClassCastException) // (Class* a, Class* b, Thread*, SP) + call SYMBOL(artThrowClassCastException) // (Class* a, Class* b, Thread*) int3 // unreached END_FUNCTION art_quick_check_cast @@ -687,7 +720,7 @@ DEFINE_FUNCTION art_quick_aput_obj_with_null_and_bound_check END_FUNCTION art_quick_aput_obj_with_null_and_bound_check DEFINE_FUNCTION art_quick_aput_obj_with_bound_check - movl ARRAY_LENGTH_OFFSET(%eax), %ebx + movl MIRROR_ARRAY_LENGTH_OFFSET(%eax), %ebx cmpl %ebx, %ecx jb SYMBOL(art_quick_aput_obj) mov %ecx, %eax @@ -698,18 +731,19 @@ END_FUNCTION art_quick_aput_obj_with_bound_check DEFINE_FUNCTION art_quick_aput_obj test %edx, %edx // store of null jz .Ldo_aput_null - movl CLASS_OFFSET(%eax), %ebx - movl CLASS_COMPONENT_TYPE_OFFSET(%ebx), %ebx - cmpl CLASS_OFFSET(%edx), %ebx // value's type == array's component type - trivial assignability + movl MIRROR_OBJECT_CLASS_OFFSET(%eax), %ebx + movl MIRROR_CLASS_COMPONENT_TYPE_OFFSET(%ebx), %ebx + // value's type == array's component type - trivial assignability + cmpl MIRROR_OBJECT_CLASS_OFFSET(%edx), %ebx jne .Lcheck_assignability .Ldo_aput: - movl %edx, OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4) + movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4) movl %fs:THREAD_CARD_TABLE_OFFSET, %edx shrl LITERAL(7), %eax movb %dl, (%edx, %eax) ret .Ldo_aput_null: - movl %edx, OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4) + movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4) ret .Lcheck_assignability: PUSH eax // save arguments @@ -717,7 +751,7 @@ DEFINE_FUNCTION art_quick_aput_obj PUSH edx subl LITERAL(8), %esp // alignment padding CFI_ADJUST_CFA_OFFSET(8) - pushl CLASS_OFFSET(%edx) // pass arg2 - type of the value to be stored + pushl MIRROR_OBJECT_CLASS_OFFSET(%edx) // pass arg2 - type of the value to be stored CFI_ADJUST_CFA_OFFSET(4) PUSH ebx // pass arg1 - component type of the array call SYMBOL(artIsAssignableFromCode) // (Class* a, Class* b) @@ -728,7 +762,7 @@ DEFINE_FUNCTION art_quick_aput_obj POP edx POP ecx POP eax - movl %edx, OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4) // do the aput + movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4) // do the aput movl %fs:THREAD_CARD_TABLE_OFFSET, %edx shrl LITERAL(7), %eax movb %dl, (%edx, %eax) @@ -737,20 +771,19 @@ DEFINE_FUNCTION art_quick_aput_obj POP edx POP ecx POP eax - SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context - mov %esp, %ecx + SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx // save all registers as basis for long jump context // Outgoing argument set up - PUSH ecx // pass SP + PUSH eax // alignment padding pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() CFI_ADJUST_CFA_OFFSET(4) PUSH edx // pass arg2 - value PUSH eax // pass arg1 - array - call SYMBOL(artThrowArrayStoreException) // (array, value, Thread*, SP) + call SYMBOL(artThrowArrayStoreException) // (array, value, Thread*) int3 // unreached END_FUNCTION art_quick_aput_obj DEFINE_FUNCTION art_quick_memcpy - SETUP_GOT_NOSAVE // 
clobbers EBX + SETUP_GOT_NOSAVE ebx // clobbers EBX PUSH edx // pass arg3 PUSH ecx // pass arg2 PUSH eax // pass arg1 @@ -856,236 +889,76 @@ DEFINE_FUNCTION art_quick_lushr ret END_FUNCTION art_quick_lushr -DEFINE_FUNCTION art_quick_set8_instance - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC - mov %esp, %ebx // remember SP - subl LITERAL(8), %esp // alignment padding - CFI_ADJUST_CFA_OFFSET(8) - PUSH ebx // pass SP - pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() - CFI_ADJUST_CFA_OFFSET(4) - mov 32(%ebx), %ebx // get referrer - PUSH ebx // pass referrer - PUSH edx // pass new_val - PUSH ecx // pass object - PUSH eax // pass field_idx - call PLT_SYMBOL(artSet8InstanceFromCode) // (field_idx, Object*, new_val, referrer, Thread*, SP) - addl LITERAL(32), %esp // pop arguments - CFI_ADJUST_CFA_OFFSET(-32) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address - RETURN_IF_EAX_ZERO // return or deliver exception -END_FUNCTION art_quick_set8_instance +ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION +ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION +ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION +ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION +ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION +ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION +ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION -DEFINE_FUNCTION art_quick_set16_instance - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC - mov %esp, %ebx // remember SP - subl LITERAL(8), %esp // alignment padding - CFI_ADJUST_CFA_OFFSET(8) - PUSH ebx // pass SP - pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() - CFI_ADJUST_CFA_OFFSET(4) - mov 32(%ebx), %ebx // get referrer - PUSH ebx // pass referrer - PUSH edx // pass new_val - PUSH ecx // pass object - PUSH eax // pass field_idx - call PLT_SYMBOL(artSet16InstanceFromCode) // (field_idx, Object*, new_val, referrer, Thread*, SP) - addl LITERAL(32), %esp // pop arguments - CFI_ADJUST_CFA_OFFSET(-32) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address - RETURN_IF_EAX_ZERO // return or deliver exception -END_FUNCTION art_quick_set16_instance +TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION +TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION +TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION +TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION +TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION +TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION +TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION -DEFINE_FUNCTION art_quick_set32_instance - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC - mov %esp, %ebx // remember SP - subl 
LITERAL(8), %esp // alignment padding - CFI_ADJUST_CFA_OFFSET(8) - PUSH ebx // pass SP - pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() - CFI_ADJUST_CFA_OFFSET(4) - mov 32(%ebx), %ebx // get referrer - PUSH ebx // pass referrer - PUSH edx // pass new_val - PUSH ecx // pass object - PUSH eax // pass field_idx - call SYMBOL(artSet32InstanceFromCode) // (field_idx, Object*, new_val, referrer, Thread*, SP) - addl LITERAL(32), %esp // pop arguments - CFI_ADJUST_CFA_OFFSET(-32) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address - RETURN_IF_EAX_ZERO // return or deliver exception -END_FUNCTION art_quick_set32_instance +TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCode, RETURN_IF_EAX_ZERO +TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCode, RETURN_IF_EAX_ZERO +TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCode, RETURN_IF_EAX_ZERO +TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCode, RETURN_IF_EAX_ZERO +THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCode, RETURN_IF_EAX_ZERO +THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCode, RETURN_IF_EAX_ZERO +THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCode, RETURN_IF_EAX_ZERO +THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RETURN_IF_EAX_ZERO + +// Call artSet64InstanceFromCode with 4 word size arguments and the referrer. DEFINE_FUNCTION art_quick_set64_instance - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx // save ref containing registers for GC + // Outgoing argument set up + mov FRAME_SIZE_REFS_ONLY_CALLEE_SAVE(%esp), %ebx // get referrer subl LITERAL(8), %esp // alignment padding CFI_ADJUST_CFA_OFFSET(8) - PUSH esp // pass SP-8 - addl LITERAL(8), (%esp) // fix SP on stack by adding 8 pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() CFI_ADJUST_CFA_OFFSET(4) + pushl (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE+12)(%esp) // pass referrer + CFI_ADJUST_CFA_OFFSET(4) PUSH ebx // pass high half of new_val PUSH edx // pass low half of new_val PUSH ecx // pass object PUSH eax // pass field_idx - call SYMBOL(artSet64InstanceFromCode) // (field_idx, Object*, new_val, Thread*, SP) + call SYMBOL(artSet64InstanceFromCode) // (field_idx, Object*, new_val, referrer, Thread*) addl LITERAL(32), %esp // pop arguments CFI_ADJUST_CFA_OFFSET(-32) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address RETURN_IF_EAX_ZERO // return or deliver exception END_FUNCTION art_quick_set64_instance -DEFINE_FUNCTION art_quick_set_obj_instance - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC - mov %esp, %ebx // remember SP - subl LITERAL(8), %esp // alignment padding - CFI_ADJUST_CFA_OFFSET(8) - PUSH ebx // pass SP - pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() - CFI_ADJUST_CFA_OFFSET(4) - mov 32(%ebx), %ebx // get referrer - PUSH ebx // pass referrer - PUSH edx // pass new_val - PUSH ecx // pass object - PUSH eax // pass field_idx - call SYMBOL(artSetObjInstanceFromCode) // (field_idx, Object*, new_val, referrer, Thread*, SP) - addl LITERAL(32), %esp // pop arguments - CFI_ADJUST_CFA_OFFSET(-32) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address - RETURN_IF_EAX_ZERO // return or deliver exception -END_FUNCTION art_quick_set_obj_instance - -TWO_ARG_REF_DOWNCALL 
art_quick_get_byte_instance, artGetByteInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION -TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION -TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION -TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION -TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION -TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION - -DEFINE_FUNCTION art_quick_get64_instance - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC - mov %esp, %ebx // remember SP - mov 32(%esp), %edx // get referrer - subl LITERAL(12), %esp // alignment padding - CFI_ADJUST_CFA_OFFSET(12) - PUSH ebx // pass SP - pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() - CFI_ADJUST_CFA_OFFSET(4) - PUSH edx // pass referrer - PUSH ecx // pass object - PUSH eax // pass field_idx - call SYMBOL(artGet64InstanceFromCode) // (field_idx, Object*, referrer, Thread*, SP) - addl LITERAL(32), %esp // pop arguments - CFI_ADJUST_CFA_OFFSET(-32) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address - RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception -END_FUNCTION art_quick_get64_instance - -DEFINE_FUNCTION art_quick_set8_static - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC - mov %esp, %ebx // remember SP - mov 32(%esp), %edx // get referrer - subl LITERAL(12), %esp // alignment padding - CFI_ADJUST_CFA_OFFSET(12) - PUSH ebx // pass SP - pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() - CFI_ADJUST_CFA_OFFSET(4) - PUSH edx // pass referrer - PUSH ecx // pass new_val - PUSH eax // pass field_idx - call SYMBOL(artSet8StaticFromCode) // (field_idx, new_val, referrer, Thread*, SP) - addl LITERAL(32), %esp // pop arguments - CFI_ADJUST_CFA_OFFSET(-32) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address - RETURN_IF_EAX_ZERO // return or deliver exception -END_FUNCTION art_quick_set8_static - -DEFINE_FUNCTION art_quick_set16_static - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC - mov %esp, %ebx // remember SP - mov 32(%esp), %edx // get referrer - subl LITERAL(12), %esp // alignment padding - CFI_ADJUST_CFA_OFFSET(12) - PUSH ebx // pass SP - pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() - CFI_ADJUST_CFA_OFFSET(4) - PUSH edx // pass referrer - PUSH ecx // pass new_val - PUSH eax // pass field_idx - call SYMBOL(artSet16StaticFromCode) // (field_idx, new_val, referrer, Thread*, SP) - addl LITERAL(32), %esp // pop arguments - CFI_ADJUST_CFA_OFFSET(-32) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address - RETURN_IF_EAX_ZERO // return or deliver exception -END_FUNCTION art_quick_set16_static - -DEFINE_FUNCTION art_quick_set32_static - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC - mov %esp, %ebx // remember SP - mov 32(%esp), %edx // get referrer +// Call artSet64StaticFromCode with 3 word size arguments plus with the referrer in the 2nd position +// so that new_val is aligned on even registers were we passing arguments in registers. 
+DEFINE_FUNCTION art_quick_set64_static + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx // save ref containing registers for GC + mov FRAME_SIZE_REFS_ONLY_CALLEE_SAVE(%esp), %ebx // get referrer subl LITERAL(12), %esp // alignment padding CFI_ADJUST_CFA_OFFSET(12) - PUSH ebx // pass SP pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() CFI_ADJUST_CFA_OFFSET(4) - PUSH edx // pass referrer - PUSH ecx // pass new_val - PUSH eax // pass field_idx - call SYMBOL(artSet32StaticFromCode) // (field_idx, new_val, referrer, Thread*, SP) - addl LITERAL(32), %esp // pop arguments - CFI_ADJUST_CFA_OFFSET(-32) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address - RETURN_IF_EAX_ZERO // return or deliver exception -END_FUNCTION art_quick_set32_static - -DEFINE_FUNCTION art_quick_set64_static - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC - mov %esp, %ebx // remember SP - subl LITERAL(8), %esp // alignment padding - CFI_ADJUST_CFA_OFFSET(8) - PUSH ebx // pass SP - pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() - CFI_ADJUST_CFA_OFFSET(4) - mov 32(%ebx), %ebx // get referrer PUSH edx // pass high half of new_val PUSH ecx // pass low half of new_val PUSH ebx // pass referrer PUSH eax // pass field_idx - call SYMBOL(artSet64StaticFromCode) // (field_idx, referrer, new_val, Thread*, SP) + call SYMBOL(artSet64StaticFromCode) // (field_idx, referrer, new_val, Thread*) addl LITERAL(32), %esp // pop arguments CFI_ADJUST_CFA_OFFSET(-32) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address RETURN_IF_EAX_ZERO // return or deliver exception END_FUNCTION art_quick_set64_static -DEFINE_FUNCTION art_quick_set_obj_static - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC - mov %esp, %ebx // remember SP - mov 32(%esp), %edx // get referrer - subl LITERAL(12), %esp // alignment padding - CFI_ADJUST_CFA_OFFSET(12) - PUSH ebx // pass SP - pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() - CFI_ADJUST_CFA_OFFSET(4) - PUSH edx // pass referrer - PUSH ecx // pass new_val - PUSH eax // pass field_idx - call SYMBOL(artSetObjStaticFromCode) // (field_idx, new_val, referrer, Thread*, SP) - addl LITERAL(32), %esp // pop arguments - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address - RETURN_IF_EAX_ZERO // return or deliver exception -END_FUNCTION art_quick_set_obj_static - -ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION -ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION -ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION -ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION -ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION -ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION -ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION - DEFINE_FUNCTION art_quick_proxy_invoke_handler - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME // save frame and Method* + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_EAX PUSH esp // pass SP pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() CFI_ADJUST_CFA_OFFSET(4) @@ -1107,15 +980,15 @@ 
END_FUNCTION art_quick_proxy_invoke_handler DEFINE_FUNCTION art_quick_imt_conflict_trampoline PUSH ecx movl 8(%esp), %eax // load caller Method* - movl METHOD_DEX_CACHE_METHODS_OFFSET(%eax), %eax // load dex_cache_resolved_methods + movl MIRROR_ART_METHOD_DEX_CACHE_METHODS_OFFSET(%eax), %eax // load dex_cache_resolved_methods movd %xmm0, %ecx // get target method index stored in xmm0 - movl OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4), %eax // load the target method + movl MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4), %eax // load the target method POP ecx jmp SYMBOL(art_quick_invoke_interface_trampoline) END_FUNCTION art_quick_imt_conflict_trampoline DEFINE_FUNCTION art_quick_resolution_trampoline - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME ebx, ebx movl %esp, %edi PUSH EDI // pass SP. do not just PUSH ESP; that messes up unwinding pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current() @@ -1136,14 +1009,12 @@ DEFINE_FUNCTION art_quick_resolution_trampoline xchgl 0(%esp),%edi // restore EDI and place code pointer as only value on stack ret // tail call into method 1: - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME DELIVER_PENDING_EXCEPTION END_FUNCTION art_quick_resolution_trampoline DEFINE_FUNCTION art_quick_generic_jni_trampoline - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME - // This also stores the native ArtMethod reference at the bottom of the stack. - + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_EAX movl %esp, %ebp // save SP at callee-save frame CFI_DEF_CFA_REGISTER(ebp) subl LITERAL(5120), %esp @@ -1151,7 +1022,6 @@ DEFINE_FUNCTION art_quick_generic_jni_trampoline // (Thread*, SP) // (esp) 4(esp) <= C calling convention // fs:... ebp <= where they are - // Also: PLT, so need GOT in ebx. subl LITERAL(8), %esp // Padding for 16B alignment. pushl %ebp // Pass SP (to ArtMethod). @@ -1216,12 +1086,12 @@ DEFINE_FUNCTION art_quick_generic_jni_trampoline movl %ebp, %esp CFI_DEF_CFA_REGISTER(esp) .Lexception_in_native: - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME DELIVER_PENDING_EXCEPTION END_FUNCTION art_quick_generic_jni_trampoline DEFINE_FUNCTION art_quick_to_interpreter_bridge - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME // save frame + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME ebx, ebx // save frame mov %esp, %edx // remember SP PUSH eax // alignment padding PUSH edx // pass SP @@ -1234,7 +1104,7 @@ DEFINE_FUNCTION art_quick_to_interpreter_bridge punpckldq %xmm1, %xmm0 addl LITERAL(16), %esp // pop arguments CFI_ADJUST_CFA_OFFSET(-16) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception END_FUNCTION art_quick_to_interpreter_bridge @@ -1242,26 +1112,23 @@ END_FUNCTION art_quick_to_interpreter_bridge * Routine that intercepts method calls and returns. */ DEFINE_FUNCTION art_quick_instrumentation_entry - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME - movl %esp, %edx // Save SP. + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME ebx, edx PUSH eax // Save eax which will be clobbered by the callee-save method. - subl LITERAL(8), %esp // Align stack. - CFI_ADJUST_CFA_OFFSET(8) + subl LITERAL(12), %esp // Align stack. + CFI_ADJUST_CFA_OFFSET(12) pushl 40(%esp) // Pass LR. CFI_ADJUST_CFA_OFFSET(4) - PUSH edx // Pass SP. pushl %fs:THREAD_SELF_OFFSET // Pass Thread::Current(). CFI_ADJUST_CFA_OFFSET(4) PUSH ecx // Pass receiver. PUSH eax // Pass Method*. 
- call SYMBOL(artInstrumentationMethodEntryFromCode) // (Method*, Object*, Thread*, SP, LR) - SETUP_GOT_NOSAVE + call SYMBOL(artInstrumentationMethodEntryFromCode) // (Method*, Object*, Thread*, LR) addl LITERAL(28), %esp // Pop arguments up to saved Method*. movl 28(%esp), %edi // Restore edi. movl %eax, 28(%esp) // Place code* over edi, just under return pc. movl SYMBOL(art_quick_instrumentation_exit)@GOT(%ebx), %ebx + // Place instrumentation exit as return pc. ebx holds the GOT computed on entry. movl %ebx, 32(%esp) - // Place instrumentation exit as return pc. movl (%esp), %eax // Restore eax. movl 8(%esp), %ecx // Restore ecx. movl 12(%esp), %edx // Restore edx. @@ -1274,7 +1141,7 @@ END_FUNCTION art_quick_instrumentation_entry DEFINE_FUNCTION art_quick_instrumentation_exit pushl LITERAL(0) // Push a fake return PC as there will be none on the stack. - SETUP_REF_ONLY_CALLEE_SAVE_FRAME + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx mov %esp, %ecx // Remember SP subl LITERAL(8), %esp // Save float return value. CFI_ADJUST_CFA_OFFSET(8) @@ -1300,7 +1167,7 @@ DEFINE_FUNCTION art_quick_instrumentation_exit movq (%esp), %xmm0 // Restore fpr return value. addl LITERAL(8), %esp CFI_ADJUST_CFA_OFFSET(-8) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME addl LITERAL(4), %esp // Remove fake return pc. jmp *%ecx // Return. END_FUNCTION art_quick_instrumentation_exit @@ -1311,14 +1178,12 @@ END_FUNCTION art_quick_instrumentation_exit */ DEFINE_FUNCTION art_quick_deoptimize pushl %ebx // Fake that we were called. - SETUP_SAVE_ALL_CALLEE_SAVE_FRAME - mov %esp, %ecx // Remember SP. - subl LITERAL(8), %esp // Align stack. - CFI_ADJUST_CFA_OFFSET(8) - PUSH ecx // Pass SP. + SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx + subl LITERAL(12), %esp // Align stack. + CFI_ADJUST_CFA_OFFSET(12) pushl %fs:THREAD_SELF_OFFSET // Pass Thread::Current(). CFI_ADJUST_CFA_OFFSET(4) - call SYMBOL(artDeoptimize) // artDeoptimize(Thread*, SP) + call SYMBOL(artDeoptimize) // artDeoptimize(Thread*) int3 // Unreachable.
END_FUNCTION art_quick_deoptimize @@ -1332,15 +1197,15 @@ END_FUNCTION art_quick_deoptimize DEFINE_FUNCTION art_quick_string_compareto PUSH esi // push callee save reg PUSH edi // push callee save reg - mov STRING_COUNT_OFFSET(%eax), %edx - mov STRING_COUNT_OFFSET(%ecx), %ebx - mov STRING_VALUE_OFFSET(%eax), %esi - mov STRING_VALUE_OFFSET(%ecx), %edi - mov STRING_OFFSET_OFFSET(%eax), %eax - mov STRING_OFFSET_OFFSET(%ecx), %ecx + mov MIRROR_STRING_COUNT_OFFSET(%eax), %edx + mov MIRROR_STRING_COUNT_OFFSET(%ecx), %ebx + mov MIRROR_STRING_VALUE_OFFSET(%eax), %esi + mov MIRROR_STRING_VALUE_OFFSET(%ecx), %edi + mov MIRROR_STRING_OFFSET_OFFSET(%eax), %eax + mov MIRROR_STRING_OFFSET_OFFSET(%ecx), %ecx /* Build pointers to the start of string data */ - lea STRING_DATA_OFFSET(%esi, %eax, 2), %esi - lea STRING_DATA_OFFSET(%edi, %ecx, 2), %edi + lea MIRROR_CHAR_ARRAY_DATA_OFFSET(%esi, %eax, 2), %esi + lea MIRROR_CHAR_ARRAY_DATA_OFFSET(%edi, %ecx, 2), %edi /* Calculate min length and count diff */ mov %edx, %ecx mov %edx, %eax @@ -1375,7 +1240,7 @@ END_FUNCTION art_quick_string_compareto // eax: address of jmp_buf in TLS DEFINE_FUNCTION art_nested_signal_return - SETUP_GOT_NOSAVE // sets %ebx for call into PLT + SETUP_GOT_NOSAVE ebx // sets %ebx for call into PLT movl LITERAL(1), %ecx pushl %ecx // second arg to longjmp (1) pushl %eax // first arg to longjmp (jmp_buf) diff --git a/runtime/arch/x86_64/asm_support_x86_64.h b/runtime/arch/x86_64/asm_support_x86_64.h index 40958dcdf..eddd17206 100644 --- a/runtime/arch/x86_64/asm_support_x86_64.h +++ b/runtime/arch/x86_64/asm_support_x86_64.h @@ -19,30 +19,8 @@ #include "asm_support.h" -// Note: these callee save methods loads require read barriers. -// Offset of field Runtime::callee_save_methods_[kSaveAll] -#define RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET 0 -// Offset of field Runtime::callee_save_methods_[kRefsOnly] -#define RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET 8 -// Offset of field Runtime::callee_save_methods_[kRefsAndArgs] -#define RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET 16 - -// Offset of field Thread::self_ verified in InitCpu -#define THREAD_SELF_OFFSET 192 -// Offset of field Thread::card_table_ verified in InitCpu -#define THREAD_CARD_TABLE_OFFSET 120 -// Offset of field Thread::exception_ verified in InitCpu -#define THREAD_EXCEPTION_OFFSET 128 -// Offset of field Thread::thin_lock_thread_id_ verified in InitCpu -#define THREAD_ID_OFFSET 12 - #define FRAME_SIZE_SAVE_ALL_CALLEE_SAVE 64 + 4*8 #define FRAME_SIZE_REFS_ONLY_CALLEE_SAVE 64 + 4*8 #define FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE 176 + 4*8 -// Expected size of a heap reference -#define HEAP_REFERENCE_SIZE 4 -// Expected size of a stack reference -#define STACK_REFERENCE_SIZE 4 - #endif // ART_RUNTIME_ARCH_X86_64_ASM_SUPPORT_X86_64_H_ diff --git a/runtime/arch/x86_64/quick_entrypoints_x86_64.S b/runtime/arch/x86_64/quick_entrypoints_x86_64.S index 648a99a0e..bed7238b0 100644 --- a/runtime/arch/x86_64/quick_entrypoints_x86_64.S +++ b/runtime/arch/x86_64/quick_entrypoints_x86_64.S @@ -57,25 +57,25 @@ MACRO0(SETUP_SAVE_ALL_CALLEE_SAVE_FRAME) PUSH r12 // Callee save. PUSH rbp // Callee save. PUSH rbx // Callee save. - // Create space for FPR args, plus padding for alignment - subq LITERAL(4 * 8), %rsp - CFI_ADJUST_CFA_OFFSET(4 * 8) + // Create space for FPR args, plus space for StackReference. + subq MACRO_LITERAL(4 * 8 + 8), %rsp + CFI_ADJUST_CFA_OFFSET(4 * 8 + 8) // Save FPRs. 
- movq %xmm12, 0(%rsp) - movq %xmm13, 8(%rsp) - movq %xmm14, 16(%rsp) - movq %xmm15, 24(%rsp) - subq MACRO_LITERAL(8), %rsp // Space for Method* (also aligns the frame). - CFI_ADJUST_CFA_OFFSET(8) + movq %xmm12, 8(%rsp) + movq %xmm13, 16(%rsp) + movq %xmm14, 24(%rsp) + movq %xmm15, 32(%rsp) // R10 := ArtMethod* for save all callee save frame method. THIS_LOAD_REQUIRES_READ_BARRIER movq RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET(%r10), %r10 // Store ArtMethod* to bottom of stack. movq %r10, 0(%rsp) + // Store rsp as the top quick frame. + movq %rsp, %gs:THREAD_TOP_QUICK_FRAME_OFFSET // Ugly compile-time check, but we only have the preprocessor. // Last +8: implicit return address pushed on stack when caller made call. -#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 6*8 + 4*8 + 8 + 8) +#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 6 * 8 + 4 * 8 + 8 + 8) #error "SAVE_ALL_CALLEE_SAVE_FRAME(X86_64) size not as expected." #endif #endif // __APPLE__ @@ -85,7 +85,7 @@ END_MACRO * Macro that sets up the callee save frame to conform with * Runtime::CreateCalleeSaveMethod(kRefsOnly) */ -MACRO0(SETUP_REF_ONLY_CALLEE_SAVE_FRAME) +MACRO0(SETUP_REFS_ONLY_CALLEE_SAVE_FRAME) #if defined(__APPLE__) int3 int3 @@ -100,9 +100,9 @@ MACRO0(SETUP_REF_ONLY_CALLEE_SAVE_FRAME) PUSH r12 // Callee save. PUSH rbp // Callee save. PUSH rbx // Callee save. - // Create space for FPR args, plus padding for alignment - subq LITERAL(8 + 4*8), %rsp - CFI_ADJUST_CFA_OFFSET(8 + 4*8) + // Create space for FPR args, plus space for StackReference. + subq LITERAL(8 + 4 * 8), %rsp + CFI_ADJUST_CFA_OFFSET(8 + 4 * 8) // Save FPRs. movq %xmm12, 8(%rsp) movq %xmm13, 16(%rsp) @@ -113,16 +113,18 @@ MACRO0(SETUP_REF_ONLY_CALLEE_SAVE_FRAME) movq RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET(%r10), %r10 // Store ArtMethod* to bottom of stack. movq %r10, 0(%rsp) + // Store rsp as the top quick frame. + movq %rsp, %gs:THREAD_TOP_QUICK_FRAME_OFFSET // Ugly compile-time check, but we only have the preprocessor. // Last +8: implicit return address pushed on stack when caller made call. -#if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 6*8 + 4*8 + 8 + 8) +#if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 6 * 8 + 4 * 8 + 8 + 8) #error "REFS_ONLY_CALLEE_SAVE_FRAME(X86_64) size not as expected." #endif #endif // __APPLE__ END_MACRO -MACRO0(RESTORE_REF_ONLY_CALLEE_SAVE_FRAME) +MACRO0(RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME) movq 8(%rsp), %xmm12 movq 16(%rsp), %xmm13 movq 24(%rsp), %xmm14 @@ -142,7 +144,7 @@ END_MACRO * Macro that sets up the callee save frame to conform with * Runtime::CreateCalleeSaveMethod(kRefsAndArgs) */ -MACRO0(SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME) +MACRO0(SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME) #if defined(__APPLE__) int3 int3 @@ -162,12 +164,13 @@ MACRO0(SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME) PUSH rbx // Callee save. PUSH rdx // Quick arg 2. PUSH rcx // Quick arg 3. - // Create space for FPR args and create 2 slots, 1 of padding and 1 for the ArtMethod*. + // Create space for FPR args and create 2 slots, 1 of padding and 1 for the + // StackReference. subq MACRO_LITERAL(80 + 4 * 8), %rsp CFI_ADJUST_CFA_OFFSET(80 + 4 * 8) // R10 := ArtMethod* for ref and args callee save frame method. THIS_LOAD_REQUIRES_READ_BARRIER - movq RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET(%r10), %r10 + movq RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET(%r10), %r10 // Save FPRs. movq %xmm0, 16(%rsp) movq %xmm1, 24(%rsp) @@ -183,16 +186,54 @@ MACRO0(SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME) movq %xmm15, 104(%rsp) // Store ArtMethod* to bottom of stack.
movq %r10, 0(%rsp) + // Store rsp as the top quick frame. + movq %rsp, %gs:THREAD_TOP_QUICK_FRAME_OFFSET // Ugly compile-time check, but we only have the preprocessor. // Last +8: implicit return address pushed on stack when caller made call. -#if (FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE != 11*8 + 4*8 + 80 + 8) +#if (FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE != 11 * 8 + 4 * 8 + 80 + 8) #error "REFS_AND_ARGS_CALLEE_SAVE_FRAME(X86_64) size not as expected." #endif #endif // __APPLE__ END_MACRO -MACRO0(RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME) +MACRO0(SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_RDI) + // Save callee and GPR args, mixed together to agree with core spills bitmap. + PUSH r15 // Callee save. + PUSH r14 // Callee save. + PUSH r13 // Callee save. + PUSH r12 // Callee save. + PUSH r9 // Quick arg 5. + PUSH r8 // Quick arg 4. + PUSH rsi // Quick arg 1. + PUSH rbp // Callee save. + PUSH rbx // Callee save. + PUSH rdx // Quick arg 2. + PUSH rcx // Quick arg 3. + // Create space for FPR args and create 2 slots, 1 of padding and 1 for the + // StackReference. + subq LITERAL(80 + 4 * 8), %rsp + CFI_ADJUST_CFA_OFFSET(80 + 4 * 8) + // Save FPRs. + movq %xmm0, 16(%rsp) + movq %xmm1, 24(%rsp) + movq %xmm2, 32(%rsp) + movq %xmm3, 40(%rsp) + movq %xmm4, 48(%rsp) + movq %xmm5, 56(%rsp) + movq %xmm6, 64(%rsp) + movq %xmm7, 72(%rsp) + movq %xmm12, 80(%rsp) + movq %xmm13, 88(%rsp) + movq %xmm14, 96(%rsp) + movq %xmm15, 104(%rsp) + // Store ArtMethod* to bottom of stack. + movq %rdi, 0(%rsp) + // Store rsp as the top quick frame. + movq %rsp, %gs:THREAD_TOP_QUICK_FRAME_OFFSET +END_MACRO + +MACRO0(RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME) // Restore FPRs. movq 16(%rsp), %xmm0 movq 24(%rsp), %xmm1 @@ -229,10 +270,9 @@ END_MACRO */ MACRO0(DELIVER_PENDING_EXCEPTION) SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save callee saves for throw - // (Thread*, SP) setup + // (Thread*) setup movq %gs:THREAD_SELF_OFFSET, %rdi - movq %rsp, %rsi - call SYMBOL(artDeliverPendingExceptionFromCode) // artDeliverPendingExceptionFromCode(Thread*, SP) + call SYMBOL(artDeliverPendingExceptionFromCode) // artDeliverPendingExceptionFromCode(Thread*) UNREACHABLE END_MACRO @@ -240,9 +280,8 @@ MACRO2(NO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name) DEFINE_FUNCTION VAR(c_name, 0) SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context // Outgoing argument set up - movq %rsp, %rsi // pass SP movq %gs:THREAD_SELF_OFFSET, %rdi // pass Thread::Current() - call VAR(cxx_name, 1) // cxx_name(Thread*, SP) + call VAR(cxx_name, 1) // cxx_name(Thread*) UNREACHABLE END_FUNCTION VAR(c_name, 0) END_MACRO @@ -251,9 +290,8 @@ MACRO2(ONE_ARG_RUNTIME_EXCEPTION, c_name, cxx_name) DEFINE_FUNCTION VAR(c_name, 0) SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context // Outgoing argument set up - movq %rsp, %rdx // pass SP movq %gs:THREAD_SELF_OFFSET, %rsi // pass Thread::Current() - call VAR(cxx_name, 1) // cxx_name(arg1, Thread*, SP) + call VAR(cxx_name, 1) // cxx_name(arg1, Thread*) UNREACHABLE END_FUNCTION VAR(c_name, 0) END_MACRO @@ -262,9 +300,8 @@ MACRO2(TWO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name) DEFINE_FUNCTION VAR(c_name, 0) SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context // Outgoing argument set up - movq %rsp, %rcx // pass SP movq %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current() - call VAR(cxx_name, 1) // cxx_name(Thread*, SP) + call VAR(cxx_name, 1) // cxx_name(Thread*) UNREACHABLE END_FUNCTION VAR(c_name, 0) END_MACRO @@ -321,7 +358,7 @@
TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromC */ MACRO2(INVOKE_TRAMPOLINE, c_name, cxx_name) DEFINE_FUNCTION VAR(c_name, 0) - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME // save callee saves in case allocation triggers GC + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME // save callee saves in case allocation triggers GC // Helper signature is always // (method_idx, *this_object, *caller_method, *self, sp) @@ -333,7 +370,7 @@ MACRO2(INVOKE_TRAMPOLINE, c_name, cxx_name) // save the code pointer movq %rax, %rdi movq %rdx, %rax - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME testq %rdi, %rdi jz 1f @@ -481,7 +518,7 @@ DEFINE_FUNCTION art_quick_invoke_stub LOOP_OVER_SHORTY_LOADING_GPRS r8, r8d, .Lgpr_setup_finished LOOP_OVER_SHORTY_LOADING_GPRS r9, r9d, .Lgpr_setup_finished .Lgpr_setup_finished: - call *METHOD_QUICK_CODE_OFFSET(%rdi) // Call the method. + call *MIRROR_ART_METHOD_QUICK_CODE_OFFSET(%rdi) // Call the method. movq %rbp, %rsp // Restore stack pointer. CFI_DEF_CFA_REGISTER(rsp) POP r9 // Pop r9 - shorty*. @@ -564,7 +601,7 @@ DEFINE_FUNCTION art_quick_invoke_static_stub LOOP_OVER_SHORTY_LOADING_GPRS r8, r8d, .Lgpr_setup_finished2 LOOP_OVER_SHORTY_LOADING_GPRS r9, r9d, .Lgpr_setup_finished2 .Lgpr_setup_finished2: - call *METHOD_QUICK_CODE_OFFSET(%rdi) // Call the method. + call *MIRROR_ART_METHOD_QUICK_CODE_OFFSET(%rdi) // Call the method. movq %rbp, %rsp // Restore stack pointer. CFI_DEF_CFA_REGISTER(rsp) POP r9 // Pop r9 - shorty*. @@ -639,88 +676,81 @@ END_FUNCTION art_quick_do_long_jump MACRO3(NO_ARG_DOWNCALL, c_name, cxx_name, return_macro) DEFINE_FUNCTION VAR(c_name, 0) - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC // Outgoing argument set up - movq %rsp, %rsi // pass SP - movq %gs:THREAD_SELF_OFFSET, %rdi // pass Thread::Current() - call VAR(cxx_name, 1) // cxx_name(Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address - CALL_MACRO(return_macro, 2) // return or deliver exception + movq %gs:THREAD_SELF_OFFSET, %rdi // pass Thread::Current() + call VAR(cxx_name, 1) // cxx_name(Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + CALL_MACRO(return_macro, 2) // return or deliver exception END_FUNCTION VAR(c_name, 0) END_MACRO MACRO3(ONE_ARG_DOWNCALL, c_name, cxx_name, return_macro) DEFINE_FUNCTION VAR(c_name, 0) - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC // Outgoing argument set up - movq %rsp, %rdx // pass SP - movq %gs:THREAD_SELF_OFFSET, %rsi // pass Thread::Current() - call VAR(cxx_name, 1) // cxx_name(arg0, Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address - CALL_MACRO(return_macro, 2) // return or deliver exception + movq %gs:THREAD_SELF_OFFSET, %rsi // pass Thread::Current() + call VAR(cxx_name, 1) // cxx_name(arg0, Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + CALL_MACRO(return_macro, 2) // return or deliver exception END_FUNCTION VAR(c_name, 0) END_MACRO MACRO3(TWO_ARG_DOWNCALL, c_name, cxx_name, return_macro) DEFINE_FUNCTION VAR(c_name, 0) - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC // Outgoing argument set up - movq %rsp, %rcx // pass SP - movq 
%gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current() - call VAR(cxx_name, 1) // cxx_name(arg0, arg1, Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address - CALL_MACRO(return_macro, 2) // return or deliver exception + movq %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current() + call VAR(cxx_name, 1) // cxx_name(arg0, arg1, Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + CALL_MACRO(return_macro, 2) // return or deliver exception END_FUNCTION VAR(c_name, 0) END_MACRO MACRO3(THREE_ARG_DOWNCALL, c_name, cxx_name, return_macro) DEFINE_FUNCTION VAR(c_name, 0) - SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC // Outgoing argument set up - movq %rsp, %r8 // pass SP - movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread::Current() - call VAR(cxx_name, 1) // cxx_name(arg0, arg1, arg2, Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address - CALL_MACRO(return_macro, 2) // return or deliver exception + movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread::Current() + call VAR(cxx_name, 1) // cxx_name(arg0, arg1, arg2, Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + CALL_MACRO(return_macro, 2) // return or deliver exception END_FUNCTION VAR(c_name, 0) END_MACRO MACRO3(ONE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro) DEFINE_FUNCTION VAR(c_name, 0) - movl 8(%rsp), %esi // pass referrer - SETUP_REF_ONLY_CALLEE_SAVE_FRAME - // arg0 is in rdi - movq %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current() - movq %rsp, %rcx // pass SP - call VAR(cxx_name, 1) // cxx_name(arg0, referrer, Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + movl 8(%rsp), %esi // pass referrer + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME + // arg0 is in rdi + movq %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current() + call VAR(cxx_name, 1) // cxx_name(arg0, referrer, Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address CALL_MACRO(return_macro, 2) END_FUNCTION VAR(c_name, 0) END_MACRO MACRO3(TWO_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro) DEFINE_FUNCTION VAR(c_name, 0) - movl 8(%rsp), %edx // pass referrer - SETUP_REF_ONLY_CALLEE_SAVE_FRAME - // arg0 and arg1 are in rdi/rsi - movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread::Current() - movq %rsp, %r8 // pass SP - call VAR(cxx_name, 1) // (arg0, arg1, referrer, Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + movl 8(%rsp), %edx // pass referrer + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME + // arg0 and arg1 are in rdi/rsi + movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread::Current() + call VAR(cxx_name, 1) // (arg0, arg1, referrer, Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address CALL_MACRO(return_macro, 2) END_FUNCTION VAR(c_name, 0) END_MACRO MACRO3(THREE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro) DEFINE_FUNCTION VAR(c_name, 0) - movl 8(%rsp), %ecx // pass referrer - SETUP_REF_ONLY_CALLEE_SAVE_FRAME - // arg0, arg1, and arg2 are in rdi/rsi/rdx + movl 8(%rsp), %ecx // pass referrer + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME + // arg0, arg1, and arg2 are in rdi/rsi/rdx movq %gs:THREAD_SELF_OFFSET, %r8 // pass Thread::Current() - movq %rsp, %r9 // pass SP - call VAR(cxx_name, 1) // cxx_name(arg0, arg1, arg2, referrer, Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address - 
CALL_MACRO(return_macro, 2) // return or deliver exception + call VAR(cxx_name, 1) // cxx_name(arg0, arg1, arg2, referrer, Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + CALL_MACRO(return_macro, 2) // return or deliver exception END_FUNCTION VAR(c_name, 0) END_MACRO @@ -864,7 +894,7 @@ DEFINE_FUNCTION art_quick_lock_object testl %edi, %edi // Null check object/rdi. jz .Lslow_lock .Lretry_lock: - movl LOCK_WORD_OFFSET(%edi), %ecx // ecx := lock word. + movl MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi), %ecx // ecx := lock word. test LITERAL(0xC0000000), %ecx // Test the 2 high bits. jne .Lslow_lock // Slow path if either of the two high bits are set. movl %gs:THREAD_ID_OFFSET, %edx // edx := thread id @@ -872,7 +902,7 @@ DEFINE_FUNCTION art_quick_lock_object jnz .Lalready_thin // Lock word contains a thin lock. // unlocked case - %edx holds thread id with count of 0 xor %eax, %eax // eax == 0 for comparison with lock word in cmpxchg - lock cmpxchg %edx, LOCK_WORD_OFFSET(%edi) + lock cmpxchg %edx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi) jnz .Lretry_lock // cmpxchg failed retry ret .Lalready_thin: @@ -881,21 +911,21 @@ DEFINE_FUNCTION art_quick_lock_object addl LITERAL(65536), %ecx // increment recursion count test LITERAL(0xC0000000), %ecx // overflowed if either of top two bits are set jne .Lslow_lock // count overflowed so go slow - movl %ecx, LOCK_WORD_OFFSET(%edi) // update lockword, cmpxchg not necessary as we hold lock + // update lockword, cmpxchg not necessary as we hold lock + movl %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi) ret .Lslow_lock: - SETUP_REF_ONLY_CALLEE_SAVE_FRAME + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME movq %gs:THREAD_SELF_OFFSET, %rsi // pass Thread::Current() - movq %rsp, %rdx // pass SP - call SYMBOL(artLockObjectFromCode) // artLockObjectFromCode(object, Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + call SYMBOL(artLockObjectFromCode) // artLockObjectFromCode(object, Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address RETURN_IF_EAX_ZERO END_FUNCTION art_quick_lock_object DEFINE_FUNCTION art_quick_unlock_object testl %edi, %edi // null check object/edi jz .Lslow_unlock - movl LOCK_WORD_OFFSET(%edi), %ecx // ecx := lock word + movl MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi), %ecx // ecx := lock word movl %gs:THREAD_ID_OFFSET, %edx // edx := thread id test LITERAL(0xC0000000), %ecx jnz .Lslow_unlock // lock word contains a monitor @@ -903,18 +933,17 @@ DEFINE_FUNCTION art_quick_unlock_object jne .Lslow_unlock cmpl LITERAL(65536), %ecx jae .Lrecursive_thin_unlock - movl LITERAL(0), LOCK_WORD_OFFSET(%edi) + movl LITERAL(0), MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi) ret .Lrecursive_thin_unlock: subl LITERAL(65536), %ecx - mov %ecx, LOCK_WORD_OFFSET(%edi) + mov %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi) ret .Lslow_unlock: - SETUP_REF_ONLY_CALLEE_SAVE_FRAME + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME movq %gs:THREAD_SELF_OFFSET, %rsi // pass Thread::Current() - movq %rsp, %rdx // pass SP - call SYMBOL(artUnlockObjectFromCode) // artUnlockObjectFromCode(object, Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + call SYMBOL(artUnlockObjectFromCode) // artUnlockObjectFromCode(object, Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address RETURN_IF_EAX_ZERO END_FUNCTION art_quick_unlock_object @@ -935,9 +964,8 @@ DEFINE_FUNCTION art_quick_check_cast POP rsi // Pop arguments POP rdi SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all 
registers as basis for long jump context - mov %rsp, %rcx // pass SP mov %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current() - call SYMBOL(artThrowClassCastException) // (Class* a, Class* b, Thread*, SP) + call SYMBOL(artThrowClassCastException) // (Class* a, Class* b, Thread*) int3 // unreached END_FUNCTION art_quick_check_cast @@ -969,8 +997,8 @@ DEFINE_FUNCTION art_quick_aput_obj_with_bound_check int3 int3 #else - movl ARRAY_LENGTH_OFFSET(%edi), %ecx -// movl ARRAY_LENGTH_OFFSET(%rdi), %ecx // This zero-extends, so value(%rcx)=value(%ecx) + movl MIRROR_ARRAY_LENGTH_OFFSET(%edi), %ecx +// movl MIRROR_ARRAY_LENGTH_OFFSET(%rdi), %ecx // This zero-extends, so value(%rcx)=value(%ecx) cmpl %ecx, %esi jb art_quick_aput_obj mov %esi, %edi @@ -986,24 +1014,24 @@ DEFINE_FUNCTION art_quick_aput_obj testl %edx, %edx // store of null // test %rdx, %rdx jz .Ldo_aput_null - movl CLASS_OFFSET(%edi), %ecx -// movq CLASS_OFFSET(%rdi), %rcx - movl CLASS_COMPONENT_TYPE_OFFSET(%ecx), %ecx -// movq CLASS_COMPONENT_TYPE_OFFSET(%rcx), %rcx - cmpl CLASS_OFFSET(%edx), %ecx // value's type == array's component type - trivial assignability -// cmpq CLASS_OFFSET(%rdx), %rcx + movl MIRROR_OBJECT_CLASS_OFFSET(%edi), %ecx +// movq MIRROR_OBJECT_CLASS_OFFSET(%rdi), %rcx + movl MIRROR_CLASS_COMPONENT_TYPE_OFFSET(%ecx), %ecx +// movq MIRROR_CLASS_COMPONENT_TYPE_OFFSET(%rcx), %rcx + cmpl MIRROR_OBJECT_CLASS_OFFSET(%edx), %ecx // value's type == array's component type - trivial assignability +// cmpq MIRROR_CLASS_OFFSET(%rdx), %rcx jne .Lcheck_assignability .Ldo_aput: - movl %edx, OBJECT_ARRAY_DATA_OFFSET(%edi, %esi, 4) -// movq %rdx, OBJECT_ARRAY_DATA_OFFSET(%rdi, %rsi, 4) + movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%edi, %esi, 4) +// movq %rdx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%rdi, %rsi, 4) movq %gs:THREAD_CARD_TABLE_OFFSET, %rdx shrl LITERAL(7), %edi // shrl LITERAL(7), %rdi movb %dl, (%rdx, %rdi) // Note: this assumes that top 32b of %rdi are zero ret .Ldo_aput_null: - movl %edx, OBJECT_ARRAY_DATA_OFFSET(%edi, %esi, 4) -// movq %rdx, OBJECT_ARRAY_DATA_OFFSET(%rdi, %rsi, 4) + movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%edi, %esi, 4) +// movq %rdx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%rdi, %rsi, 4) ret .Lcheck_assignability: // Save arguments. @@ -1015,7 +1043,7 @@ DEFINE_FUNCTION art_quick_aput_obj SETUP_FP_CALLEE_SAVE_FRAME // "Uncompress" = do nothing, as already zero-extended on load. - movl CLASS_OFFSET(%edx), %esi // Pass arg2 = value's class. + movl MIRROR_OBJECT_CLASS_OFFSET(%edx), %esi // Pass arg2 = value's class. movq %rcx, %rdi // Pass arg1 = array's component type. call SYMBOL(artIsAssignableFromCode) // (Class* a, Class* b) @@ -1032,8 +1060,8 @@ DEFINE_FUNCTION art_quick_aput_obj POP rsi POP rdi - movl %edx, OBJECT_ARRAY_DATA_OFFSET(%edi, %esi, 4) -// movq %rdx, OBJECT_ARRAY_DATA_OFFSET(%rdi, %rsi, 4) + movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%edi, %esi, 4) +// movq %rdx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%rdi, %rsi, 4) movq %gs:THREAD_CARD_TABLE_OFFSET, %rdx shrl LITERAL(7), %edi // shrl LITERAL(7), %rdi @@ -1052,12 +1080,10 @@ DEFINE_FUNCTION art_quick_aput_obj SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // Save all registers as basis for long jump context. // Outgoing argument set up. - movq %rsp, %rcx // Pass arg 4 = SP. movq %rdx, %rsi // Pass arg 2 = value. - movq %gs:THREAD_SELF_OFFSET, %rdx // Pass arg 3 = Thread::Current(). + movq %gs:THREAD_SELF_OFFSET, %rdx // Pass arg 3 = Thread::Current(). // Pass arg 1 = array. 
- - call SYMBOL(artThrowArrayStoreException) // (array, value, Thread*, SP) + call SYMBOL(artThrowArrayStoreException) // (array, value, Thread*) int3 // unreached END_FUNCTION art_quick_aput_obj @@ -1079,7 +1105,7 @@ UNIMPLEMENTED art_quick_lushr THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCode, RETURN_IF_EAX_ZERO THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCode, RETURN_IF_EAX_ZERO THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCode, RETURN_IF_EAX_ZERO -THREE_ARG_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCode, RETURN_IF_EAX_ZERO +THREE_ARG_REF_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCode, RETURN_IF_EAX_ZERO THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RETURN_IF_EAX_ZERO TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION @@ -1105,55 +1131,25 @@ ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_O // This is singled out as the argument order is different. DEFINE_FUNCTION art_quick_set64_static - movq %rsi, %rdx // pass new_val - movl 8(%rsp), %esi // pass referrer - SETUP_REF_ONLY_CALLEE_SAVE_FRAME - // field_idx is in rdi - movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread::Current() - movq %rsp, %r8 // pass SP - call SYMBOL(artSet64StaticFromCode) // (field_idx, referrer, new_val, Thread*, SP) - RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address - RETURN_IF_EAX_ZERO // return or deliver exception + movq %rsi, %rdx // pass new_val + movl 8(%rsp), %esi // pass referrer + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME + // field_idx is in rdi + movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread::Current() + call SYMBOL(artSet64StaticFromCode) // (field_idx, referrer, new_val, Thread*) + RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address + RETURN_IF_EAX_ZERO // return or deliver exception END_FUNCTION art_quick_set64_static DEFINE_FUNCTION art_quick_proxy_invoke_handler - // Save callee and GPR args, mixed together to agree with core spills bitmap of ref. and args - // callee save frame. - PUSH r15 // Callee save. - PUSH r14 // Callee save. - PUSH r13 // Callee save. - PUSH r12 // Callee save. - PUSH r9 // Quick arg 5. - PUSH r8 // Quick arg 4. - PUSH rsi // Quick arg 1. - PUSH rbp // Callee save. - PUSH rbx // Callee save. - PUSH rdx // Quick arg 2. - PUSH rcx // Quick arg 3. - // Create space for FPR args and create 2 slots, 1 of padding and 1 for the ArtMethod*. - subq LITERAL(80 + 4*8), %rsp - CFI_ADJUST_CFA_OFFSET(80 + 4*8) - // Save FPRs. - movq %xmm0, 16(%rsp) - movq %xmm1, 24(%rsp) - movq %xmm2, 32(%rsp) - movq %xmm3, 40(%rsp) - movq %xmm4, 48(%rsp) - movq %xmm5, 56(%rsp) - movq %xmm6, 64(%rsp) - movq %xmm7, 72(%rsp) - movq %xmm12, 80(%rsp) - movq %xmm13, 88(%rsp) - movq %xmm14, 96(%rsp) - movq %xmm15, 104(%rsp) - // Store proxy method to bottom of stack. - movq %rdi, 0(%rsp) - movq %gs:THREAD_SELF_OFFSET, %rdx // Pass Thread::Current(). - movq %rsp, %rcx // Pass SP. + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_RDI + + movq %gs:THREAD_SELF_OFFSET, %rdx // Pass Thread::Current(). + movq %rsp, %rcx // Pass SP. call SYMBOL(artQuickProxyInvokeHandler) // (proxy method, receiver, Thread*, SP) - movq %rax, %xmm0 // Copy return value in case of float returns. - addq LITERAL(168 + 4*8), %rsp // Pop arguments. + movq %rax, %xmm0 // Copy return value in case of float returns. + addq LITERAL(168 + 4*8), %rsp // Pop arguments. 
CFI_ADJUST_CFA_OFFSET(-168 - 4*8) RETURN_OR_DELIVER_PENDING_EXCEPTION END_FUNCTION art_quick_proxy_invoke_handler @@ -1168,20 +1164,20 @@ DEFINE_FUNCTION art_quick_imt_conflict_trampoline int3 #else movl 8(%rsp), %edi // load caller Method* - movl METHOD_DEX_CACHE_METHODS_OFFSET(%rdi), %edi // load dex_cache_resolved_methods - movl OBJECT_ARRAY_DATA_OFFSET(%rdi, %rax, 4), %edi // load the target method + movl MIRROR_ART_METHOD_DEX_CACHE_METHODS_OFFSET(%rdi), %edi // load dex_cache_resolved_methods + movl MIRROR_OBJECT_ARRAY_DATA_OFFSET(%rdi, %rax, 4), %edi // load the target method jmp art_quick_invoke_interface_trampoline #endif // __APPLE__ END_FUNCTION art_quick_imt_conflict_trampoline DEFINE_FUNCTION art_quick_resolution_trampoline - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME movq %gs:THREAD_SELF_OFFSET, %rdx movq %rsp, %rcx call SYMBOL(artQuickResolutionTrampoline) // (called, receiver, Thread*, SP) movq %rax, %r10 // Remember returned code pointer in R10. movq (%rsp), %rdi // Load called method into RDI. - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME testq %r10, %r10 // If code pointer is NULL goto deliver pending exception. jz 1f jmp *%r10 // Tail call into method. @@ -1267,37 +1263,11 @@ END_FUNCTION art_quick_resolution_trampoline * Called to do a generic JNI down-call */ DEFINE_FUNCTION art_quick_generic_jni_trampoline - // Save callee and GPR args, mixed together to agree with core spills bitmap. - PUSH r15 // Callee save. - PUSH r14 // Callee save. - PUSH r13 // Callee save. - PUSH r12 // Callee save. - PUSH r9 // Quick arg 5. - PUSH r8 // Quick arg 4. - PUSH rsi // Quick arg 1. - PUSH rbp // Callee save. - PUSH rbx // Callee save. - PUSH rdx // Quick arg 2. - PUSH rcx // Quick arg 3. - // Create space for FPR args and create 2 slots, 1 of padding and 1 for the ArtMethod*. - subq LITERAL(80 + 4*8), %rsp - CFI_ADJUST_CFA_OFFSET(80 + 4*8) - // Save FPRs. - movq %xmm0, 16(%rsp) - movq %xmm1, 24(%rsp) - movq %xmm2, 32(%rsp) - movq %xmm3, 40(%rsp) - movq %xmm4, 48(%rsp) - movq %xmm5, 56(%rsp) - movq %xmm6, 64(%rsp) - movq %xmm7, 72(%rsp) - movq %xmm12, 80(%rsp) - movq %xmm13, 88(%rsp) - movq %xmm14, 96(%rsp) - movq %xmm15, 104(%rsp) - movq %rdi, 0(%rsp) // Store native ArtMethod* to bottom of stack. + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_RDI + movq %rsp, %rbp // save SP at (old) callee-save frame CFI_DEF_CFA_REGISTER(rbp) + // // reserve a lot of space // @@ -1454,11 +1424,11 @@ END_FUNCTION art_quick_generic_jni_trampoline * RSI, RDX, RCX, R8, R9 are arguments to that method. */ DEFINE_FUNCTION art_quick_to_interpreter_bridge - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME // Set up frame and save arguments. + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME // Set up frame and save arguments. movq %gs:THREAD_SELF_OFFSET, %rsi // RSI := Thread::Current() movq %rsp, %rdx // RDX := sp call SYMBOL(artQuickToInterpreterBridge) // (method, Thread*, SP) - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME // TODO: no need to restore arguments in this case. + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME // TODO: no need to restore arguments in this case. movq %rax, %xmm0 // Place return value also into floating point return value. 
RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception END_FUNCTION art_quick_to_interpreter_bridge @@ -1471,15 +1441,14 @@ DEFINE_FUNCTION art_quick_instrumentation_entry int3 int3 #else - SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME + SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME movq %rdi, %r12 // Preserve method pointer in a callee-save. movq %gs:THREAD_SELF_OFFSET, %rdx // Pass thread. - movq %rsp, %rcx // Pass SP. movq FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE-8(%rsp), %r8 // Pass return PC. - call SYMBOL(artInstrumentationMethodEntryFromCode) // (Method*, Object*, Thread*, SP, LR) + call SYMBOL(artInstrumentationMethodEntryFromCode) // (Method*, Object*, Thread*, LR) // %rax = result of call. movq %r12, %rdi // Reload method pointer. @@ -1487,7 +1456,7 @@ DEFINE_FUNCTION art_quick_instrumentation_entry leaq art_quick_instrumentation_exit(%rip), %r12 // Set up return through instrumentation movq %r12, FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE-8(%rsp) // exit. - RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME + RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME jmp *%rax // Tail call to intended method. #endif // __APPLE__ @@ -1496,7 +1465,7 @@ END_FUNCTION art_quick_instrumentation_entry DEFINE_FUNCTION art_quick_instrumentation_exit pushq LITERAL(0) // Push a fake return PC as there will be none on the stack. - SETUP_REF_ONLY_CALLEE_SAVE_FRAME + SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // We need to save rax and xmm0. We could use a callee-save from SETUP_REF_ONLY, but then // we would need to fully restore it. As there are a good number of callee-save registers, it @@ -1536,9 +1505,8 @@ DEFINE_FUNCTION art_quick_deoptimize pushq %rsi // Fake that we were called. Use hidden arg. SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // Stack should be aligned now. - movq %rsp, %rsi // Pass SP. movq %gs:THREAD_SELF_OFFSET, %rdi // Pass Thread. - call SYMBOL(artDeoptimize) // artDeoptimize(Thread*, SP) + call SYMBOL(artDeoptimize) // artDeoptimize(Thread*) int3 // Unreachable. END_FUNCTION art_quick_deoptimize @@ -1551,15 +1519,15 @@ END_FUNCTION art_quick_deoptimize * rsi: comp string object (known non-null) */ DEFINE_FUNCTION art_quick_string_compareto - movl STRING_COUNT_OFFSET(%edi), %r8d - movl STRING_COUNT_OFFSET(%esi), %r9d - movl STRING_VALUE_OFFSET(%edi), %r10d - movl STRING_VALUE_OFFSET(%esi), %r11d - movl STRING_OFFSET_OFFSET(%edi), %eax - movl STRING_OFFSET_OFFSET(%esi), %ecx + movl MIRROR_STRING_COUNT_OFFSET(%edi), %r8d + movl MIRROR_STRING_COUNT_OFFSET(%esi), %r9d + movl MIRROR_STRING_VALUE_OFFSET(%edi), %r10d + movl MIRROR_STRING_VALUE_OFFSET(%esi), %r11d + movl MIRROR_STRING_OFFSET_OFFSET(%edi), %eax + movl MIRROR_STRING_OFFSET_OFFSET(%esi), %ecx /* Build pointers to the start of string data */ - leal STRING_DATA_OFFSET(%r10d, %eax, 2), %esi - leal STRING_DATA_OFFSET(%r11d, %ecx, 2), %edi + leal MIRROR_CHAR_ARRAY_DATA_OFFSET(%r10d, %eax, 2), %esi + leal MIRROR_CHAR_ARRAY_DATA_OFFSET(%r11d, %ecx, 2), %edi /* Calculate min length and count diff */ movl %r8d, %ecx movl %r8d, %eax @@ -1605,5 +1573,3 @@ DEFINE_FUNCTION art_nested_signal_return call PLT_SYMBOL(longjmp) int3 // won't get here END_FUNCTION art_nested_signal_return - - diff --git a/runtime/arch/x86_64/thread_x86_64.cc b/runtime/arch/x86_64/thread_x86_64.cc index 6dff2b4a5..553b6569c 100644 --- a/runtime/arch/x86_64/thread_x86_64.cc +++ b/runtime/arch/x86_64/thread_x86_64.cc @@ -49,29 +49,16 @@ void Thread::InitCpu() { // Sanity check that reads from %gs point to this Thread*. 
Thread* self_check; - CHECK_EQ(THREAD_SELF_OFFSET, SelfOffset<8>().Int32Value()); __asm__ __volatile__("movq %%gs:(%1), %0" : "=r"(self_check) // output : "r"(THREAD_SELF_OFFSET) // input :); // clobber CHECK_EQ(self_check, this); - - // Sanity check other offsets. - CHECK_EQ(static_cast<size_t>(RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET), - Runtime::GetCalleeSaveMethodOffset(Runtime::kSaveAll)); - CHECK_EQ(static_cast<size_t>(RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET), - Runtime::GetCalleeSaveMethodOffset(Runtime::kRefsOnly)); - CHECK_EQ(static_cast<size_t>(RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET), - Runtime::GetCalleeSaveMethodOffset(Runtime::kRefsAndArgs)); - CHECK_EQ(THREAD_EXCEPTION_OFFSET, ExceptionOffset<8>().Int32Value()); - CHECK_EQ(THREAD_CARD_TABLE_OFFSET, CardTableOffset<8>().Int32Value()); - CHECK_EQ(THREAD_ID_OFFSET, ThinLockIdOffset<8>().Int32Value()); } void Thread::CleanupCpu() { // Sanity check that reads from %gs point to this Thread*. Thread* self_check; - CHECK_EQ(THREAD_SELF_OFFSET, SelfOffset<8>().Int32Value()); __asm__ __volatile__("movq %%gs:(%1), %0" : "=r"(self_check) // output : "r"(THREAD_SELF_OFFSET) // input diff --git a/runtime/asm_support.h b/runtime/asm_support.h index 62f359346..26df045a2 100644 --- a/runtime/asm_support.h +++ b/runtime/asm_support.h @@ -17,56 +17,147 @@ #ifndef ART_RUNTIME_ASM_SUPPORT_H_ #define ART_RUNTIME_ASM_SUPPORT_H_ +#if defined(__cplusplus) +#include "mirror/art_method.h" +#include "mirror/class.h" +#include "mirror/string.h" +#include "runtime.h" +#include "thread.h" +#endif + #include "read_barrier_c.h" -// Value loaded into rSUSPEND for quick. When this value is counted down to zero we do a suspend -// check. -#define SUSPEND_CHECK_INTERVAL (1000) +#if defined(__arm__) || defined(__aarch64__) || defined(__mips__) +// In quick code for ARM, ARM64 and MIPS we make poor use of registers and perform frequent suspend +// checks in the event of loop back edges. The SUSPEND_CHECK_INTERVAL constant is loaded into a +// register at the point of an up-call or after handling a suspend check. It reduces the number of +// loads of the TLS suspend check value by the given amount (turning it into a decrement and compare +// of a register). This increases the time for a thread to respond to requests from GC and the +// debugger, damaging GC performance and creating other unwanted artifacts. For example, this count +// has the effect of making loops and Java code look cold in profilers, because where the count is +// reset determines where samples will occur. Reducing the count as much as possible improves +// profiler accuracy in tools like traceview. +// TODO: get a compiler that can do a proper job of loop optimization and remove this. +#define SUSPEND_CHECK_INTERVAL 1000 +#endif -// Offsets within java.lang.Object. -#define CLASS_OFFSET 0 -#define LOCK_WORD_OFFSET 4 +#if defined(__cplusplus) -#ifndef USE_BAKER_OR_BROOKS_READ_BARRIER +#ifndef ADD_TEST_EQ // Allow including files to replace this with their own definition. +#define ADD_TEST_EQ(x, y) CHECK_EQ(x, y); +#endif -// Offsets within java.lang.Class. -#define CLASS_COMPONENT_TYPE_OFFSET 12 +static inline void CheckAsmSupportOffsetsAndSizes() { +#else +#define ADD_TEST_EQ(x, y) +#endif -// Array offsets. -#define ARRAY_LENGTH_OFFSET 8 -#define OBJECT_ARRAY_DATA_OFFSET 12 +// Size of references to the heap on the stack. +#define STACK_REFERENCE_SIZE 4 +ADD_TEST_EQ(static_cast<size_t>(STACK_REFERENCE_SIZE), sizeof(art::StackReference<art::mirror::Object>)) + +// Note: these callee save method loads require read barriers.
+// Offset of field Runtime::callee_save_methods_[kSaveAll] +#define RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET 0 +ADD_TEST_EQ(static_cast<size_t>(RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET), + art::Runtime::GetCalleeSaveMethodOffset(art::Runtime::kSaveAll)) + +// Offset of field Runtime::callee_save_methods_[kRefsOnly] +#define RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET __SIZEOF_POINTER__ +ADD_TEST_EQ(static_cast<size_t>(RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET), + art::Runtime::GetCalleeSaveMethodOffset(art::Runtime::kRefsOnly)) + +// Offset of field Runtime::callee_save_methods_[kRefsAndArgs] +#define RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET (2 * __SIZEOF_POINTER__) +ADD_TEST_EQ(static_cast<size_t>(RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET), + art::Runtime::GetCalleeSaveMethodOffset(art::Runtime::kRefsAndArgs)) + +// Offset of field Thread::tls32_.state_and_flags. +#define THREAD_FLAGS_OFFSET 0 +ADD_TEST_EQ(THREAD_FLAGS_OFFSET, + art::Thread::ThreadFlagsOffset<__SIZEOF_POINTER__>().Int32Value()) + +// Offset of field Thread::tls32_.thin_lock_thread_id. +#define THREAD_ID_OFFSET 12 +ADD_TEST_EQ(THREAD_ID_OFFSET, + art::Thread::ThinLockIdOffset<__SIZEOF_POINTER__>().Int32Value()) + +// Offset of field Thread::tlsPtr_.card_table. +#define THREAD_CARD_TABLE_OFFSET 120 +ADD_TEST_EQ(THREAD_CARD_TABLE_OFFSET, + art::Thread::CardTableOffset<__SIZEOF_POINTER__>().Int32Value()) + +// Offset of field Thread::tlsPtr_.exception. +#define THREAD_EXCEPTION_OFFSET (THREAD_CARD_TABLE_OFFSET + __SIZEOF_POINTER__) +ADD_TEST_EQ(THREAD_EXCEPTION_OFFSET, + art::Thread::ExceptionOffset<__SIZEOF_POINTER__>().Int32Value()) + +// Offset of field Thread::tlsPtr_.managed_stack.top_quick_frame_. +#define THREAD_TOP_QUICK_FRAME_OFFSET (THREAD_CARD_TABLE_OFFSET + (3 * __SIZEOF_POINTER__)) +ADD_TEST_EQ(THREAD_TOP_QUICK_FRAME_OFFSET, + art::Thread::TopOfManagedStackOffset<__SIZEOF_POINTER__>().Int32Value()) + +// Offset of field Thread::tlsPtr_.self. +#define THREAD_SELF_OFFSET (THREAD_CARD_TABLE_OFFSET + (8 * __SIZEOF_POINTER__)) +ADD_TEST_EQ(THREAD_SELF_OFFSET, + art::Thread::SelfOffset<__SIZEOF_POINTER__>().Int32Value()) -// Offsets within java.lang.String. -#define STRING_VALUE_OFFSET 8 -#define STRING_COUNT_OFFSET 12 -#define STRING_OFFSET_OFFSET 20 -#define STRING_DATA_OFFSET 12 - -// Offsets within java.lang.Method. -#define METHOD_DEX_CACHE_METHODS_OFFSET 12 -#define METHOD_PORTABLE_CODE_OFFSET 40 -#define METHOD_QUICK_CODE_OFFSET 48 +// Offsets within java.lang.Object. +#define MIRROR_OBJECT_CLASS_OFFSET 0 +ADD_TEST_EQ(MIRROR_OBJECT_CLASS_OFFSET, art::mirror::Object::ClassOffset().Int32Value()) +#define MIRROR_OBJECT_LOCK_WORD_OFFSET 4 +ADD_TEST_EQ(MIRROR_OBJECT_LOCK_WORD_OFFSET, art::mirror::Object::MonitorOffset().Int32Value()) +#if defined(USE_BAKER_OR_BROOKS_READ_BARRIER) +#define MIRROR_OBJECT_HEADER_SIZE 16 #else +#define MIRROR_OBJECT_HEADER_SIZE 8 +#endif +ADD_TEST_EQ(size_t(MIRROR_OBJECT_HEADER_SIZE), sizeof(art::mirror::Object)) // Offsets within java.lang.Class. -#define CLASS_COMPONENT_TYPE_OFFSET 20 +#define MIRROR_CLASS_COMPONENT_TYPE_OFFSET (4 + MIRROR_OBJECT_HEADER_SIZE) +ADD_TEST_EQ(MIRROR_CLASS_COMPONENT_TYPE_OFFSET, + art::mirror::Class::ComponentTypeOffset().Int32Value()) // Array offsets.
-#define ARRAY_LENGTH_OFFSET 16 -#define OBJECT_ARRAY_DATA_OFFSET 20 +#define MIRROR_ARRAY_LENGTH_OFFSET MIRROR_OBJECT_HEADER_SIZE +ADD_TEST_EQ(MIRROR_ARRAY_LENGTH_OFFSET, art::mirror::Array::LengthOffset().Int32Value()) + +#define MIRROR_CHAR_ARRAY_DATA_OFFSET (4 + MIRROR_OBJECT_HEADER_SIZE) +ADD_TEST_EQ(MIRROR_CHAR_ARRAY_DATA_OFFSET, + art::mirror::Array::DataOffset(sizeof(uint16_t)).Int32Value()) + +#define MIRROR_OBJECT_ARRAY_DATA_OFFSET (4 + MIRROR_OBJECT_HEADER_SIZE) +ADD_TEST_EQ(MIRROR_OBJECT_ARRAY_DATA_OFFSET, + art::mirror::Array::DataOffset( + sizeof(art::mirror::HeapReference<art::mirror::Object>)).Int32Value()) // Offsets within java.lang.String. -#define STRING_VALUE_OFFSET 16 -#define STRING_COUNT_OFFSET 20 -#define STRING_OFFSET_OFFSET 28 -#define STRING_DATA_OFFSET 20 +#define MIRROR_STRING_VALUE_OFFSET MIRROR_OBJECT_HEADER_SIZE +ADD_TEST_EQ(MIRROR_STRING_VALUE_OFFSET, art::mirror::String::ValueOffset().Int32Value()) + +#define MIRROR_STRING_COUNT_OFFSET (4 + MIRROR_OBJECT_HEADER_SIZE) +ADD_TEST_EQ(MIRROR_STRING_COUNT_OFFSET, art::mirror::String::CountOffset().Int32Value()) + +#define MIRROR_STRING_OFFSET_OFFSET (12 + MIRROR_OBJECT_HEADER_SIZE) +ADD_TEST_EQ(MIRROR_STRING_OFFSET_OFFSET, art::mirror::String::OffsetOffset().Int32Value()) + +// Offsets within java.lang.reflect.ArtMethod. +#define MIRROR_ART_METHOD_DEX_CACHE_METHODS_OFFSET (4 + MIRROR_OBJECT_HEADER_SIZE) +ADD_TEST_EQ(MIRROR_ART_METHOD_DEX_CACHE_METHODS_OFFSET, + art::mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()) + +#define MIRROR_ART_METHOD_PORTABLE_CODE_OFFSET (32 + MIRROR_OBJECT_HEADER_SIZE) +ADD_TEST_EQ(MIRROR_ART_METHOD_PORTABLE_CODE_OFFSET, + art::mirror::ArtMethod::EntryPointFromPortableCompiledCodeOffset().Int32Value()) -// Offsets within java.lang.Method. -#define METHOD_DEX_CACHE_METHODS_OFFSET 20 -#define METHOD_PORTABLE_CODE_OFFSET 48 -#define METHOD_QUICK_CODE_OFFSET 56 +#define MIRROR_ART_METHOD_QUICK_CODE_OFFSET (40 + MIRROR_OBJECT_HEADER_SIZE) +ADD_TEST_EQ(MIRROR_ART_METHOD_QUICK_CODE_OFFSET, + art::mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()) +#if defined(__cplusplus) +} // End of CheckAsmSupportOffsets. #endif #endif // ART_RUNTIME_ASM_SUPPORT_H_ diff --git a/runtime/entrypoints/quick/callee_save_frame.h b/runtime/entrypoints/quick/callee_save_frame.h index e728f7dd1..49357ad27 100644 --- a/runtime/entrypoints/quick/callee_save_frame.h +++ b/runtime/entrypoints/quick/callee_save_frame.h @@ -18,9 +18,8 @@ #define ART_RUNTIME_ENTRYPOINTS_QUICK_CALLEE_SAVE_FRAME_H_ #include "base/mutex.h" -#include "gc_root-inl.h" #include "instruction_set.h" -#include "runtime-inl.h" +#include "runtime.h" #include "thread-inl.h" // Specific frame size code is in architecture-specific files. We include this to compile-time @@ -36,16 +35,41 @@ namespace mirror { class ArtMethod; } // namespace mirror -// Place a special frame at the TOS that will save the callee saves for the given type. -static inline void FinishCalleeSaveFrameSetup(Thread* self, StackReference<mirror::ArtMethod>* sp, - Runtime::CalleeSaveType type) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - // Be aware the store below may well stomp on an incoming argument.
- Locks::mutator_lock_->AssertSharedHeld(self); - sp->Assign(Runtime::Current()->GetCalleeSaveMethod(type)); - self->SetTopOfStack(sp, 0); - self->VerifyStack(); -} +class ScopedQuickEntrypointChecks { + public: + explicit ScopedQuickEntrypointChecks(Thread *self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) + : self_(self) { + if (kIsDebugBuild) { + TestsOnEntry(); + } + } + + explicit ScopedQuickEntrypointChecks() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) + : self_(kIsDebugBuild ? Thread::Current() : nullptr) { + if (kIsDebugBuild) { + TestsOnEntry(); + } + } + + ~ScopedQuickEntrypointChecks() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + if (kIsDebugBuild) { + TestsOnExit(); + } + } + + private: + void TestsOnEntry() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + Locks::mutator_lock_->AssertSharedHeld(self_); + self_->VerifyStack(); + } + + void TestsOnExit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + Locks::mutator_lock_->AssertSharedHeld(self_); + self_->VerifyStack(); + } + + Thread* const self_; +}; static constexpr size_t GetCalleeSaveFrameSize(InstructionSet isa, Runtime::CalleeSaveType type) { // constexpr must be a return statement. @@ -71,7 +95,8 @@ static constexpr size_t GetConstExprPointerSize(InstructionSet isa) { } // Note: this specialized statement is sanity-checked in the quick-trampoline gtest. -static constexpr size_t GetCalleeSavePCOffset(InstructionSet isa, Runtime::CalleeSaveType type) { +static constexpr size_t GetCalleeSaveReturnPcOffset(InstructionSet isa, + Runtime::CalleeSaveType type) { return GetCalleeSaveFrameSize(isa, type) - GetConstExprPointerSize(isa); } diff --git a/runtime/entrypoints/quick/quick_alloc_entrypoints.cc b/runtime/entrypoints/quick/quick_alloc_entrypoints.cc index d8da46398..a2869ecc4 100644 --- a/runtime/entrypoints/quick/quick_alloc_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_alloc_entrypoints.cc @@ -32,6 +32,7 @@ extern "C" mirror::Object* artAllocObjectFromCode ##suffix##suffix2( \ uint32_t type_idx, mirror::ArtMethod* method, Thread* self, \ StackReference* sp) \ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \ + ScopedQuickEntrypointChecks sqec(self); \ if (kUseTlabFastPath && !instrumented_bool && allocator_type == gc::kAllocatorTypeTLAB) { \ mirror::Class* klass = method->GetDexCacheResolvedType(type_idx); \ if (LIKELY(klass != nullptr && klass->IsInitialized() && !klass->IsFinalizable())) { \ @@ -53,13 +54,12 @@ extern "C" mirror::Object* artAllocObjectFromCode ##suffix##suffix2( \ } \ } \ } \ - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \ return AllocObjectFromCode(type_idx, method, self, allocator_type); \ } \ extern "C" mirror::Object* artAllocObjectFromCodeResolved##suffix##suffix2( \ - mirror::Class* klass, mirror::ArtMethod* method, Thread* self, \ - StackReference* sp) \ + mirror::Class* klass, mirror::ArtMethod* method, Thread* self) \ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \ + ScopedQuickEntrypointChecks sqec(self); \ if (kUseTlabFastPath && !instrumented_bool && allocator_type == gc::kAllocatorTypeTLAB) { \ if (LIKELY(klass->IsInitialized())) { \ size_t byte_count = klass->GetObjectSize(); \ @@ -80,13 +80,12 @@ extern "C" mirror::Object* artAllocObjectFromCodeResolved##suffix##suffix2( \ } \ } \ } \ - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \ return AllocObjectFromCodeResolved(klass, method, self, allocator_type); \ } \ extern "C" mirror::Object* artAllocObjectFromCodeInitialized##suffix##suffix2( \ - mirror::Class* klass, mirror::ArtMethod* method, Thread* self, 
\ - StackReference* sp) \ + mirror::Class* klass, mirror::ArtMethod* method, Thread* self) \ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \ + ScopedQuickEntrypointChecks sqec(self); \ if (kUseTlabFastPath && !instrumented_bool && allocator_type == gc::kAllocatorTypeTLAB) { \ size_t byte_count = klass->GetObjectSize(); \ byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \ @@ -105,45 +104,39 @@ extern "C" mirror::Object* artAllocObjectFromCodeInitialized##suffix##suffix2( \ return obj; \ } \ } \ - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \ return AllocObjectFromCodeInitialized(klass, method, self, allocator_type); \ } \ extern "C" mirror::Object* artAllocObjectFromCodeWithAccessCheck##suffix##suffix2( \ - uint32_t type_idx, mirror::ArtMethod* method, Thread* self, \ - StackReference* sp) \ + uint32_t type_idx, mirror::ArtMethod* method, Thread* self) \ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \ - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \ + ScopedQuickEntrypointChecks sqec(self); \ return AllocObjectFromCode(type_idx, method, self, allocator_type); \ } \ extern "C" mirror::Array* artAllocArrayFromCode##suffix##suffix2( \ - uint32_t type_idx, mirror::ArtMethod* method, int32_t component_count, Thread* self, \ - StackReference* sp) \ + uint32_t type_idx, mirror::ArtMethod* method, int32_t component_count, Thread* self) \ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \ - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \ + ScopedQuickEntrypointChecks sqec(self); \ return AllocArrayFromCode(type_idx, method, component_count, self, \ allocator_type); \ } \ extern "C" mirror::Array* artAllocArrayFromCodeResolved##suffix##suffix2( \ - mirror::Class* klass, mirror::ArtMethod* method, int32_t component_count, Thread* self, \ - StackReference* sp) \ + mirror::Class* klass, mirror::ArtMethod* method, int32_t component_count, Thread* self) \ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \ - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \ + ScopedQuickEntrypointChecks sqec(self); \ return AllocArrayFromCodeResolved(klass, method, component_count, self, \ allocator_type); \ } \ extern "C" mirror::Array* artAllocArrayFromCodeWithAccessCheck##suffix##suffix2( \ - uint32_t type_idx, mirror::ArtMethod* method, int32_t component_count, Thread* self, \ - StackReference* sp) \ + uint32_t type_idx, mirror::ArtMethod* method, int32_t component_count, Thread* self) \ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \ - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \ + ScopedQuickEntrypointChecks sqec(self); \ return AllocArrayFromCode(type_idx, method, component_count, self, \ allocator_type); \ } \ extern "C" mirror::Array* artCheckAndAllocArrayFromCode##suffix##suffix2( \ - uint32_t type_idx, mirror::ArtMethod* method, int32_t component_count, Thread* self, \ - StackReference* sp) \ + uint32_t type_idx, mirror::ArtMethod* method, int32_t component_count, Thread* self) \ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \ - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \ + ScopedQuickEntrypointChecks sqec(self); \ if (!instrumented_bool) { \ return CheckAndAllocArrayFromCode(type_idx, method, component_count, self, false, allocator_type); \ } else { \ @@ -151,10 +144,9 @@ extern "C" mirror::Array* artCheckAndAllocArrayFromCode##suffix##suffix2( \ } \ } \ extern "C" mirror::Array* artCheckAndAllocArrayFromCodeWithAccessCheck##suffix##suffix2( \ - uint32_t type_idx, mirror::ArtMethod* method, int32_t 
component_count, Thread* self, \ - StackReference* sp) \ + uint32_t type_idx, mirror::ArtMethod* method, int32_t component_count, Thread* self) \ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \ - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \ + ScopedQuickEntrypointChecks sqec(self); \ if (!instrumented_bool) { \ return CheckAndAllocArrayFromCode(type_idx, method, component_count, self, true, allocator_type); \ } else { \ diff --git a/runtime/entrypoints/quick/quick_deoptimization_entrypoints.cc b/runtime/entrypoints/quick/quick_deoptimization_entrypoints.cc index f9f62c272..14ab320b9 100644 --- a/runtime/entrypoints/quick/quick_deoptimization_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_deoptimization_entrypoints.cc @@ -27,9 +27,8 @@ namespace art { -extern "C" void artDeoptimize(Thread* self, StackReference* sp) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - FinishCalleeSaveFrameSetup(self, sp, Runtime::kSaveAll); +extern "C" void artDeoptimize(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); self->SetException(ThrowLocation(), Thread::GetDeoptimizationException()); self->QuickDeliverException(); } diff --git a/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc b/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc index 704db0503..2e7c8bab4 100644 --- a/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc @@ -27,42 +27,39 @@ namespace art { extern "C" mirror::Class* artInitializeStaticStorageFromCode(uint32_t type_idx, mirror::ArtMethod* referrer, - Thread* self, - StackReference* sp) + Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { // Called to ensure static storage base is initialized for direct static field reads and writes. // A class may be accessing another class' fields when it doesn't have access, as access has been // given by inheritance. - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); + ScopedQuickEntrypointChecks sqec(self); return ResolveVerifyAndClinit(type_idx, referrer, self, true, false); } extern "C" mirror::Class* artInitializeTypeFromCode(uint32_t type_idx, mirror::ArtMethod* referrer, - Thread* self, - StackReference* sp) + Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { // Called when method->dex_cache_resolved_types_[] misses. - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); + ScopedQuickEntrypointChecks sqec(self); return ResolveVerifyAndClinit(type_idx, referrer, self, false, false); } extern "C" mirror::Class* artInitializeTypeAndVerifyAccessFromCode(uint32_t type_idx, - mirror::ArtMethod* referrer, - Thread* self, - StackReference* sp) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + mirror::ArtMethod* referrer, + Thread* self) + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { // Called when caller isn't guaranteed to have access to a type and the dex cache may be // unpopulated. 
- FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); + ScopedQuickEntrypointChecks sqec(self); return ResolveVerifyAndClinit(type_idx, referrer, self, false, true); } extern "C" mirror::String* artResolveStringFromCode(mirror::ArtMethod* referrer, int32_t string_idx, - Thread* self, - StackReference* sp) + Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); + ScopedQuickEntrypointChecks sqec(self); return ResolveStringFromCode(referrer, string_idx); } diff --git a/runtime/entrypoints/quick/quick_field_entrypoints.cc b/runtime/entrypoints/quick/quick_field_entrypoints.cc index b89c015cd..7326fcfc6 100644 --- a/runtime/entrypoints/quick/quick_field_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_field_entrypoints.cc @@ -25,295 +25,284 @@ namespace art { -extern "C" int8_t artGetByteStaticFromCode(uint32_t field_idx, - mirror::ArtMethod* referrer, - Thread* self, StackReference* sp) +extern "C" int8_t artGetByteStaticFromCode(uint32_t field_idx, mirror::ArtMethod* referrer, + Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveRead, sizeof(int8_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { return field->GetByte(field->GetDeclaringClass()); } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int8_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { return field->GetByte(field->GetDeclaringClass()); } - return 0; // Will throw exception by checking with Thread::Current + return 0; // Will throw exception by checking with Thread::Current. } -extern "C" uint8_t artGetBooleanStaticFromCode(uint32_t field_idx, - mirror::ArtMethod* referrer, - Thread* self, StackReference* sp) +extern "C" uint8_t artGetBooleanStaticFromCode(uint32_t field_idx, mirror::ArtMethod* referrer, + Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveRead, sizeof(int8_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { return field->GetBoolean(field->GetDeclaringClass()); } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int8_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { return field->GetBoolean(field->GetDeclaringClass()); } - return 0; // Will throw exception by checking with Thread::Current + return 0; // Will throw exception by checking with Thread::Current. 
} -extern "C" int16_t artGetShortStaticFromCode(uint32_t field_idx, - mirror::ArtMethod* referrer, - Thread* self, StackReference* sp) +extern "C" int16_t artGetShortStaticFromCode(uint32_t field_idx, mirror::ArtMethod* referrer, + Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveRead, sizeof(int16_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { return field->GetShort(field->GetDeclaringClass()); } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int16_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { return field->GetShort(field->GetDeclaringClass()); } - return 0; // Will throw exception by checking with Thread::Current + return 0; // Will throw exception by checking with Thread::Current. } extern "C" uint16_t artGetCharStaticFromCode(uint32_t field_idx, mirror::ArtMethod* referrer, - Thread* self, StackReference* sp) + Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveRead, sizeof(int16_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { return field->GetChar(field->GetDeclaringClass()); } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int16_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { return field->GetChar(field->GetDeclaringClass()); } - return 0; // Will throw exception by checking with Thread::Current + return 0; // Will throw exception by checking with Thread::Current. } extern "C" uint32_t artGet32StaticFromCode(uint32_t field_idx, mirror::ArtMethod* referrer, - Thread* self, StackReference* sp) + Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveRead, sizeof(int32_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { return field->Get32(field->GetDeclaringClass()); } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int32_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { return field->Get32(field->GetDeclaringClass()); } - return 0; // Will throw exception by checking with Thread::Current + return 0; // Will throw exception by checking with Thread::Current. } extern "C" uint64_t artGet64StaticFromCode(uint32_t field_idx, mirror::ArtMethod* referrer, - Thread* self, StackReference* sp) + Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveRead, sizeof(int64_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { return field->Get64(field->GetDeclaringClass()); } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int64_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { return field->Get64(field->GetDeclaringClass()); } - return 0; // Will throw exception by checking with Thread::Current + return 0; // Will throw exception by checking with Thread::Current. 
} extern "C" mirror::Object* artGetObjStaticFromCode(uint32_t field_idx, mirror::ArtMethod* referrer, - Thread* self, - StackReference* sp) + Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, StaticObjectRead, sizeof(mirror::HeapReference)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { return field->GetObj(field->GetDeclaringClass()); } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(mirror::HeapReference)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { return field->GetObj(field->GetDeclaringClass()); } - return NULL; // Will throw exception by checking with Thread::Current + return nullptr; // Will throw exception by checking with Thread::Current. } extern "C" int8_t artGetByteInstanceFromCode(uint32_t field_idx, mirror::Object* obj, - mirror::ArtMethod* referrer, Thread* self, - StackReference* sp) + mirror::ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveRead, sizeof(int8_t)); - if (LIKELY(field != NULL && obj != NULL)) { + if (LIKELY(field != nullptr && obj != nullptr)) { return field->GetByte(obj); } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int8_t)); - if (LIKELY(field != NULL)) { - if (UNLIKELY(obj == NULL)) { + if (LIKELY(field != nullptr)) { + if (UNLIKELY(obj == nullptr)) { ThrowLocation throw_location = self->GetCurrentLocationForThrow(); ThrowNullPointerExceptionForFieldAccess(throw_location, field, true); } else { return field->GetByte(obj); } } - return 0; // Will throw exception by checking with Thread::Current + return 0; // Will throw exception by checking with Thread::Current. } extern "C" uint8_t artGetBooleanInstanceFromCode(uint32_t field_idx, mirror::Object* obj, - mirror::ArtMethod* referrer, Thread* self, - StackReference* sp) + mirror::ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveRead, sizeof(int8_t)); - if (LIKELY(field != NULL && obj != NULL)) { + if (LIKELY(field != nullptr && obj != nullptr)) { return field->GetBoolean(obj); } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int8_t)); - if (LIKELY(field != NULL)) { - if (UNLIKELY(obj == NULL)) { + if (LIKELY(field != nullptr)) { + if (UNLIKELY(obj == nullptr)) { ThrowLocation throw_location = self->GetCurrentLocationForThrow(); ThrowNullPointerExceptionForFieldAccess(throw_location, field, true); } else { return field->GetBoolean(obj); } } - return 0; // Will throw exception by checking with Thread::Current + return 0; // Will throw exception by checking with Thread::Current. 
} extern "C" int16_t artGetShortInstanceFromCode(uint32_t field_idx, mirror::Object* obj, - mirror::ArtMethod* referrer, Thread* self, - StackReference* sp) + mirror::ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveRead, sizeof(int16_t)); - if (LIKELY(field != NULL && obj != NULL)) { + if (LIKELY(field != nullptr && obj != nullptr)) { return field->GetShort(obj); } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int16_t)); - if (LIKELY(field != NULL)) { - if (UNLIKELY(obj == NULL)) { + if (LIKELY(field != nullptr)) { + if (UNLIKELY(obj == nullptr)) { ThrowLocation throw_location = self->GetCurrentLocationForThrow(); ThrowNullPointerExceptionForFieldAccess(throw_location, field, true); } else { return field->GetShort(obj); } } - return 0; // Will throw exception by checking with Thread::Current + return 0; // Will throw exception by checking with Thread::Current. } extern "C" uint16_t artGetCharInstanceFromCode(uint32_t field_idx, mirror::Object* obj, - mirror::ArtMethod* referrer, Thread* self, - StackReference* sp) + mirror::ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveRead, sizeof(int16_t)); - if (LIKELY(field != NULL && obj != NULL)) { + if (LIKELY(field != nullptr && obj != nullptr)) { return field->GetChar(obj); } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int16_t)); - if (LIKELY(field != NULL)) { - if (UNLIKELY(obj == NULL)) { + if (LIKELY(field != nullptr)) { + if (UNLIKELY(obj == nullptr)) { ThrowLocation throw_location = self->GetCurrentLocationForThrow(); ThrowNullPointerExceptionForFieldAccess(throw_location, field, true); } else { return field->GetChar(obj); } } - return 0; // Will throw exception by checking with Thread::Current + return 0; // Will throw exception by checking with Thread::Current. } extern "C" uint32_t artGet32InstanceFromCode(uint32_t field_idx, mirror::Object* obj, - mirror::ArtMethod* referrer, Thread* self, - StackReference* sp) + mirror::ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveRead, sizeof(int32_t)); - if (LIKELY(field != NULL && obj != NULL)) { + if (LIKELY(field != nullptr && obj != nullptr)) { return field->Get32(obj); } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int32_t)); - if (LIKELY(field != NULL)) { - if (UNLIKELY(obj == NULL)) { + if (LIKELY(field != nullptr)) { + if (UNLIKELY(obj == nullptr)) { ThrowLocation throw_location = self->GetCurrentLocationForThrow(); ThrowNullPointerExceptionForFieldAccess(throw_location, field, true); } else { return field->Get32(obj); } } - return 0; // Will throw exception by checking with Thread::Current + return 0; // Will throw exception by checking with Thread::Current. 
} extern "C" uint64_t artGet64InstanceFromCode(uint32_t field_idx, mirror::Object* obj, - mirror::ArtMethod* referrer, Thread* self, - StackReference* sp) + mirror::ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveRead, sizeof(int64_t)); - if (LIKELY(field != NULL && obj != NULL)) { + if (LIKELY(field != nullptr && obj != nullptr)) { return field->Get64(obj); } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int64_t)); - if (LIKELY(field != NULL)) { - if (UNLIKELY(obj == NULL)) { + if (LIKELY(field != nullptr)) { + if (UNLIKELY(obj == nullptr)) { ThrowLocation throw_location = self->GetCurrentLocationForThrow(); ThrowNullPointerExceptionForFieldAccess(throw_location, field, true); } else { return field->Get64(obj); } } - return 0; // Will throw exception by checking with Thread::Current + return 0; // Will throw exception by checking with Thread::Current. } extern "C" mirror::Object* artGetObjInstanceFromCode(uint32_t field_idx, mirror::Object* obj, mirror::ArtMethod* referrer, - Thread* self, - StackReference* sp) + Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, InstanceObjectRead, sizeof(mirror::HeapReference)); - if (LIKELY(field != NULL && obj != NULL)) { + if (LIKELY(field != nullptr && obj != nullptr)) { return field->GetObj(obj); } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(mirror::HeapReference)); - if (LIKELY(field != NULL)) { - if (UNLIKELY(obj == NULL)) { + if (LIKELY(field != nullptr)) { + if (UNLIKELY(obj == nullptr)) { ThrowLocation throw_location = self->GetCurrentLocationForThrow(); ThrowNullPointerExceptionForFieldAccess(throw_location, field, true); } else { return field->GetObj(obj); } } - return NULL; // Will throw exception by checking with Thread::Current + return nullptr; // Will throw exception by checking with Thread::Current. } extern "C" int artSet8StaticFromCode(uint32_t field_idx, uint32_t new_value, - mirror::ArtMethod* referrer, Thread* self, - StackReference* sp) + mirror::ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveWrite, sizeof(int8_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { Primitive::Type type = field->GetTypeAsPrimitiveType(); // Compiled code can't use transactional mode. if (type == Primitive::kPrimBoolean) { @@ -324,9 +313,8 @@ extern "C" int artSet8StaticFromCode(uint32_t field_idx, uint32_t new_value, } return 0; // success } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int8_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { Primitive::Type type = field->GetTypeAsPrimitiveType(); // Compiled code can't use transactional mode. 
if (type == Primitive::kPrimBoolean) { @@ -341,12 +329,12 @@ extern "C" int artSet8StaticFromCode(uint32_t field_idx, uint32_t new_value, } extern "C" int artSet16StaticFromCode(uint32_t field_idx, uint16_t new_value, - mirror::ArtMethod* referrer, Thread* self, - StackReference* sp) + mirror::ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveWrite, sizeof(int16_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { Primitive::Type type = field->GetTypeAsPrimitiveType(); // Compiled code can't use transactional mode. if (type == Primitive::kPrimChar) { @@ -357,9 +345,8 @@ extern "C" int artSet16StaticFromCode(uint32_t field_idx, uint16_t new_value, } return 0; // success } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int16_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { Primitive::Type type = field->GetTypeAsPrimitiveType(); // Compiled code can't use transactional mode. if (type == Primitive::kPrimChar) { @@ -374,19 +361,18 @@ extern "C" int artSet16StaticFromCode(uint32_t field_idx, uint16_t new_value, } extern "C" int artSet32StaticFromCode(uint32_t field_idx, uint32_t new_value, - mirror::ArtMethod* referrer, Thread* self, - StackReference* sp) + mirror::ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveWrite, sizeof(int32_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { // Compiled code can't use transactional mode. field->Set32(field->GetDeclaringClass(), new_value); return 0; // success } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int32_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { // Compiled code can't use transactional mode. field->Set32(field->GetDeclaringClass(), new_value); return 0; // success @@ -395,19 +381,18 @@ extern "C" int artSet32StaticFromCode(uint32_t field_idx, uint32_t new_value, } extern "C" int artSet64StaticFromCode(uint32_t field_idx, mirror::ArtMethod* referrer, - uint64_t new_value, Thread* self, - StackReference* sp) + uint64_t new_value, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveWrite, sizeof(int64_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { // Compiled code can't use transactional mode. field->Set64(field->GetDeclaringClass(), new_value); return 0; // success } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int64_t)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { // Compiled code can't use transactional mode. 
field->Set64(field->GetDeclaringClass(), new_value); return 0; // success @@ -416,22 +401,21 @@ extern "C" int artSet64StaticFromCode(uint32_t field_idx, mirror::ArtMethod* ref } extern "C" int artSetObjStaticFromCode(uint32_t field_idx, mirror::Object* new_value, - mirror::ArtMethod* referrer, Thread* self, - StackReference* sp) + mirror::ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, StaticObjectWrite, sizeof(mirror::HeapReference)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { if (LIKELY(!field->IsPrimitiveType())) { // Compiled code can't use transactional mode. field->SetObj(field->GetDeclaringClass(), new_value); return 0; // success } } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(mirror::HeapReference)); - if (LIKELY(field != NULL)) { + if (LIKELY(field != nullptr)) { // Compiled code can't use transactional mode. field->SetObj(field->GetDeclaringClass(), new_value); return 0; // success @@ -440,12 +424,12 @@ extern "C" int artSetObjStaticFromCode(uint32_t field_idx, mirror::Object* new_v } extern "C" int artSet8InstanceFromCode(uint32_t field_idx, mirror::Object* obj, uint8_t new_value, - mirror::ArtMethod* referrer, Thread* self, - StackReference* sp) + mirror::ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveWrite, sizeof(int8_t)); - if (LIKELY(field != NULL && obj != NULL)) { + if (LIKELY(field != nullptr && obj != nullptr)) { Primitive::Type type = field->GetTypeAsPrimitiveType(); // Compiled code can't use transactional mode. if (type == Primitive::kPrimBoolean) { @@ -456,15 +440,14 @@ extern "C" int artSet8InstanceFromCode(uint32_t field_idx, mirror::Object* obj, } return 0; // success } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); { StackHandleScope<1> hs(self); HandleWrapper h_obj(hs.NewHandleWrapper(&obj)); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int8_t)); } - if (LIKELY(field != NULL)) { - if (UNLIKELY(obj == NULL)) { + if (LIKELY(field != nullptr)) { + if (UNLIKELY(obj == nullptr)) { ThrowLocation throw_location = self->GetCurrentLocationForThrow(); ThrowNullPointerExceptionForFieldAccess(throw_location, field, false); } else { @@ -482,12 +465,12 @@ extern "C" int artSet8InstanceFromCode(uint32_t field_idx, mirror::Object* obj, } extern "C" int artSet16InstanceFromCode(uint32_t field_idx, mirror::Object* obj, uint16_t new_value, - mirror::ArtMethod* referrer, Thread* self, - StackReference* sp) + mirror::ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveWrite, sizeof(int16_t)); - if (LIKELY(field != NULL && obj != NULL)) { + if (LIKELY(field != nullptr && obj != nullptr)) { Primitive::Type type = field->GetTypeAsPrimitiveType(); // Compiled code can't use transactional mode. 
if (type == Primitive::kPrimChar) { @@ -498,15 +481,14 @@ extern "C" int artSet16InstanceFromCode(uint32_t field_idx, mirror::Object* obj, } return 0; // success } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); { StackHandleScope<1> hs(self); HandleWrapper h_obj(hs.NewHandleWrapper(&obj)); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int16_t)); } - if (LIKELY(field != NULL)) { - if (UNLIKELY(obj == NULL)) { + if (LIKELY(field != nullptr)) { + if (UNLIKELY(obj == nullptr)) { ThrowLocation throw_location = self->GetCurrentLocationForThrow(); ThrowNullPointerExceptionForFieldAccess(throw_location, field, false); } else { @@ -525,25 +507,24 @@ extern "C" int artSet16InstanceFromCode(uint32_t field_idx, mirror::Object* obj, } extern "C" int artSet32InstanceFromCode(uint32_t field_idx, mirror::Object* obj, uint32_t new_value, - mirror::ArtMethod* referrer, Thread* self, - StackReference* sp) + mirror::ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveWrite, sizeof(int32_t)); - if (LIKELY(field != NULL && obj != NULL)) { + if (LIKELY(field != nullptr && obj != nullptr)) { // Compiled code can't use transactional mode. field->Set32(obj, new_value); return 0; // success } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); { StackHandleScope<1> hs(self); HandleWrapper h_obj(hs.NewHandleWrapper(&obj)); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int32_t)); } - if (LIKELY(field != NULL)) { - if (UNLIKELY(obj == NULL)) { + if (LIKELY(field != nullptr)) { + if (UNLIKELY(obj == nullptr)) { ThrowLocation throw_location = self->GetCurrentLocationForThrow(); ThrowNullPointerExceptionForFieldAccess(throw_location, field, false); } else { @@ -556,25 +537,20 @@ extern "C" int artSet32InstanceFromCode(uint32_t field_idx, mirror::Object* obj, } extern "C" int artSet64InstanceFromCode(uint32_t field_idx, mirror::Object* obj, uint64_t new_value, - Thread* self, StackReference* sp) + mirror::ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - constexpr size_t frame_size = GetCalleeSaveFrameSize(kRuntimeISA, Runtime::kRefsOnly); - mirror::ArtMethod* referrer = - reinterpret_cast*>( - reinterpret_cast(sp) + frame_size)->AsMirrorPtr(); + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveWrite, sizeof(int64_t)); - if (LIKELY(field != NULL && obj != NULL)) { + if (LIKELY(field != nullptr && obj != nullptr)) { // Compiled code can't use transactional mode. 
field->Set64(obj, new_value); return 0; // success } - sp->Assign(Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsOnly)); - self->SetTopOfStack(sp, 0); field = FindFieldFromCode(field_idx, referrer, self, sizeof(int64_t)); - if (LIKELY(field != NULL)) { - if (UNLIKELY(obj == NULL)) { + if (LIKELY(field != nullptr)) { + if (UNLIKELY(obj == nullptr)) { ThrowLocation throw_location = self->GetCurrentLocationForThrow(); ThrowNullPointerExceptionForFieldAccess(throw_location, field, false); } else { @@ -588,21 +564,20 @@ extern "C" int artSet64InstanceFromCode(uint32_t field_idx, mirror::Object* obj, extern "C" int artSetObjInstanceFromCode(uint32_t field_idx, mirror::Object* obj, mirror::Object* new_value, - mirror::ArtMethod* referrer, Thread* self, - StackReference* sp) + mirror::ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtField* field = FindFieldFast(field_idx, referrer, InstanceObjectWrite, sizeof(mirror::HeapReference)); - if (LIKELY(field != NULL && obj != NULL)) { + if (LIKELY(field != nullptr && obj != nullptr)) { // Compiled code can't use transactional mode. field->SetObj(obj, new_value); return 0; // success } - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); field = FindFieldFromCode(field_idx, referrer, self, sizeof(mirror::HeapReference)); - if (LIKELY(field != NULL)) { - if (UNLIKELY(obj == NULL)) { + if (LIKELY(field != nullptr)) { + if (UNLIKELY(obj == nullptr)) { ThrowLocation throw_location = self->GetCurrentLocationForThrow(); ThrowNullPointerExceptionForFieldAccess(throw_location, field, false); } else { diff --git a/runtime/entrypoints/quick/quick_fillarray_entrypoints.cc b/runtime/entrypoints/quick/quick_fillarray_entrypoints.cc index 06bbabcbe..e3365431c 100644 --- a/runtime/entrypoints/quick/quick_fillarray_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_fillarray_entrypoints.cc @@ -25,10 +25,9 @@ namespace art { * Handle fill array data by copying appropriate part of dex file into array. 
*/ extern "C" int artHandleFillArrayDataFromCode(uint32_t payload_offset, mirror::Array* array, - mirror::ArtMethod* method, Thread* self, - StackReference* sp) + mirror::ArtMethod* method, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); + ScopedQuickEntrypointChecks sqec(self); const uint16_t* const insns = method->GetCodeItem()->insns_; const Instruction::ArrayDataPayload* payload = reinterpret_cast(insns + payload_offset); diff --git a/runtime/entrypoints/quick/quick_instrumentation_entrypoints.cc b/runtime/entrypoints/quick/quick_instrumentation_entrypoints.cc index bb0e5e31c..6b3e9dc98 100644 --- a/runtime/entrypoints/quick/quick_instrumentation_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_instrumentation_entrypoints.cc @@ -28,10 +28,9 @@ namespace art { extern "C" const void* artInstrumentationMethodEntryFromCode(mirror::ArtMethod* method, mirror::Object* this_object, Thread* self, - StackReference* sp, uintptr_t lr) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs); + ScopedQuickEntrypointChecks sqec(self); instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation(); const void* result; if (instrumentation->IsDeoptimized(method)) { @@ -52,23 +51,19 @@ extern "C" TwoWordReturn artInstrumentationMethodExitFromCode(Thread* self, uint64_t gpr_result, uint64_t fpr_result) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - // TODO: use FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly) not the hand inlined below. - // We use the hand inline version to ensure the return_pc is assigned before verifying the - // stack. - // Be aware the store below may well stomp on an incoming argument. - Locks::mutator_lock_->AssertSharedHeld(self); - Runtime* runtime = Runtime::Current(); - sp->Assign(runtime->GetCalleeSaveMethod(Runtime::kRefsOnly)); - uint32_t return_pc_offset = GetCalleeSavePCOffset(kRuntimeISA, Runtime::kRefsOnly); + // Compute address of return PC and sanity check that it currently holds 0. + uint32_t return_pc_offset = GetCalleeSaveReturnPcOffset(kRuntimeISA, Runtime::kRefsOnly); uintptr_t* return_pc = reinterpret_cast(reinterpret_cast(sp) + return_pc_offset); CHECK_EQ(*return_pc, 0U); - self->SetTopOfStack(sp, 0); - self->VerifyStack(); + + // Pop the frame filling in the return pc. The low half of the return value is 0 when + // deoptimization shouldn't be performed with the high-half having the return address. When + // deoptimization should be performed the low half is zero and the high-half the address of the + // deoptimization entry point. 
instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation(); TwoWordReturn return_or_deoptimize_pc = instrumentation->PopInstrumentationStackFrame( self, return_pc, gpr_result, fpr_result); - self->VerifyStack(); return return_or_deoptimize_pc; } diff --git a/runtime/entrypoints/quick/quick_lock_entrypoints.cc b/runtime/entrypoints/quick/quick_lock_entrypoints.cc index 92c0841dd..8ceac971e 100644 --- a/runtime/entrypoints/quick/quick_lock_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_lock_entrypoints.cc @@ -20,12 +20,11 @@ namespace art { -extern "C" int artLockObjectFromCode(mirror::Object* obj, Thread* self, - StackReference* sp) +extern "C" int artLockObjectFromCode(mirror::Object* obj, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) NO_THREAD_SAFETY_ANALYSIS /* EXCLUSIVE_LOCK_FUNCTION(Monitor::monitor_lock_) */ { - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); - if (UNLIKELY(obj == NULL)) { + ScopedQuickEntrypointChecks sqec(self); + if (UNLIKELY(obj == nullptr)) { ThrowLocation throw_location(self->GetCurrentLocationForThrow()); ThrowNullPointerException(&throw_location, "Null reference used for synchronization (monitor-enter)"); @@ -43,12 +42,11 @@ extern "C" int artLockObjectFromCode(mirror::Object* obj, Thread* self, } } -extern "C" int artUnlockObjectFromCode(mirror::Object* obj, Thread* self, - StackReference* sp) +extern "C" int artUnlockObjectFromCode(mirror::Object* obj, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) NO_THREAD_SAFETY_ANALYSIS /* UNLOCK_FUNCTION(Monitor::monitor_lock_) */ { - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); - if (UNLIKELY(obj == NULL)) { + ScopedQuickEntrypointChecks sqec(self); + if (UNLIKELY(obj == nullptr)) { ThrowLocation throw_location(self->GetCurrentLocationForThrow()); ThrowNullPointerException(&throw_location, "Null reference used for synchronization (monitor-exit)"); diff --git a/runtime/entrypoints/quick/quick_thread_entrypoints.cc b/runtime/entrypoints/quick/quick_thread_entrypoints.cc index ea75fb6c4..87e0c6eec 100644 --- a/runtime/entrypoints/quick/quick_thread_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_thread_entrypoints.cc @@ -19,10 +19,9 @@ namespace art { -extern "C" void artTestSuspendFromCode(Thread* self, StackReference* sp) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { +extern "C" void artTestSuspendFromCode(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { // Called when suspend count check value is 0 and thread->suspend_count_ != 0 - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); + ScopedQuickEntrypointChecks sqec(self); self->CheckSuspend(); } diff --git a/runtime/entrypoints/quick/quick_throw_entrypoints.cc b/runtime/entrypoints/quick/quick_throw_entrypoints.cc index 13decc806..25df40b6c 100644 --- a/runtime/entrypoints/quick/quick_throw_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_throw_entrypoints.cc @@ -24,16 +24,14 @@ namespace art { // Deliver an exception that's pending on thread helping set up a callee save frame on the way. -extern "C" void artDeliverPendingExceptionFromCode(Thread* thread, - StackReference* sp) +extern "C" void artDeliverPendingExceptionFromCode(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - FinishCalleeSaveFrameSetup(thread, sp, Runtime::kSaveAll); - thread->QuickDeliverException(); + ScopedQuickEntrypointChecks sqec(self); + self->QuickDeliverException(); } // Called by generated call to throw an exception. 
-extern "C" void artDeliverExceptionFromCode(mirror::Throwable* exception, Thread* self, - StackReference* sp) +extern "C" void artDeliverExceptionFromCode(mirror::Throwable* exception, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { /* * exception may be NULL, in which case this routine should @@ -42,9 +40,9 @@ extern "C" void artDeliverExceptionFromCode(mirror::Throwable* exception, Thread * and threw a NPE if NULL. This routine responsible for setting * exception_ in thread and delivering the exception. */ - FinishCalleeSaveFrameSetup(self, sp, Runtime::kSaveAll); + ScopedQuickEntrypointChecks sqec(self); ThrowLocation throw_location = self->GetCurrentLocationForThrow(); - if (exception == NULL) { + if (exception == nullptr) { self->ThrowNewException(throw_location, "Ljava/lang/NullPointerException;", "throw with null exception"); } else { @@ -54,10 +52,9 @@ extern "C" void artDeliverExceptionFromCode(mirror::Throwable* exception, Thread } // Called by generated call to throw a NPE exception. -extern "C" void artThrowNullPointerExceptionFromCode(Thread* self, - StackReference* sp) +extern "C" void artThrowNullPointerExceptionFromCode(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - FinishCalleeSaveFrameSetup(self, sp, Runtime::kSaveAll); + ScopedQuickEntrypointChecks sqec(self); self->NoteSignalBeingHandled(); ThrowLocation throw_location = self->GetCurrentLocationForThrow(); ThrowNullPointerExceptionFromDexPC(throw_location); @@ -66,52 +63,50 @@ extern "C" void artThrowNullPointerExceptionFromCode(Thread* self, } // Called by generated call to throw an arithmetic divide by zero exception. -extern "C" void artThrowDivZeroFromCode(Thread* self, StackReference* sp) +extern "C" void artThrowDivZeroFromCode(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - FinishCalleeSaveFrameSetup(self, sp, Runtime::kSaveAll); + ScopedQuickEntrypointChecks sqec(self); ThrowArithmeticExceptionDivideByZero(); self->QuickDeliverException(); } // Called by generated call to throw an array index out of bounds exception. 
-extern "C" void artThrowArrayBoundsFromCode(int index, int length, Thread* self, - StackReference*sp) +extern "C" void artThrowArrayBoundsFromCode(int index, int length, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - FinishCalleeSaveFrameSetup(self, sp, Runtime::kSaveAll); + ScopedQuickEntrypointChecks sqec(self); ThrowArrayIndexOutOfBoundsException(index, length); self->QuickDeliverException(); } -extern "C" void artThrowStackOverflowFromCode(Thread* self, StackReference* sp) +extern "C" void artThrowStackOverflowFromCode(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - FinishCalleeSaveFrameSetup(self, sp, Runtime::kSaveAll); + ScopedQuickEntrypointChecks sqec(self); self->NoteSignalBeingHandled(); ThrowStackOverflowError(self); self->NoteSignalHandlerDone(); self->QuickDeliverException(); } -extern "C" void artThrowNoSuchMethodFromCode(int32_t method_idx, Thread* self, - StackReference* sp) +extern "C" void artThrowNoSuchMethodFromCode(int32_t method_idx, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - FinishCalleeSaveFrameSetup(self, sp, Runtime::kSaveAll); + ScopedQuickEntrypointChecks sqec(self); ThrowNoSuchMethodError(method_idx); self->QuickDeliverException(); } extern "C" void artThrowClassCastException(mirror::Class* dest_type, mirror::Class* src_type, - Thread* self, StackReference* sp) + Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - FinishCalleeSaveFrameSetup(self, sp, Runtime::kSaveAll); - CHECK(!dest_type->IsAssignableFrom(src_type)); + ScopedQuickEntrypointChecks sqec(self); + DCHECK(!dest_type->IsAssignableFrom(src_type)); ThrowClassCastException(dest_type, src_type); self->QuickDeliverException(); } extern "C" void artThrowArrayStoreException(mirror::Object* array, mirror::Object* value, - Thread* self, StackReference* sp) + Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - FinishCalleeSaveFrameSetup(self, sp, Runtime::kSaveAll); + ScopedQuickEntrypointChecks sqec(self); ThrowArrayStoreException(value->GetClass(), array->GetClass()); self->QuickDeliverException(); } diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc index 224756bbc..3695ff11d 100644 --- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc @@ -466,7 +466,7 @@ extern "C" uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Threa SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { // Ensure we don't get thread suspension until the object arguments are safely in the shadow // frame. - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs); + ScopedQuickEntrypointChecks sqec(self); if (method->IsAbstract()) { ThrowAbstractMethodError(method); @@ -593,7 +593,6 @@ extern "C" uint64_t artQuickProxyInvokeHandler(mirror::ArtMethod* proxy_method, self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments"); // Register the top of the managed stack, making stack crawlable. 
DCHECK_EQ(sp->AsMirrorPtr(), proxy_method) << PrettyMethod(proxy_method); - self->SetTopOfStack(sp, 0); DCHECK_EQ(proxy_method->GetFrameSizeInBytes(), Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes()) << PrettyMethod(proxy_method); @@ -678,7 +677,7 @@ extern "C" const void* artQuickResolutionTrampoline(mirror::ArtMethod* called, Thread* self, StackReference* sp) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs); + ScopedQuickEntrypointChecks sqec(self); // Start new JNI local reference state JNIEnvExt* env = self->GetJniEnv(); ScopedObjectAccessUnchecked soa(env); @@ -1216,6 +1215,7 @@ class ComputeNativeCallFrameSize { Primitive::Type cur_type_ = Primitive::GetType(shorty[i]); switch (cur_type_) { case Primitive::kPrimNot: + // TODO: fix abuse of mirror types. sm.AdvanceHandleScope( reinterpret_cast(0x12345678)); break; @@ -1609,13 +1609,13 @@ extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self, uint32_t shorty_len = 0; const char* shorty = called->GetShorty(&shorty_len); - // Run the visitor. + // Run the visitor and update sp. BuildGenericJniFrameVisitor visitor(self, called->IsStatic(), shorty, shorty_len, &sp); visitor.VisitArguments(); visitor.FinalizeHandleScope(self); // Fix up managed-stack things in Thread. - self->SetTopOfStack(sp, 0); + self->SetTopOfStack(sp); self->VerifyStack(); @@ -1744,10 +1744,11 @@ template static TwoWordReturn artInvokeCommon(uint32_t method_idx, mirror::Object* this_object, mirror::ArtMethod* caller_method, Thread* self, StackReference* sp) { + ScopedQuickEntrypointChecks sqec(self); + DCHECK_EQ(sp->AsMirrorPtr(), Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)); mirror::ArtMethod* method = FindMethodFast(method_idx, this_object, caller_method, access_check, type); if (UNLIKELY(method == nullptr)) { - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs); const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile(); uint32_t shorty_len; const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(method_idx), &shorty_len); @@ -1852,21 +1853,20 @@ extern "C" TwoWordReturn artInvokeInterfaceTrampoline(mirror::ArtMethod* interfa Thread* self, StackReference* sp) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ScopedQuickEntrypointChecks sqec(self); mirror::ArtMethod* method; if (LIKELY(interface_method->GetDexMethodIndex() != DexFile::kDexNoIndex)) { method = this_object->GetClass()->FindVirtualMethodForInterface(interface_method); if (UNLIKELY(method == NULL)) { - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs); ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(interface_method, this_object, caller_method); return GetTwoWordFailureValue(); // Failure. } } else { - FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs); DCHECK(interface_method == Runtime::Current()->GetResolutionMethod()); // Find the caller PC. - constexpr size_t pc_offset = GetCalleeSavePCOffset(kRuntimeISA, Runtime::kRefsAndArgs); + constexpr size_t pc_offset = GetCalleeSaveReturnPcOffset(kRuntimeISA, Runtime::kRefsAndArgs); uintptr_t caller_pc = *reinterpret_cast(reinterpret_cast(sp) + pc_offset); // Map the caller PC to a dex PC. 
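Note on the entrypoint changes above: the recurring edit is mechanical. Each quick entrypoint loses its trailing StackReference<mirror::ArtMethod>* sp parameter (or, for the trampolines that still need sp, keeps it) and drops the explicit FinishCalleeSaveFrameSetup(self, sp, ...) call, because the assembly stubs now place the runtime method in the callee-save frame and write the stack anchor into thread-local storage before calling into C++. What remains on the C++ side is a debug-only RAII guard, ScopedQuickEntrypointChecks, declared at the top of each entrypoint. The snippet below is only a sketch of the shape such a guard takes, written against ART's existing Thread, Locks and kIsDebugBuild declarations; the member names and bodies are illustrative, not the definition this patch adds to callee_save_frame.h.

    // Illustrative sketch, not the actual definition from callee_save_frame.h.
    class ScopedQuickEntrypointChecks {
     public:
      explicit ScopedQuickEntrypointChecks(Thread* self) : self_(self) {
        if (kIsDebugBuild) {
          // On entry the stub has already anchored the frame, so the debug checks
          // that FinishCalleeSaveFrameSetup used to perform can run immediately.
          Locks::mutator_lock_->AssertSharedHeld(self_);
          self_->VerifyStack();
        }
      }
      ~ScopedQuickEntrypointChecks() {
        if (kIsDebugBuild) {
          // Re-verify the stack before returning to managed code.
          self_->VerifyStack();
        }
      }
     private:
      Thread* const self_;
    };

Because the guard is scoped, the fast paths that used to return before any frame setup now pass through the same debug checks as the slow paths.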
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints_test.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints_test.cc index 41af88e63..a9af75401 100644 --- a/runtime/entrypoints/quick/quick_trampoline_entrypoints_test.cc +++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints_test.cc @@ -98,11 +98,11 @@ TEST_F(QuickTrampolineEntrypointsTest, ReturnPC) { // Note: we can only check against the kRuntimeISA, because the ArtMethod computation uses // sizeof(void*), which is wrong when the target bitwidth is not the same as the host's. CheckPCOffset(kRuntimeISA, Runtime::kRefsAndArgs, - GetCalleeSavePCOffset(kRuntimeISA, Runtime::kRefsAndArgs)); + GetCalleeSaveReturnPcOffset(kRuntimeISA, Runtime::kRefsAndArgs)); CheckPCOffset(kRuntimeISA, Runtime::kRefsOnly, - GetCalleeSavePCOffset(kRuntimeISA, Runtime::kRefsOnly)); + GetCalleeSaveReturnPcOffset(kRuntimeISA, Runtime::kRefsOnly)); CheckPCOffset(kRuntimeISA, Runtime::kSaveAll, - GetCalleeSavePCOffset(kRuntimeISA, Runtime::kSaveAll)); + GetCalleeSaveReturnPcOffset(kRuntimeISA, Runtime::kSaveAll)); } } // namespace art diff --git a/runtime/exception_test.cc b/runtime/exception_test.cc index 1714134d2..1365cd4be 100644 --- a/runtime/exception_test.cc +++ b/runtime/exception_test.cc @@ -43,7 +43,7 @@ class ExceptionTest : public CommonRuntimeTest { Handle class_loader( hs.NewHandle(soa.Decode(LoadDex("ExceptionHandle")))); my_klass_ = class_linker_->FindClass(soa.Self(), "LExceptionHandle;", class_loader); - ASSERT_TRUE(my_klass_ != NULL); + ASSERT_TRUE(my_klass_ != nullptr); Handle klass(hs.NewHandle(my_klass_)); class_linker_->EnsureInitialized(soa.Self(), klass, true, true); my_klass_ = klass.Get(); @@ -93,12 +93,12 @@ class ExceptionTest : public CommonRuntimeTest { const uint8_t* code_ptr = &fake_header_code_and_maps_[mapping_table_offset]; method_f_ = my_klass_->FindVirtualMethod("f", "()I"); - ASSERT_TRUE(method_f_ != NULL); + ASSERT_TRUE(method_f_ != nullptr); method_f_->SetEntryPointFromQuickCompiledCode(code_ptr); method_f_->SetNativeGcMap(&fake_gc_map_[0]); method_g_ = my_klass_->FindVirtualMethod("g", "(I)V"); - ASSERT_TRUE(method_g_ != NULL); + ASSERT_TRUE(method_g_ != nullptr); method_g_->SetEntryPointFromQuickCompiledCode(code_ptr); method_g_->SetNativeGcMap(&fake_gc_map_[0]); } @@ -122,7 +122,7 @@ TEST_F(ExceptionTest, FindCatchHandler) { ScopedObjectAccess soa(Thread::Current()); const DexFile::CodeItem* code_item = dex_->GetCodeItem(method_f_->GetCodeItemOffset()); - ASSERT_TRUE(code_item != NULL); + ASSERT_TRUE(code_item != nullptr); ASSERT_EQ(2u, code_item->tries_size_); ASSERT_NE(0u, code_item->insns_size_in_code_units_); @@ -163,14 +163,30 @@ TEST_F(ExceptionTest, StackTraceElement) { ScopedObjectAccess soa(env); std::vector fake_stack; + Runtime* r = Runtime::Current(); + r->SetInstructionSet(kRuntimeISA); + mirror::ArtMethod* save_method = r->CreateCalleeSaveMethod(Runtime::kSaveAll); + r->SetCalleeSaveMethod(save_method, Runtime::kSaveAll); + QuickMethodFrameInfo frame_info = save_method->GetQuickFrameInfo(); + ASSERT_EQ(kStackAlignment, 16U); // ASSERT_EQ(sizeof(uintptr_t), sizeof(uint32_t)); + if (!kUsePortableCompiler) { - // Create two fake stack frames with mapping data created in SetUp. We map offset 3 in the code - // to dex pc 3. + // Create three fake stack frames with mapping data created in SetUp. We map offset 3 in the + // code to dex pc 3. const uint32_t dex_pc = 3; + // Create the stack frame for the callee save method, expected by the runtime. 
+ fake_stack.push_back(reinterpret_cast(save_method)); + for (size_t i = 0; i < frame_info.FrameSizeInBytes() - 2 * sizeof(uintptr_t); + i += sizeof(uintptr_t)) { + fake_stack.push_back(0); + } + + fake_stack.push_back(method_g_->ToNativeQuickPc(dex_pc)); // return pc + // Create/push fake 16byte stack frame for method g fake_stack.push_back(reinterpret_cast(method_g_)); fake_stack.push_back(0); @@ -183,7 +199,7 @@ TEST_F(ExceptionTest, StackTraceElement) { fake_stack.push_back(0); fake_stack.push_back(0xEBAD6070); // return pc - // Pull Method* of NULL to terminate the trace + // Push Method* of NULL to terminate the trace fake_stack.push_back(0); // Push null values which will become null incoming arguments. @@ -192,9 +208,7 @@ TEST_F(ExceptionTest, StackTraceElement) { fake_stack.push_back(0); // Set up thread to appear as if we called out of method_g_ at pc dex 3 - thread->SetTopOfStack( - reinterpret_cast*>(&fake_stack[0]), - method_g_->ToNativeQuickPc(dex_pc)); // return pc + thread->SetTopOfStack(reinterpret_cast*>(&fake_stack[0])); } else { // Create/push fake 20-byte shadow frame for method g fake_stack.push_back(0); @@ -215,33 +229,35 @@ TEST_F(ExceptionTest, StackTraceElement) { } jobject internal = thread->CreateInternalStackTrace(soa); - ASSERT_TRUE(internal != NULL); + ASSERT_TRUE(internal != nullptr); jobjectArray ste_array = Thread::InternalStackTraceToStackTraceElementArray(soa, internal); - ASSERT_TRUE(ste_array != NULL); + ASSERT_TRUE(ste_array != nullptr); mirror::ObjectArray* trace_array = soa.Decode*>(ste_array); - ASSERT_TRUE(trace_array != NULL); - ASSERT_TRUE(trace_array->Get(0) != NULL); + ASSERT_TRUE(trace_array != nullptr); + ASSERT_TRUE(trace_array->Get(0) != nullptr); EXPECT_STREQ("ExceptionHandle", trace_array->Get(0)->GetDeclaringClass()->ToModifiedUtf8().c_str()); - EXPECT_STREQ("ExceptionHandle.java", trace_array->Get(0)->GetFileName()->ToModifiedUtf8().c_str()); + EXPECT_STREQ("ExceptionHandle.java", + trace_array->Get(0)->GetFileName()->ToModifiedUtf8().c_str()); EXPECT_STREQ("g", trace_array->Get(0)->GetMethodName()->ToModifiedUtf8().c_str()); EXPECT_EQ(37, trace_array->Get(0)->GetLineNumber()); - ASSERT_TRUE(trace_array->Get(1) != NULL); + ASSERT_TRUE(trace_array->Get(1) != nullptr); EXPECT_STREQ("ExceptionHandle", trace_array->Get(1)->GetDeclaringClass()->ToModifiedUtf8().c_str()); - EXPECT_STREQ("ExceptionHandle.java", trace_array->Get(1)->GetFileName()->ToModifiedUtf8().c_str()); + EXPECT_STREQ("ExceptionHandle.java", + trace_array->Get(1)->GetFileName()->ToModifiedUtf8().c_str()); EXPECT_STREQ("f", trace_array->Get(1)->GetMethodName()->ToModifiedUtf8().c_str()); EXPECT_EQ(22, trace_array->Get(1)->GetLineNumber()); -#if !defined(ART_USE_PORTABLE_COMPILER) - thread->SetTopOfStack(NULL, 0); // Disarm the assertion that no code is running when we detach. -#else - thread->PopShadowFrame(); - thread->PopShadowFrame(); -#endif + if (!kUsePortableCompiler) { + thread->SetTopOfStack(nullptr); // Disarm the assertion that no code is running when we detach. + } else { + thread->PopShadowFrame(); + thread->PopShadowFrame(); + } } } // namespace art diff --git a/runtime/fault_handler.cc b/runtime/fault_handler.cc index 62e06090b..4ae929b0c 100644 --- a/runtime/fault_handler.cc +++ b/runtime/fault_handler.cc @@ -400,7 +400,7 @@ bool JavaStackTraceHandler::Action(int sig, siginfo_t* siginfo, void* context) { // Inside of generated code, sp[0] is the method, so sp is the frame. 
StackReference* frame = reinterpret_cast*>(sp); - self->SetTopOfStack(frame, 0); // Since we don't necessarily have a dex pc, pass in 0. + self->SetTopOfStack(frame); #ifdef TEST_NESTED_SIGNAL // To test the nested signal handler we raise a signal here. This will cause the // nested signal handler to be called and perform a longjmp back to the setjmp diff --git a/runtime/instrumentation.cc b/runtime/instrumentation.cc index 6c6058f81..adbece0e1 100644 --- a/runtime/instrumentation.cc +++ b/runtime/instrumentation.cc @@ -1016,6 +1016,7 @@ TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self, uintpt // Set return PC and check the sanity of the stack. *return_pc = instrumentation_frame.return_pc_; CheckStackDepth(self, instrumentation_frame, 0); + self->VerifyStack(); mirror::ArtMethod* method = instrumentation_frame.method_; uint32_t length; diff --git a/runtime/mirror/art_method.cc b/runtime/mirror/art_method.cc index 5c72e5567..9584d155c 100644 --- a/runtime/mirror/art_method.cc +++ b/runtime/mirror/art_method.cc @@ -315,12 +315,12 @@ void ArtMethod::AssertPcIsWithinQuickCode(uintptr_t pc) { bool ArtMethod::IsEntrypointInterpreter() { ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); - const void* oat_quick_code = class_linker->GetOatMethodQuickCodeFor(this); - const void* oat_portable_code = class_linker->GetOatMethodPortableCodeFor(this); if (!IsPortableCompiled()) { // Quick. + const void* oat_quick_code = class_linker->GetOatMethodQuickCodeFor(this); return oat_quick_code == nullptr || oat_quick_code != GetEntryPointFromQuickCompiledCode(); } else { // Portable. + const void* oat_portable_code = class_linker->GetOatMethodPortableCodeFor(this); return oat_portable_code == nullptr || oat_portable_code != GetEntryPointFromPortableCompiledCode(); } @@ -414,7 +414,7 @@ void ArtMethod::Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* // stack. Continue execution in the interpreter. self->ClearException(); ShadowFrame* shadow_frame = self->GetAndClearDeoptimizationShadowFrame(result); - self->SetTopOfStack(nullptr, 0); + self->SetTopOfStack(nullptr); self->SetTopOfShadowStack(shadow_frame); interpreter::EnterInterpreterFromDeoptimize(self, shadow_frame, result); } diff --git a/runtime/mirror/art_method.h b/runtime/mirror/art_method.h index 1dbfe5de8..3b9201266 100644 --- a/runtime/mirror/art_method.h +++ b/runtime/mirror/art_method.h @@ -334,7 +334,9 @@ class MANAGED ArtMethod FINAL : public Object { ALWAYS_INLINE static const void* EntryPointToCodePointer(const void* entry_point) { uintptr_t code = reinterpret_cast(entry_point); - code &= ~0x1; // TODO: Make this Thumb2 specific. + // TODO: Make this Thumb2 specific. It is benign on other architectures as code is always at + // least 2 byte aligned. + code &= ~0x1; return reinterpret_cast(code); } diff --git a/runtime/mirror/object_test.cc b/runtime/mirror/object_test.cc index a2a062617..a0aaa9e8c 100644 --- a/runtime/mirror/object_test.cc +++ b/runtime/mirror/object_test.cc @@ -80,26 +80,6 @@ TEST_F(ObjectTest, Constants) { EXPECT_EQ(kObjectHeaderSize, sizeof(Object)); } -// Keep the assembly code constats in sync. 
-TEST_F(ObjectTest, AsmConstants) { - EXPECT_EQ(CLASS_OFFSET, Object::ClassOffset().Int32Value()); - EXPECT_EQ(LOCK_WORD_OFFSET, Object::MonitorOffset().Int32Value()); - - EXPECT_EQ(CLASS_COMPONENT_TYPE_OFFSET, Class::ComponentTypeOffset().Int32Value()); - - EXPECT_EQ(ARRAY_LENGTH_OFFSET, Array::LengthOffset().Int32Value()); - EXPECT_EQ(OBJECT_ARRAY_DATA_OFFSET, Array::DataOffset(sizeof(HeapReference)).Int32Value()); - - EXPECT_EQ(STRING_VALUE_OFFSET, String::ValueOffset().Int32Value()); - EXPECT_EQ(STRING_COUNT_OFFSET, String::CountOffset().Int32Value()); - EXPECT_EQ(STRING_OFFSET_OFFSET, String::OffsetOffset().Int32Value()); - EXPECT_EQ(STRING_DATA_OFFSET, Array::DataOffset(sizeof(uint16_t)).Int32Value()); - - EXPECT_EQ(METHOD_DEX_CACHE_METHODS_OFFSET, ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()); - EXPECT_EQ(METHOD_PORTABLE_CODE_OFFSET, ArtMethod::EntryPointFromPortableCompiledCodeOffset().Int32Value()); - EXPECT_EQ(METHOD_QUICK_CODE_OFFSET, ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()); -} - TEST_F(ObjectTest, IsInSamePackage) { // Matches EXPECT_TRUE(Class::IsInSamePackage("Ljava/lang/Object;", "Ljava/lang/Class;")); diff --git a/runtime/oat.cc b/runtime/oat.cc index a237bf6a7..0d7fb0120 100644 --- a/runtime/oat.cc +++ b/runtime/oat.cc @@ -23,7 +23,7 @@ namespace art { const uint8_t OatHeader::kOatMagic[] = { 'o', 'a', 't', '\n' }; -const uint8_t OatHeader::kOatVersion[] = { '0', '4', '2', '\0' }; +const uint8_t OatHeader::kOatVersion[] = { '0', '4', '3', '\0' }; static size_t ComputeOatHeaderSize(const SafeMap* variable_data) { size_t estimate = 0U; diff --git a/runtime/runtime.cc b/runtime/runtime.cc index adf0994ac..62b065605 100644 --- a/runtime/runtime.cc +++ b/runtime/runtime.cc @@ -43,8 +43,9 @@ #include "arch/x86/registers_x86.h" #include "arch/x86_64/quick_method_frame_info_x86_64.h" #include "arch/x86_64/registers_x86_64.h" -#include "base/unix_file/fd_file.h" +#include "asm_support.h" #include "atomic.h" +#include "base/unix_file/fd_file.h" #include "class_linker.h" #include "debugger.h" #include "elf_file.h" @@ -146,6 +147,7 @@ Runtime::Runtime() implicit_null_checks_(false), implicit_so_checks_(false), implicit_suspend_checks_(false) { + CheckAsmSupportOffsetsAndSizes(); } Runtime::~Runtime() { diff --git a/runtime/stack.cc b/runtime/stack.cc index b4e85e2c2..0cdc984e8 100644 --- a/runtime/stack.cc +++ b/runtime/stack.cc @@ -526,7 +526,7 @@ void StackVisitor::WalkStack(bool include_transitions) { current_fragment = current_fragment->GetLink()) { cur_shadow_frame_ = current_fragment->GetTopShadowFrame(); cur_quick_frame_ = current_fragment->GetTopQuickFrame(); - cur_quick_frame_pc_ = current_fragment->GetTopQuickFramePc(); + cur_quick_frame_pc_ = 0; if (cur_quick_frame_ != NULL) { // Handle quick stack frames. // Can't be both a shadow and a quick fragment. diff --git a/runtime/stack.h b/runtime/stack.h index 25e50a1a1..2f8df6109 100644 --- a/runtime/stack.h +++ b/runtime/stack.h @@ -360,7 +360,7 @@ class ShadowFrame { class PACKED(4) ManagedStack { public: ManagedStack() - : link_(NULL), top_shadow_frame_(NULL), top_quick_frame_(NULL), top_quick_frame_pc_(0) {} + : top_quick_frame_(nullptr), link_(nullptr), top_shadow_frame_(nullptr) {} void PushManagedStackFragment(ManagedStack* fragment) { // Copy this top fragment into given fragment. 
@@ -386,29 +386,16 @@ class PACKED(4) ManagedStack { } void SetTopQuickFrame(StackReference* top) { - DCHECK(top_shadow_frame_ == NULL); + DCHECK(top_shadow_frame_ == nullptr); top_quick_frame_ = top; } - uintptr_t GetTopQuickFramePc() const { - return top_quick_frame_pc_; - } - - void SetTopQuickFramePc(uintptr_t pc) { - DCHECK(top_shadow_frame_ == NULL); - top_quick_frame_pc_ = pc; - } - static size_t TopQuickFrameOffset() { return OFFSETOF_MEMBER(ManagedStack, top_quick_frame_); } - static size_t TopQuickFramePcOffset() { - return OFFSETOF_MEMBER(ManagedStack, top_quick_frame_pc_); - } - ShadowFrame* PushShadowFrame(ShadowFrame* new_top_frame) { - DCHECK(top_quick_frame_ == NULL); + DCHECK(top_quick_frame_ == nullptr); ShadowFrame* old_frame = top_shadow_frame_; top_shadow_frame_ = new_top_frame; new_top_frame->SetLink(old_frame); @@ -416,8 +403,8 @@ class PACKED(4) ManagedStack { } ShadowFrame* PopShadowFrame() { - DCHECK(top_quick_frame_ == NULL); - CHECK(top_shadow_frame_ != NULL); + DCHECK(top_quick_frame_ == nullptr); + CHECK(top_shadow_frame_ != nullptr); ShadowFrame* frame = top_shadow_frame_; top_shadow_frame_ = frame->GetLink(); return frame; @@ -428,7 +415,7 @@ class PACKED(4) ManagedStack { } void SetTopShadowFrame(ShadowFrame* top) { - DCHECK(top_quick_frame_ == NULL); + DCHECK(top_quick_frame_ == nullptr); top_shadow_frame_ = top; } @@ -441,10 +428,9 @@ class PACKED(4) ManagedStack { bool ShadowFramesContain(StackReference* shadow_frame_entry) const; private: + StackReference* top_quick_frame_; ManagedStack* link_; ShadowFrame* top_shadow_frame_; - StackReference* top_quick_frame_; - uintptr_t top_quick_frame_pc_; }; class StackVisitor { diff --git a/runtime/thread.cc b/runtime/thread.cc index efe27eefd..83c4e0365 100644 --- a/runtime/thread.cc +++ b/runtime/thread.cc @@ -1812,7 +1812,6 @@ void Thread::DumpThreadOffset(std::ostream& os, uint32_t offset) { DO_THREAD_OFFSET(StackEndOffset(), "stack_end") DO_THREAD_OFFSET(ThinLockIdOffset(), "thin_lock_thread_id") DO_THREAD_OFFSET(TopOfManagedStackOffset(), "top_quick_frame_method") - DO_THREAD_OFFSET(TopOfManagedStackPcOffset(), "top_quick_frame_pc") DO_THREAD_OFFSET(TopShadowFrameOffset(), "top_shadow_frame") DO_THREAD_OFFSET(TopHandleScopeOffset(), "top_handle_scope") DO_THREAD_OFFSET(ThreadSuspendTriggerOffset(), "suspend_trigger") diff --git a/runtime/thread.h b/runtime/thread.h index 32ed758ef..694dbda97 100644 --- a/runtime/thread.h +++ b/runtime/thread.h @@ -365,9 +365,8 @@ class Thread { ThrowLocation GetCurrentLocationForThrow() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); - void SetTopOfStack(StackReference* top_method, uintptr_t pc) { + void SetTopOfStack(StackReference* top_method) { tlsPtr_.managed_stack.SetTopQuickFrame(top_method); - tlsPtr_.managed_stack.SetTopQuickFramePc(pc); } void SetTopOfShadowStack(ShadowFrame* top) { @@ -637,13 +636,6 @@ class Thread { ManagedStack::TopQuickFrameOffset()); } - template - static ThreadOffset TopOfManagedStackPcOffset() { - return ThreadOffsetFromTlsPtr( - OFFSETOF_MEMBER(tls_ptr_sized_values, managed_stack) + - ManagedStack::TopQuickFramePcOffset()); - } - const ManagedStack* GetManagedStack() const { return &tlsPtr_.managed_stack; } -- 2.11.0
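The ManagedStack changes in runtime/stack.h above do two things: they delete top_quick_frame_pc_ (the walker now starts with cur_quick_frame_pc_ = 0, as the stack.cc hunk shows, and recovers return pcs from the frames themselves), and they move top_quick_frame_ to be the first member. With that ordering, ManagedStack::TopQuickFrameOffset() is 0, so Thread::TopOfManagedStackOffset<>() now coincides with the offset of managed_stack itself within the Thread TLS block, presumably a small simplification for the hand-written stubs and the asm_support constants. A minimal, self-contained sketch of that layout invariant, using stand-in types rather than the real ART classes:

    #include <cstddef>

    // Stand-ins for art::ManagedStack's members after this patch; the real types
    // are StackReference<mirror::ArtMethod>*, ManagedStack* and ShadowFrame*.
    struct FakeManagedStack {
      void* top_quick_frame_;        // moved to be the first member
      FakeManagedStack* link_;
      void* top_shadow_frame_;
    };

    // The property the reordering provides: the quick-frame anchor sits at
    // offset 0, so "offset of managed_stack" == "offset of the anchor".
    static_assert(offsetof(FakeManagedStack, top_quick_frame_) == 0,
                  "quick-frame anchor expected at the start of ManagedStack");

The runtime-side counterpart is the CheckAsmSupportOffsetsAndSizes() call added to the Runtime constructor, which verifies the offsets that the assembly hard-codes.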
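The exception_test.cc change is a consequence of dropping the pc argument from Thread::SetTopOfStack(): the test can no longer tell the walker 'you are in method_g_ at dex pc 3' directly, so it now materialises a fake kSaveAll callee-save frame at the top of its fake stack and stores method_g_'s return pc inside that frame, which is where the walker looks for it. A hypothetical helper (not part of this patch) that spells out the layout the test builds with push_back calls:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Layout of the fake frame, in uintptr_t-sized slots:
    //   slot 0           : the callee-save ArtMethod* (what sp[0] must hold)
    //   slots 1 .. n-2   : spill area, contents irrelevant to this test
    //   slot n-1         : return pc into the caller (method_g_ at dex pc 3)
    // where n = frame_size_in_bytes / sizeof(uintptr_t).
    static void PushFakeCalleeSaveFrame(std::vector<uintptr_t>* fake_stack,
                                        uintptr_t save_method,
                                        uintptr_t return_pc,
                                        size_t frame_size_in_bytes) {
      fake_stack->push_back(save_method);      // slot 0: the method the walker reports.
      const size_t word = sizeof(uintptr_t);
      for (size_t i = 0; i < frame_size_in_bytes - 2 * word; i += word) {
        fake_stack->push_back(0);              // spill slots between method and return pc.
      }
      fake_stack->push_back(return_pc);        // final slot: caller pc picked up by the walker.
    }

The size comes from save_method->GetQuickFrameInfo().FrameSizeInBytes() in the test; the helper only restates the same arithmetic with names.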