From 367f3dd32454858b8b25d87feb8f6599d3b4c9dd Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Thu, 1 Sep 2016 17:00:24 -0700 Subject: [PATCH] jni: Add @CriticalNative optimization to speed up JNI transitions Change-Id: I963059ac3a72dd8e6a867596c356d7062deb6da7 --- compiler/compiler.h | 5 +- compiler/driver/compiler_driver.cc | 17 +- compiler/jni/jni_cfi_test.cc | 7 +- compiler/jni/jni_compiler_test.cc | 1035 ++++++++++++++------ compiler/jni/quick/arm/calling_convention_arm.cc | 206 +++- compiler/jni/quick/arm/calling_convention_arm.h | 5 +- .../jni/quick/arm64/calling_convention_arm64.cc | 68 +- .../jni/quick/arm64/calling_convention_arm64.h | 5 +- compiler/jni/quick/calling_convention.cc | 238 +++-- compiler/jni/quick/calling_convention.h | 63 +- compiler/jni/quick/jni_compiler.cc | 471 +++++---- compiler/jni/quick/x86/calling_convention_x86.cc | 47 +- compiler/jni/quick/x86/calling_convention_x86.h | 6 +- .../jni/quick/x86_64/calling_convention_x86_64.cc | 79 +- .../jni/quick/x86_64/calling_convention_x86_64.h | 5 +- compiler/utils/arm/jni_macro_assembler_arm.cc | 18 +- compiler/utils/assembler_thumb_test.cc | 8 +- runtime/art_method.cc | 18 +- runtime/art_method.h | 6 + runtime/common_runtime_test.cc | 4 + runtime/common_runtime_test.h | 1 + runtime/well_known_classes.cc | 3 + runtime/well_known_classes.h | 1 + test/004-JniTest/jni_test.cc | 11 +- test/004-JniTest/src/Main.java | 18 + test/MyClassNatives/MyClassNatives.java | 237 ++++- 26 files changed, 1934 insertions(+), 648 deletions(-) diff --git a/compiler/compiler.h b/compiler/compiler.h index 9e5fb8312..ed42958a7 100644 --- a/compiler/compiler.h +++ b/compiler/compiler.h @@ -39,8 +39,9 @@ class Compiler { }; enum JniOptimizationFlags { - kNone, - kFastNative, + kNone = 0x0, + kFastNative = 0x1, + kCriticalNative = 0x2, }; static Compiler* Create(CompilerDriver* driver, Kind kind); diff --git a/compiler/driver/compiler_driver.cc b/compiler/driver/compiler_driver.cc index b5bc2fb11..daac7fbb9 100644 --- a/compiler/driver/compiler_driver.cc +++ b/compiler/driver/compiler_driver.cc @@ -616,17 +616,22 @@ static void CompileMethod(Thread* self, /* referrer */ nullptr, invoke_type); - bool fast_native = false; - if (LIKELY(method != nullptr)) { - fast_native = method->IsAnnotatedWithFastNative(); - } else { + // Query any JNI optimization annotations such as @FastNative or @CriticalNative. + Compiler::JniOptimizationFlags optimization_flags = Compiler::kNone; + if (UNLIKELY(method == nullptr)) { // Failed method resolutions happen very rarely, e.g. ancestor class cannot be resolved. DCHECK(self->IsExceptionPending()); self->ClearException(); + } else if (method->IsAnnotatedWithFastNative()) { + // TODO: Will no longer need this CHECK once we have verifier checking this. + CHECK(!method->IsAnnotatedWithCriticalNative()); + optimization_flags = Compiler::kFastNative; + } else if (method->IsAnnotatedWithCriticalNative()) { + // TODO: Will no longer need this CHECK once we have verifier checking this. + CHECK(!method->IsAnnotatedWithFastNative()); + optimization_flags = Compiler::kCriticalNative; } - Compiler::JniOptimizationFlags optimization_flags = - fast_native ? 
                                          Compiler::kFastNative : Compiler::kNone;
   compiled_method = driver->GetCompiler()->JniCompile(access_flags,
                                                       method_idx,
                                                       dex_file,
diff --git a/compiler/jni/jni_cfi_test.cc b/compiler/jni/jni_cfi_test.cc
index 4b056f552..28b7290be 100644
--- a/compiler/jni/jni_cfi_test.cc
+++ b/compiler/jni/jni_cfi_test.cc
@@ -64,7 +64,12 @@ class JNICFITest : public CFITest {
     ArenaAllocator arena(&pool);
 
     std::unique_ptr<JniCallingConvention> jni_conv(
-        JniCallingConvention::Create(&arena, is_static, is_synchronized, shorty, isa));
+        JniCallingConvention::Create(&arena,
+                                     is_static,
+                                     is_synchronized,
+                                     /*is_critical_native*/false,
+                                     shorty,
+                                     isa));
     std::unique_ptr<ManagedRuntimeCallingConvention> mr_conv(
         ManagedRuntimeCallingConvention::Create(&arena, is_static, is_synchronized, shorty, isa));
     const int frame_size(jni_conv->FrameSize());
diff --git a/compiler/jni/jni_compiler_test.cc b/compiler/jni/jni_compiler_test.cc
index b83985a77..cdd4c6847 100644
--- a/compiler/jni/jni_compiler_test.cc
+++ b/compiler/jni/jni_compiler_test.cc
@@ -15,12 +15,14 @@
  */
 
 #include <math.h>
+#include <type_traits>
 #include <memory>
 
 #include "art_method-inl.h"
 #include "class_linker.h"
 #include "common_compiler_test.h"
+#include "compiler.h"
 #include "dex_file.h"
 #include "gtest/gtest.h"
 #include "indirect_reference_table.h"
@@ -47,6 +49,171 @@ extern "C" JNIEXPORT jint JNICALL Java_MyClassNatives_sbar(JNIEnv*, jclass, jint
 
 namespace art {
 
+enum class JniKind {
+  kNormal   = Compiler::kNone,               // Regular kind of un-annotated natives.
+  kFast     = Compiler::kFastNative,         // Native method annotated with @FastNative.
+  kCritical = Compiler::kCriticalNative,     // Native method annotated with @CriticalNative.
+  kCount    = Compiler::kCriticalNative + 1  // How many different types of JNIs we can have.
+};
+
+// Used to initialize array sizes that want to have different state per current jni.
+static constexpr size_t kJniKindCount = static_cast<size_t>(JniKind::kCount);
+// Do not use directly, use the helpers instead.
+uint32_t gCurrentJni = static_cast<uint32_t>(JniKind::kNormal);
+
+// Is the current native method under test @CriticalNative?
+static bool IsCurrentJniCritical() {
+  return gCurrentJni == static_cast<uint32_t>(JniKind::kCritical);
+}
+
+// Is the current native method a plain-old non-annotated native?
+static bool IsCurrentJniNormal() {
+  return gCurrentJni == static_cast<uint32_t>(JniKind::kNormal);
+}
+
+// Signify that a different kind of JNI is about to be tested.
+static void UpdateCurrentJni(JniKind kind) {
+  gCurrentJni = static_cast<uint32_t>(kind);
+}
+
+// (Match the name suffixes of native methods in MyClassNatives.java)
+static std::string CurrentJniStringSuffix() {
+  switch (gCurrentJni) {
+    case static_cast<uint32_t>(JniKind::kNormal): {
+      return "";
+    }
+    case static_cast<uint32_t>(JniKind::kFast): {
+      return "_Fast";
+    }
+    case static_cast<uint32_t>(JniKind::kCritical): {
+      return "_Critical";
+    }
+    default:
+      LOG(FATAL) << "Invalid current JNI value: " << gCurrentJni;
+      UNREACHABLE();
+  }
+}
+
+// Dummy values passed to our JNI handlers when we enter @CriticalNative.
+// Normally @CriticalNative calling convention strips out the "JNIEnv*, jclass" parameters.
+// However to avoid duplicating every single test method we have a templated handler
+// that inserts dummy parameters (0,1) to make it compatible with a regular JNI handler.
+static JNIEnv* const kCriticalDummyJniEnv = reinterpret_cast<JNIEnv*>(0xDEADFEAD);
+static jclass const kCriticalDummyJniClass = reinterpret_cast<jclass>(0xBEAFBEEF);
+
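To make the dummy-parameter arrangement concrete: under @CriticalNative the native
function is invoked without the usual leading JNIEnv*/jclass pair. A minimal sketch
(illustrative only, not part of this patch; the method names are hypothetical):

    // Normal JNI and @FastNative: the handler receives JNIEnv* plus the jclass
    // (static methods) or the jobject receiver (instance methods) before the
    // declared arguments.
    extern "C" jint Java_MyClassNatives_addI(JNIEnv*, jclass, jint a, jint b) {
      return a + b;
    }

    // @CriticalNative: the JNIEnv*/jclass pair is stripped from the calling
    // convention, so only the primitive arguments remain.
    extern "C" jint Java_MyClassNatives_addI_Critical(jint a, jint b) {
      return a + b;
    }

The machinery below bridges the two shapes so one test body can serve every kind: in
@CriticalNative mode the registered entrypoint has the short signature and forwards to
the shared (JNIEnv*, jclass, ...) handler with kCriticalDummyJniEnv and
kCriticalDummyJniClass filled in.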
+// Type trait. Returns true if "T" is the same type as one of the types in Args...
+//
+// Logically equal to OR(std::is_same<T, U> for all U in Args).
+template <typename T, typename ... Args>
+struct is_any_of;
+
+template <typename T, typename First, typename ... Args>
+struct is_any_of<T, First, Args ...> {
+  using value_type = bool;
+  static constexpr const bool value = std::is_same<T, First>::value || is_any_of<T, Args ...>::value;
+};
+
+template <typename T, typename First>
+struct is_any_of<T, First> {
+  using value_type = bool;
+  static constexpr const bool value = std::is_same<T, First>::value;
+};
+
+// Type traits for JNI types.
+template <typename T>
+struct jni_type_traits {
+  // True if type T ends up holding an object reference. False otherwise.
+  // (Non-JNI types will also be false).
+  static constexpr const bool is_ref =
+      is_any_of<T, jclass, jobject, jstring, jobjectArray, jintArray,
+                jcharArray, jfloatArray, jshortArray, jdoubleArray, jlongArray>::value;
+};
+
+template <typename ... Args>
+struct count_refs_helper {
+  using value_type = size_t;
+  static constexpr const size_t value = 0;
+};
+
+template <typename Arg, typename ... Args>
+struct count_refs_helper<Arg, Args ...> {
+  using value_type = size_t;
+  static constexpr size_t value =
+      (jni_type_traits<Arg>::is_ref ? 1 : 0) + count_refs_helper<Args ...>::value;
+};
+
+template <typename T, T fn>
+struct count_refs_fn_helper;
+
+template <typename R, typename ... Args, R fn(Args...)>
+struct count_refs_fn_helper<R(Args...), fn> : public count_refs_helper<Args...> {};
+
+// Given a function type 'T' figure out how many of the parameter types are a reference.
+// -- The implicit jclass and thisObject also count as 1 reference.
+//
+// Fields:
+// * value - the result counting # of refs
+// * value_type - the type of value (size_t)
+template <typename T, T fn>
+struct count_refs : public count_refs_fn_helper<T, fn> {};
+
+// Base case: No parameters = 0 refs.
+size_t count_nonnull_refs_helper() {
+  return 0;
+}
+
+// SFINAE for ref types. 1 if non-null, 0 otherwise.
+template <typename T>
+size_t count_nonnull_refs_single_helper(T arg,
+                                        typename std::enable_if<jni_type_traits<T>::is_ref>::type*
+                                            = nullptr) {
+  return ((arg == NULL) ? 0 : 1);
+}
+
+// SFINAE for non-ref-types. Always 0.
+template <typename T>
+size_t count_nonnull_refs_single_helper(T arg ATTRIBUTE_UNUSED,
+                                        typename std::enable_if<!jni_type_traits<T>::is_ref>::type*
+                                            = nullptr) {
+  return 0;
+}
+
+// Recursive case.
+template <typename T, typename ... Args>
+size_t count_nonnull_refs_helper(T arg, Args ... args) {
+  return count_nonnull_refs_single_helper(arg) + count_nonnull_refs_helper(args...);
+}
+
+// Given any list of parameters, check how many object refs there are and only count
+// them if their runtime value is non-null.
+//
+// For example given (jobject, jint, jclass) we can get (2) if both #0/#2 are non-null,
+// (1) if either #0/#2 are null but not both, and (0) if all parameters are null.
+// Primitive parameters (including JNIEnv*, if present) are ignored.
+template <typename ... Args>
+size_t count_nonnull_refs(Args ... args) {
+  return count_nonnull_refs_helper(args...);
+}
+
+template <typename T, T fn>
+struct remove_extra_parameters_helper;
+
+template <typename R, typename Arg1, typename Arg2, typename ... Args, R fn(Arg1, Arg2, Args...)>
+struct remove_extra_parameters_helper<R(Arg1, Arg2, Args...), fn> {
+  // Note: Do not use Args&& here to maintain C-style parameter types.
+  static R apply(Args... args) {
+    JNIEnv* env = kCriticalDummyJniEnv;
+    jclass kls = kCriticalDummyJniClass;
+    return fn(env, kls, args...);
+  }
+};
+
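A quick illustration of what these traits compute (a sketch only; the handler below is
hypothetical and not part of the patch):

    jobject fake_handler(JNIEnv*, jobject, jint, jobject);

    // jni_type_traits flags JNI reference types; primitives and JNIEnv* are not refs.
    static_assert(jni_type_traits<jobject>::is_ref, "jobject holds a reference");
    static_assert(!jni_type_traits<jint>::is_ref, "jint is a primitive");

    // count_refs walks the whole signature at compile time: the jobject receiver
    // and the trailing jobject count as references, JNIEnv* and jint do not.
    static_assert(count_refs<decltype(fake_handler), fake_handler>::value == 2u,
                  "two reference parameters");

count_nonnull_refs() is the runtime counterpart: called with the actual argument values,
it counts only the references that are non-null, which is what the handle-scope checks
further below compare against.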
+// Given a function 'fn' create a function 'apply' which will omit the JNIEnv/jclass parameters
+//
+// i.e. if fn(JNIEnv*,jclass,a,b,c,d,e...) then apply(a,b,c,d,e,...)
+template <typename T, T fn>
+struct jni_remove_extra_parameters : public remove_extra_parameters_helper<T, fn> {};
+
 class JniCompilerTest : public CommonCompilerTest {
  protected:
   void SetUp() OVERRIDE {
@@ -63,8 +230,11 @@ class JniCompilerTest : public CommonCompilerTest {
     check_generic_jni_ = generic;
   }
 
-  void CompileForTest(jobject class_loader, bool direct,
-                      const char* method_name, const char* method_sig) {
+ private:
+  void CompileForTest(jobject class_loader,
+                      bool direct,
+                      const char* method_name,
+                      const char* method_sig) {
     ScopedObjectAccess soa(Thread::Current());
     StackHandleScope<1> hs(soa.Self());
     Handle<mirror::ClassLoader> loader(
@@ -87,8 +257,28 @@
     }
   }
 
-  void SetUpForTest(bool direct, const char* method_name, const char* method_sig,
+ protected:
+  void CompileForTestWithCurrentJni(jobject class_loader,
+                                    bool direct,
+                                    const char* method_name_orig,
+                                    const char* method_sig) {
+    // Append the JNI kind to the method name, so that we automatically get the
+    // fast or critical versions of the same method.
+    std::string method_name_str = std::string(method_name_orig) + CurrentJniStringSuffix();
+    const char* method_name = method_name_str.c_str();
+
+    CompileForTest(class_loader, direct, method_name, method_sig);
+  }
+
+  void SetUpForTest(bool direct,
+                    const char* method_name_orig,
+                    const char* method_sig,
                     void* native_fnptr) {
+    // Append the JNI kind to the method name, so that we automatically get the
+    // fast or critical versions of the same method.
+    std::string method_name_str = std::string(method_name_orig) + CurrentJniStringSuffix();
+    const char* method_name = method_name_str.c_str();
+
     // Initialize class loader and compile method when runtime not started.
     if (!runtime_->IsStarted()) {
       {
@@ -129,6 +319,7 @@
   }
 
  public:
+  // Available as statics so our JNI handlers can access these.
   static jclass jklass_;
   static jobject jobj_;
   static jobject class_loader_;
@@ -151,6 +342,8 @@
   void RunStaticReturnTrueImpl();
   void RunStaticReturnFalseImpl();
   void RunGenericStaticReturnIntImpl();
+  void RunGenericStaticReturnDoubleImpl();
+  void RunGenericStaticReturnLongImpl();
   void CompileAndRunStaticIntObjectObjectMethodImpl();
   void CompileAndRunStaticSynchronizedIntObjectObjectMethodImpl();
   void ExceptionHandlingImpl();
@@ -177,10 +370,13 @@
 
   void NormalNativeImpl();
   void FastNativeImpl();
+  void CriticalNativeImpl();
 
   JNIEnv* env_;
   jstring library_search_path_;
   jmethodID jmethod_;
+
+ private:
   bool check_generic_jni_;
 };
 
@@ -188,46 +384,238 @@
 jclass JniCompilerTest::jklass_;
 jobject JniCompilerTest::jobj_;
 jobject JniCompilerTest::class_loader_;
 
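The name suffixing in SetUpForTest()/CompileForTestWithCurrentJni() is what ties a single
C++ test body to three differently-annotated Java declarations. A sketch of the
convention (illustrative; the method name is hypothetical):

    // With the current kind set to @CriticalNative...
    UpdateCurrentJni(JniKind::kCritical);
    // ...a request for "fooI" resolves to the suffixed declaration:
    std::string method_name = std::string("fooI") + CurrentJniStringSuffix();
    // method_name == "fooI_Critical", so the @CriticalNative variant in
    // MyClassNatives.java gets compiled and registered instead of the
    // plain un-annotated native fooI.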
-#define JNI_TEST(TestName) \
+// Test the normal compiler and normal generic JNI only.
+// The following features are unsupported in @FastNative:
+// 1) JNI stubs (lookup via dlsym) when methods aren't explicitly registered
+// 2) Returning objects from the JNI function
+// 3) synchronized keyword
+// -- TODO: We can support (1) if we remove the mutator lock assert during stub lookup.
+#define JNI_TEST_NORMAL_ONLY(TestName) \
   TEST_F(JniCompilerTest, TestName ## Default) { \
+    SCOPED_TRACE("Normal JNI with compiler"); \
+    gCurrentJni = static_cast<uint32_t>(JniKind::kNormal); \
     TestName ## Impl(); \
   } \
-  \
   TEST_F(JniCompilerTest, TestName ## Generic) { \
+    SCOPED_TRACE("Normal JNI with generic"); \
+    gCurrentJni = static_cast<uint32_t>(JniKind::kNormal); \
     TEST_DISABLED_FOR_MIPS(); \
     SetCheckGenericJni(true); \
     TestName ## Impl(); \
   }
 
-int gJava_MyClassNatives_foo_calls = 0;
-void Java_MyClassNatives_foo(JNIEnv* env, jobject thisObj) {
-  // 1 = thisObj
-  EXPECT_EQ(kNative, Thread::Current()->GetState());
-  Locks::mutator_lock_->AssertNotHeld(Thread::Current());
-  EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
-  EXPECT_TRUE(thisObj != nullptr);
-  EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
-  gJava_MyClassNatives_foo_calls++;
-  ScopedObjectAccess soa(Thread::Current());
-  EXPECT_EQ(1U, Thread::Current()->NumStackReferences());
+// Test normal compiler, @FastNative compiler, and normal/@FastNative generic for normal natives.
+#define JNI_TEST(TestName) \
+  JNI_TEST_NORMAL_ONLY(TestName) \
+  TEST_F(JniCompilerTest, TestName ## Fast) { \
+    SCOPED_TRACE("@FastNative JNI with compiler"); \
+    gCurrentJni = static_cast<uint32_t>(JniKind::kFast); \
+    TestName ## Impl(); \
+  } \
+  \
+
+// TODO: maybe. @FastNative generic JNI support?
+#if 0
+  TEST_F(JniCompilerTest, TestName ## FastGeneric) { \
+    gCurrentJni = static_cast<uint32_t>(JniKind::kFast); \
+    TEST_DISABLED_FOR_MIPS(); \
+    SetCheckGenericJni(true); \
+    TestName ## Impl(); \
+  }
+#endif
+
+#define JNI_TEST_CRITICAL_ONLY(TestName) \
+  TEST_F(JniCompilerTest, TestName ## DefaultCritical) { \
+    SCOPED_TRACE("@CriticalNative JNI with compiler"); \
+    gCurrentJni = static_cast<uint32_t>(JniKind::kCritical); \
+    TestName ## Impl(); \
+  }
+
+// Test everything above and also the @CriticalNative compiler, and @CriticalNative generic JNI.
+#define JNI_TEST_CRITICAL(TestName) \
+  JNI_TEST(TestName) \
+  JNI_TEST_CRITICAL_ONLY(TestName) \
+
+// TODO: maybe, more likely since calling convention changed. @CriticalNative generic JNI support?
+#if 0
+  TEST_F(JniCompilerTest, TestName ## GenericCritical) { \
+    gCurrentJni = static_cast<uint32_t>(JniKind::kCritical); \
+    TestName ## Impl(); \
+  }
+#endif
+
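For orientation, the macro layering means a single JNI_TEST_CRITICAL(Foo) declaration
now produces four gtest cases, one per supported configuration (a sketch of the
expansion, not literal preprocessor output):

    TEST_F(JniCompilerTest, FooDefault)          // normal JNI, compiled stub
    TEST_F(JniCompilerTest, FooGeneric)          // normal JNI, generic JNI trampoline
    TEST_F(JniCompilerTest, FooFast)             // @FastNative, compiled stub
    TEST_F(JniCompilerTest, FooDefaultCritical)  // @CriticalNative, compiled stub

Each case sets gCurrentJni before running the shared FooImpl() body; JNI_TEST(Foo)
stops after the first three, and JNI_TEST_NORMAL_ONLY(Foo) after the first two.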
+static void expectValidThreadState() {
+  // Normal JNI always transitions to "Native". Other JNIs stay in the "Runnable" state.
+  if (IsCurrentJniNormal()) {
+    EXPECT_EQ(kNative, Thread::Current()->GetState());
+  } else {
+    EXPECT_EQ(kRunnable, Thread::Current()->GetState());
+  }
+}
+
+#define EXPECT_THREAD_STATE_FOR_CURRENT_JNI() expectValidThreadState()
+
+static void expectValidMutatorLockHeld() {
+  if (IsCurrentJniNormal()) {
+    Locks::mutator_lock_->AssertNotHeld(Thread::Current());
+  } else {
+    Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
+  }
+}
+
+#define EXPECT_MUTATOR_LOCK_FOR_CURRENT_JNI() expectValidMutatorLockHeld()
+
+static void expectValidJniEnvAndObject(JNIEnv* env, jobject thisObj) {
+  if (!IsCurrentJniCritical()) {
+    EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
+    ASSERT_TRUE(thisObj != nullptr);
+    EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
+  } else {
+    LOG(FATAL) << "Objects are not supported for @CriticalNative, why is this being tested?";
+    UNREACHABLE();
+  }
+}
+
+// Validates the JNIEnv to be the same as the current thread's JNIEnv, and makes sure
+// that the object here is an instance of the class we registered the method with.
+//
+// Hard-fails if this somehow gets invoked for @CriticalNative since objects are unsupported.
+#define EXPECT_JNI_ENV_AND_OBJECT_FOR_CURRENT_JNI(env, thisObj) \
+    expectValidJniEnvAndObject(env, thisObj)
+
+static void expectValidJniEnvAndClass(JNIEnv* env, jclass kls) {
+  if (!IsCurrentJniCritical()) {
+    EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
+    ASSERT_TRUE(kls != nullptr);
+    EXPECT_TRUE(env->IsSameObject(static_cast<jobject>(JniCompilerTest::jklass_),
+                                  static_cast<jobject>(kls)));
+  } else {
+    // This is pretty much vacuously true but catch any testing setup mistakes.
+    EXPECT_EQ(env, kCriticalDummyJniEnv);
+    EXPECT_EQ(kls, kCriticalDummyJniClass);
+  }
+}
+
+// Validates the JNIEnv is the same as the current thread's JNIEnv, and makes sure
+// that the jclass we got in the JNI handler is the same one as the class the method was looked
+// up for.
+//
+// (Checks are skipped for @CriticalNative since the two values are dummy).
+#define EXPECT_JNI_ENV_AND_CLASS_FOR_CURRENT_JNI(env, kls) expectValidJniEnvAndClass(env, kls)
+
+// Temporarily disable the EXPECT_NUM_STACK_REFERENCES check (for a single test).
+struct ScopedDisableCheckNumStackReferences {
+  ScopedDisableCheckNumStackReferences() {
+    sCheckNumStackReferences = false;
+  }
+
+  ~ScopedDisableCheckNumStackReferences() {
+    sCheckNumStackReferences = true;
+  }
+
+  static bool sCheckNumStackReferences;
+};
+
+bool ScopedDisableCheckNumStackReferences::sCheckNumStackReferences = true;
+
+static void expectNumStackReferences(size_t val1, size_t val2) {
+  // In rare cases when JNI functions call themselves recursively,
+  // disable this test because it will have a false negative.
+  if (!IsCurrentJniCritical() && ScopedDisableCheckNumStackReferences::sCheckNumStackReferences) {
+    /* @CriticalNative doesn't build a HandleScope, so this test is meaningless then. */
+    ScopedObjectAccess soa(Thread::Current());
+
+    size_t actual_num = Thread::Current()->NumStackReferences();
+    // XX: Not too sure what's going on.
+    // Sometimes null references get placed and sometimes they don't?
+    EXPECT_TRUE(val1 == actual_num || val2 == actual_num)
+        << "expected either " << val1 << " or " << val2
+        << " number of stack references, but got: " << actual_num;
+  }
+}
+
+#define EXPECT_NUM_STACK_REFERENCES(val1, val2) expectNumStackReferences(val1, val2)
+
+template <typename T, T fn>
+struct make_jni_test_decorator;
+
+// Decorator for "static" JNI callbacks.
+template <typename R, typename ... Args, R fn(JNIEnv*, jclass, Args...)>
+struct make_jni_test_decorator<R(JNIEnv*, jclass, Args...), fn> {
+  static R apply(JNIEnv* env, jclass kls, Args ... args) {
+    EXPECT_THREAD_STATE_FOR_CURRENT_JNI();
+    EXPECT_MUTATOR_LOCK_FOR_CURRENT_JNI();
+    EXPECT_JNI_ENV_AND_CLASS_FOR_CURRENT_JNI(env, kls);
+    // All incoming parameters + the jclass get put into the transition's StackHandleScope.
+    EXPECT_NUM_STACK_REFERENCES(count_nonnull_refs(kls, args...),
+                                (count_refs_helper<jclass, Args...>::value));
+
+    return fn(env, kls, args...);
+  }
+};
+
+// Decorator for instance JNI callbacks.
+template <typename R, typename ... Args, R fn(JNIEnv*, jobject, Args...)>
+struct make_jni_test_decorator<R(JNIEnv*, jobject, Args...), fn> {
+  static R apply(JNIEnv* env, jobject thisObj, Args ... args) {
+    EXPECT_THREAD_STATE_FOR_CURRENT_JNI();
+    EXPECT_MUTATOR_LOCK_FOR_CURRENT_JNI();
+    EXPECT_JNI_ENV_AND_OBJECT_FOR_CURRENT_JNI(env, thisObj);
+    // All incoming parameters + the implicit 'this' get put into the transition's StackHandleScope.
+    EXPECT_NUM_STACK_REFERENCES(count_nonnull_refs(thisObj, args...),
+                                (count_refs_helper<jobject, Args...>::value));
+
+    return fn(env, thisObj, args...);
+  }
+};
+
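To see how the specializations above are selected, here is a sketch of a manual
instantiation (the handler is hypothetical and not part of the patch); the
CURRENT_JNI_WRAPPER macro defined next does the same thing via decltype:

    jint fake_static_handler(JNIEnv*, jclass, jint);

    // The function type deduces R = jint and Args... = (jint), and the jclass
    // second parameter selects the "static" specialization. 'apply' then has
    // the same C signature as the original handler, but runs the EXPECT_*
    // checks before forwarding to it.
    using decorator = make_jni_test_decorator<decltype(fake_static_handler),
                                              fake_static_handler>;
    void* wrapped = reinterpret_cast<void*>(&decorator::apply);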
+// Decorate the regular JNI callee with the extra gtest checks.
+// This way we can have common test logic for everything generic like checking if a lock is held,
+// checking handle scope state, etc.
+#define MAKE_JNI_TEST_DECORATOR(fn) make_jni_test_decorator<decltype(fn), (fn)>::apply
+
+// Convert function f(JNIEnv*,jclass,a,b,c,d...) into f2(a,b,c,d...)
+// -- This way we don't have to write out each implementation twice for @CriticalNative.
+#define JNI_CRITICAL_WRAPPER(func) jni_remove_extra_parameters<decltype(func), (func)>::apply
+// Get a function pointer whose calling convention either matches a regular native
+// or a critical native depending on which kind of jni is currently under test.
+// -- This also has the benefit of generating a compile time error if the 'func' doesn't properly
+//    have JNIEnv and jclass parameters first.
+#define CURRENT_JNI_WRAPPER(func) \
+    (IsCurrentJniCritical() \
+         ? reinterpret_cast<void*>(&JNI_CRITICAL_WRAPPER(MAKE_JNI_TEST_DECORATOR(func))) \
+         : reinterpret_cast<void*>(&MAKE_JNI_TEST_DECORATOR(func)))
+
+// Do the opposite of the above. Do *not* wrap the function, instead just cast it to a void*.
+// Only for "TEST_JNI_NORMAL_ONLY" configs, and it inserts a test assert to ensure this is the case.
+#define NORMAL_JNI_ONLY_NOWRAP(func) \
+    ({ ASSERT_TRUE(IsCurrentJniNormal()); reinterpret_cast<void*>(&(func)); })
+// Same as above, but with nullptr. When we want to test the stub functionality.
+#define NORMAL_JNI_ONLY_NULLPTR \
+    ({ ASSERT_TRUE(IsCurrentJniNormal()); nullptr; })
+
+
+int gJava_MyClassNatives_foo_calls[kJniKindCount] = {};
+void Java_MyClassNatives_foo(JNIEnv*, jobject) {
+  gJava_MyClassNatives_foo_calls[gCurrentJni]++;
 }
 
 void JniCompilerTest::CompileAndRunNoArgMethodImpl() {
-  SetUpForTest(false, "foo", "()V", reinterpret_cast<void*>(&Java_MyClassNatives_foo));
+  SetUpForTest(false, "foo", "()V", CURRENT_JNI_WRAPPER(Java_MyClassNatives_foo));
 
-  EXPECT_EQ(0, gJava_MyClassNatives_foo_calls);
+  EXPECT_EQ(0, gJava_MyClassNatives_foo_calls[gCurrentJni]);
   env_->CallNonvirtualVoidMethod(jobj_, jklass_, jmethod_);
-  EXPECT_EQ(1, gJava_MyClassNatives_foo_calls);
+  EXPECT_EQ(1, gJava_MyClassNatives_foo_calls[gCurrentJni]);
   env_->CallNonvirtualVoidMethod(jobj_, jklass_, jmethod_);
-  EXPECT_EQ(2, gJava_MyClassNatives_foo_calls);
+  EXPECT_EQ(2, gJava_MyClassNatives_foo_calls[gCurrentJni]);
 
-  gJava_MyClassNatives_foo_calls = 0;
+  gJava_MyClassNatives_foo_calls[gCurrentJni] = 0;
 }
 
 JNI_TEST(CompileAndRunNoArgMethod)
 
 void JniCompilerTest::CompileAndRunIntMethodThroughStubImpl() {
-  SetUpForTest(false, "bar", "(I)I", nullptr);
+  SetUpForTest(false, "bar", "(I)I", NORMAL_JNI_ONLY_NULLPTR);
   // calling through stub will link with &Java_MyClassNatives_bar
 
   std::string reason;
@@ -239,10 +627,11 @@ void JniCompilerTest::CompileAndRunIntMethodThroughStubImpl() {
   EXPECT_EQ(25, result);
 }
 
-JNI_TEST(CompileAndRunIntMethodThroughStub)
+// TODO: Support @FastNative and @CriticalNative through stubs.
+JNI_TEST_NORMAL_ONLY(CompileAndRunIntMethodThroughStub)
 
 void JniCompilerTest::CompileAndRunStaticIntMethodThroughStubImpl() {
-  SetUpForTest(true, "sbar", "(I)I", nullptr);
+  SetUpForTest(true, "sbar", "(I)I", NORMAL_JNI_ONLY_NULLPTR);
   // calling through stub will link with &Java_MyClassNatives_sbar
 
   std::string reason;
@@ -254,174 +643,131 @@
   EXPECT_EQ(43, result);
 }
 
-JNI_TEST(CompileAndRunStaticIntMethodThroughStub)
+// TODO: Support @FastNative and @CriticalNative through stubs.
+JNI_TEST_NORMAL_ONLY(CompileAndRunStaticIntMethodThroughStub) -int gJava_MyClassNatives_fooI_calls = 0; -jint Java_MyClassNatives_fooI(JNIEnv* env, jobject thisObj, jint x) { - // 1 = thisObj - EXPECT_EQ(kNative, Thread::Current()->GetState()); - EXPECT_EQ(Thread::Current()->GetJniEnv(), env); - EXPECT_TRUE(thisObj != nullptr); - EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_)); - gJava_MyClassNatives_fooI_calls++; - ScopedObjectAccess soa(Thread::Current()); - EXPECT_EQ(1U, Thread::Current()->NumStackReferences()); +int gJava_MyClassNatives_fooI_calls[kJniKindCount] = {}; +jint Java_MyClassNatives_fooI(JNIEnv*, jobject, jint x) { + gJava_MyClassNatives_fooI_calls[gCurrentJni]++; return x; } void JniCompilerTest::CompileAndRunIntMethodImpl() { SetUpForTest(false, "fooI", "(I)I", - reinterpret_cast(&Java_MyClassNatives_fooI)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_fooI)); - EXPECT_EQ(0, gJava_MyClassNatives_fooI_calls); + EXPECT_EQ(0, gJava_MyClassNatives_fooI_calls[gCurrentJni]); jint result = env_->CallNonvirtualIntMethod(jobj_, jklass_, jmethod_, 42); EXPECT_EQ(42, result); - EXPECT_EQ(1, gJava_MyClassNatives_fooI_calls); + EXPECT_EQ(1, gJava_MyClassNatives_fooI_calls[gCurrentJni]); result = env_->CallNonvirtualIntMethod(jobj_, jklass_, jmethod_, 0xCAFED00D); EXPECT_EQ(static_cast(0xCAFED00D), result); - EXPECT_EQ(2, gJava_MyClassNatives_fooI_calls); + EXPECT_EQ(2, gJava_MyClassNatives_fooI_calls[gCurrentJni]); - gJava_MyClassNatives_fooI_calls = 0; + gJava_MyClassNatives_fooI_calls[gCurrentJni] = 0; } JNI_TEST(CompileAndRunIntMethod) -int gJava_MyClassNatives_fooII_calls = 0; -jint Java_MyClassNatives_fooII(JNIEnv* env, jobject thisObj, jint x, jint y) { - // 1 = thisObj - EXPECT_EQ(kNative, Thread::Current()->GetState()); - EXPECT_EQ(Thread::Current()->GetJniEnv(), env); - EXPECT_TRUE(thisObj != nullptr); - EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_)); - gJava_MyClassNatives_fooII_calls++; - ScopedObjectAccess soa(Thread::Current()); - EXPECT_EQ(1U, Thread::Current()->NumStackReferences()); +int gJava_MyClassNatives_fooII_calls[kJniKindCount] = {}; +jint Java_MyClassNatives_fooII(JNIEnv*, jobject, jint x, jint y) { + gJava_MyClassNatives_fooII_calls[gCurrentJni]++; return x - y; // non-commutative operator } void JniCompilerTest::CompileAndRunIntIntMethodImpl() { SetUpForTest(false, "fooII", "(II)I", - reinterpret_cast(&Java_MyClassNatives_fooII)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_fooII)); - EXPECT_EQ(0, gJava_MyClassNatives_fooII_calls); + EXPECT_EQ(0, gJava_MyClassNatives_fooII_calls[gCurrentJni]); jint result = env_->CallNonvirtualIntMethod(jobj_, jklass_, jmethod_, 99, 10); EXPECT_EQ(99 - 10, result); - EXPECT_EQ(1, gJava_MyClassNatives_fooII_calls); + EXPECT_EQ(1, gJava_MyClassNatives_fooII_calls[gCurrentJni]); result = env_->CallNonvirtualIntMethod(jobj_, jklass_, jmethod_, 0xCAFEBABE, 0xCAFED00D); EXPECT_EQ(static_cast(0xCAFEBABE - 0xCAFED00D), result); - EXPECT_EQ(2, gJava_MyClassNatives_fooII_calls); + EXPECT_EQ(2, gJava_MyClassNatives_fooII_calls[gCurrentJni]); - gJava_MyClassNatives_fooII_calls = 0; + gJava_MyClassNatives_fooII_calls[gCurrentJni] = 0; } JNI_TEST(CompileAndRunIntIntMethod) -int gJava_MyClassNatives_fooJJ_calls = 0; -jlong Java_MyClassNatives_fooJJ(JNIEnv* env, jobject thisObj, jlong x, jlong y) { - // 1 = thisObj - EXPECT_EQ(kNative, Thread::Current()->GetState()); - EXPECT_EQ(Thread::Current()->GetJniEnv(), env); - EXPECT_TRUE(thisObj != nullptr); - EXPECT_TRUE(env->IsInstanceOf(thisObj, 
JniCompilerTest::jklass_)); - gJava_MyClassNatives_fooJJ_calls++; - ScopedObjectAccess soa(Thread::Current()); - EXPECT_EQ(1U, Thread::Current()->NumStackReferences()); +int gJava_MyClassNatives_fooJJ_calls[kJniKindCount] = {}; +jlong Java_MyClassNatives_fooJJ(JNIEnv*, jobject, jlong x, jlong y) { + gJava_MyClassNatives_fooJJ_calls[gCurrentJni]++; return x - y; // non-commutative operator } void JniCompilerTest::CompileAndRunLongLongMethodImpl() { SetUpForTest(false, "fooJJ", "(JJ)J", - reinterpret_cast(&Java_MyClassNatives_fooJJ)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_fooJJ)); - EXPECT_EQ(0, gJava_MyClassNatives_fooJJ_calls); + EXPECT_EQ(0, gJava_MyClassNatives_fooJJ_calls[gCurrentJni]); jlong a = INT64_C(0x1234567890ABCDEF); jlong b = INT64_C(0xFEDCBA0987654321); jlong result = env_->CallNonvirtualLongMethod(jobj_, jklass_, jmethod_, a, b); EXPECT_EQ(a - b, result); - EXPECT_EQ(1, gJava_MyClassNatives_fooJJ_calls); + EXPECT_EQ(1, gJava_MyClassNatives_fooJJ_calls[gCurrentJni]); result = env_->CallNonvirtualLongMethod(jobj_, jklass_, jmethod_, b, a); EXPECT_EQ(b - a, result); - EXPECT_EQ(2, gJava_MyClassNatives_fooJJ_calls); + EXPECT_EQ(2, gJava_MyClassNatives_fooJJ_calls[gCurrentJni]); - gJava_MyClassNatives_fooJJ_calls = 0; + gJava_MyClassNatives_fooJJ_calls[gCurrentJni] = 0; } JNI_TEST(CompileAndRunLongLongMethod) -int gJava_MyClassNatives_fooDD_calls = 0; -jdouble Java_MyClassNatives_fooDD(JNIEnv* env, jobject thisObj, jdouble x, jdouble y) { - // 1 = thisObj - EXPECT_EQ(kNative, Thread::Current()->GetState()); - EXPECT_EQ(Thread::Current()->GetJniEnv(), env); - EXPECT_TRUE(thisObj != nullptr); - EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_)); - gJava_MyClassNatives_fooDD_calls++; - ScopedObjectAccess soa(Thread::Current()); - EXPECT_EQ(1U, Thread::Current()->NumStackReferences()); +int gJava_MyClassNatives_fooDD_calls[kJniKindCount] = {}; +jdouble Java_MyClassNatives_fooDD(JNIEnv*, jobject, jdouble x, jdouble y) { + gJava_MyClassNatives_fooDD_calls[gCurrentJni]++; return x - y; // non-commutative operator } void JniCompilerTest::CompileAndRunDoubleDoubleMethodImpl() { SetUpForTest(false, "fooDD", "(DD)D", - reinterpret_cast(&Java_MyClassNatives_fooDD)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_fooDD)); - EXPECT_EQ(0, gJava_MyClassNatives_fooDD_calls); + EXPECT_EQ(0, gJava_MyClassNatives_fooDD_calls[gCurrentJni]); jdouble result = env_->CallNonvirtualDoubleMethod(jobj_, jklass_, jmethod_, 99.0, 10.0); EXPECT_DOUBLE_EQ(99.0 - 10.0, result); - EXPECT_EQ(1, gJava_MyClassNatives_fooDD_calls); + EXPECT_EQ(1, gJava_MyClassNatives_fooDD_calls[gCurrentJni]); jdouble a = 3.14159265358979323846; jdouble b = 0.69314718055994530942; result = env_->CallNonvirtualDoubleMethod(jobj_, jklass_, jmethod_, a, b); EXPECT_DOUBLE_EQ(a - b, result); - EXPECT_EQ(2, gJava_MyClassNatives_fooDD_calls); + EXPECT_EQ(2, gJava_MyClassNatives_fooDD_calls[gCurrentJni]); - gJava_MyClassNatives_fooDD_calls = 0; + gJava_MyClassNatives_fooDD_calls[gCurrentJni] = 0; } -int gJava_MyClassNatives_fooJJ_synchronized_calls = 0; -jlong Java_MyClassNatives_fooJJ_synchronized(JNIEnv* env, jobject thisObj, jlong x, jlong y) { - // 1 = thisObj - EXPECT_EQ(kNative, Thread::Current()->GetState()); - EXPECT_EQ(Thread::Current()->GetJniEnv(), env); - EXPECT_TRUE(thisObj != nullptr); - EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_)); - gJava_MyClassNatives_fooJJ_synchronized_calls++; - ScopedObjectAccess soa(Thread::Current()); - EXPECT_EQ(1U, Thread::Current()->NumStackReferences()); +int 
gJava_MyClassNatives_fooJJ_synchronized_calls[kJniKindCount] = {}; +jlong Java_MyClassNatives_fooJJ_synchronized(JNIEnv*, jobject, jlong x, jlong y) { + gJava_MyClassNatives_fooJJ_synchronized_calls[gCurrentJni]++; return x | y; } void JniCompilerTest::CompileAndRun_fooJJ_synchronizedImpl() { SetUpForTest(false, "fooJJ_synchronized", "(JJ)J", - reinterpret_cast(&Java_MyClassNatives_fooJJ_synchronized)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_fooJJ_synchronized)); - EXPECT_EQ(0, gJava_MyClassNatives_fooJJ_synchronized_calls); + EXPECT_EQ(0, gJava_MyClassNatives_fooJJ_synchronized_calls[gCurrentJni]); jlong a = 0x1000000020000000ULL; jlong b = 0x00ff000000aa0000ULL; jlong result = env_->CallNonvirtualLongMethod(jobj_, jklass_, jmethod_, a, b); EXPECT_EQ(a | b, result); - EXPECT_EQ(1, gJava_MyClassNatives_fooJJ_synchronized_calls); + EXPECT_EQ(1, gJava_MyClassNatives_fooJJ_synchronized_calls[gCurrentJni]); - gJava_MyClassNatives_fooJJ_synchronized_calls = 0; + gJava_MyClassNatives_fooJJ_synchronized_calls[gCurrentJni] = 0; } -JNI_TEST(CompileAndRun_fooJJ_synchronized) +JNI_TEST_NORMAL_ONLY(CompileAndRun_fooJJ_synchronized) -int gJava_MyClassNatives_fooIOO_calls = 0; -jobject Java_MyClassNatives_fooIOO(JNIEnv* env, jobject thisObj, jint x, jobject y, +int gJava_MyClassNatives_fooIOO_calls[kJniKindCount] = {}; +jobject Java_MyClassNatives_fooIOO(JNIEnv*, jobject thisObj, jint x, jobject y, jobject z) { - // 3 = this + y + z - EXPECT_EQ(kNative, Thread::Current()->GetState()); - EXPECT_EQ(Thread::Current()->GetJniEnv(), env); - EXPECT_TRUE(thisObj != nullptr); - EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_)); - gJava_MyClassNatives_fooIOO_calls++; - ScopedObjectAccess soa(Thread::Current()); - size_t null_args = (y == nullptr ? 1 : 0) + (z == nullptr ? 
1 : 0); - EXPECT_TRUE(3U == Thread::Current()->NumStackReferences() || - (3U - null_args) == Thread::Current()->NumStackReferences()); + gJava_MyClassNatives_fooIOO_calls[gCurrentJni]++; switch (x) { case 1: return y; @@ -435,96 +781,89 @@ jobject Java_MyClassNatives_fooIOO(JNIEnv* env, jobject thisObj, jint x, jobject void JniCompilerTest::CompileAndRunIntObjectObjectMethodImpl() { SetUpForTest(false, "fooIOO", "(ILjava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;", - reinterpret_cast(&Java_MyClassNatives_fooIOO)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_fooIOO)); - EXPECT_EQ(0, gJava_MyClassNatives_fooIOO_calls); + EXPECT_EQ(0, gJava_MyClassNatives_fooIOO_calls[gCurrentJni]); jobject result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 0, nullptr, nullptr); EXPECT_TRUE(env_->IsSameObject(jobj_, result)); - EXPECT_EQ(1, gJava_MyClassNatives_fooIOO_calls); + EXPECT_EQ(1, gJava_MyClassNatives_fooIOO_calls[gCurrentJni]); result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 0, nullptr, jklass_); EXPECT_TRUE(env_->IsSameObject(jobj_, result)); - EXPECT_EQ(2, gJava_MyClassNatives_fooIOO_calls); + EXPECT_EQ(2, gJava_MyClassNatives_fooIOO_calls[gCurrentJni]); result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 1, nullptr, jklass_); EXPECT_TRUE(env_->IsSameObject(nullptr, result)); - EXPECT_EQ(3, gJava_MyClassNatives_fooIOO_calls); + EXPECT_EQ(3, gJava_MyClassNatives_fooIOO_calls[gCurrentJni]); result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 2, nullptr, jklass_); EXPECT_TRUE(env_->IsSameObject(jklass_, result)); - EXPECT_EQ(4, gJava_MyClassNatives_fooIOO_calls); + EXPECT_EQ(4, gJava_MyClassNatives_fooIOO_calls[gCurrentJni]); result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 0, jklass_, nullptr); EXPECT_TRUE(env_->IsSameObject(jobj_, result)); - EXPECT_EQ(5, gJava_MyClassNatives_fooIOO_calls); + EXPECT_EQ(5, gJava_MyClassNatives_fooIOO_calls[gCurrentJni]); result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 1, jklass_, nullptr); EXPECT_TRUE(env_->IsSameObject(jklass_, result)); - EXPECT_EQ(6, gJava_MyClassNatives_fooIOO_calls); + EXPECT_EQ(6, gJava_MyClassNatives_fooIOO_calls[gCurrentJni]); result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 2, jklass_, nullptr); EXPECT_TRUE(env_->IsSameObject(nullptr, result)); - EXPECT_EQ(7, gJava_MyClassNatives_fooIOO_calls); + EXPECT_EQ(7, gJava_MyClassNatives_fooIOO_calls[gCurrentJni]); - gJava_MyClassNatives_fooIOO_calls = 0; + gJava_MyClassNatives_fooIOO_calls[gCurrentJni] = 0; } -JNI_TEST(CompileAndRunIntObjectObjectMethod) +// TODO: Maybe. @FastNative support for returning Objects? 
+JNI_TEST_NORMAL_ONLY(CompileAndRunIntObjectObjectMethod) -int gJava_MyClassNatives_fooSII_calls = 0; -jint Java_MyClassNatives_fooSII(JNIEnv* env, jclass klass, jint x, jint y) { - // 1 = klass - EXPECT_EQ(kNative, Thread::Current()->GetState()); - EXPECT_EQ(Thread::Current()->GetJniEnv(), env); - EXPECT_TRUE(klass != nullptr); - EXPECT_TRUE(env->IsInstanceOf(JniCompilerTest::jobj_, klass)); - gJava_MyClassNatives_fooSII_calls++; - ScopedObjectAccess soa(Thread::Current()); - EXPECT_EQ(1U, Thread::Current()->NumStackReferences()); +int gJava_MyClassNatives_fooSII_calls[kJniKindCount] = {}; +jint Java_MyClassNatives_fooSII(JNIEnv* env ATTRIBUTE_UNUSED, + jclass klass ATTRIBUTE_UNUSED, + jint x, + jint y) { + gJava_MyClassNatives_fooSII_calls[gCurrentJni]++; return x + y; } void JniCompilerTest::CompileAndRunStaticIntIntMethodImpl() { SetUpForTest(true, "fooSII", "(II)I", - reinterpret_cast(&Java_MyClassNatives_fooSII)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_fooSII)); - EXPECT_EQ(0, gJava_MyClassNatives_fooSII_calls); + EXPECT_EQ(0, gJava_MyClassNatives_fooSII_calls[gCurrentJni]); jint result = env_->CallStaticIntMethod(jklass_, jmethod_, 20, 30); EXPECT_EQ(50, result); - EXPECT_EQ(1, gJava_MyClassNatives_fooSII_calls); + EXPECT_EQ(1, gJava_MyClassNatives_fooSII_calls[gCurrentJni]); - gJava_MyClassNatives_fooSII_calls = 0; + gJava_MyClassNatives_fooSII_calls[gCurrentJni] = 0; } -JNI_TEST(CompileAndRunStaticIntIntMethod) +JNI_TEST_CRITICAL(CompileAndRunStaticIntIntMethod) -int gJava_MyClassNatives_fooSDD_calls = 0; -jdouble Java_MyClassNatives_fooSDD(JNIEnv* env, jclass klass, jdouble x, jdouble y) { - // 1 = klass - EXPECT_EQ(kNative, Thread::Current()->GetState()); - EXPECT_EQ(Thread::Current()->GetJniEnv(), env); - EXPECT_TRUE(klass != nullptr); - EXPECT_TRUE(env->IsInstanceOf(JniCompilerTest::jobj_, klass)); - gJava_MyClassNatives_fooSDD_calls++; - ScopedObjectAccess soa(Thread::Current()); - EXPECT_EQ(1U, Thread::Current()->NumStackReferences()); +int gJava_MyClassNatives_fooSDD_calls[kJniKindCount] = {}; +jdouble Java_MyClassNatives_fooSDD(JNIEnv* env ATTRIBUTE_UNUSED, + jclass klass ATTRIBUTE_UNUSED, + jdouble x, + jdouble y) { + gJava_MyClassNatives_fooSDD_calls[gCurrentJni]++; return x - y; // non-commutative operator } void JniCompilerTest::CompileAndRunStaticDoubleDoubleMethodImpl() { SetUpForTest(true, "fooSDD", "(DD)D", - reinterpret_cast(&Java_MyClassNatives_fooSDD)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_fooSDD)); - EXPECT_EQ(0, gJava_MyClassNatives_fooSDD_calls); + EXPECT_EQ(0, gJava_MyClassNatives_fooSDD_calls[gCurrentJni]); jdouble result = env_->CallStaticDoubleMethod(jklass_, jmethod_, 99.0, 10.0); EXPECT_DOUBLE_EQ(99.0 - 10.0, result); - EXPECT_EQ(1, gJava_MyClassNatives_fooSDD_calls); + EXPECT_EQ(1, gJava_MyClassNatives_fooSDD_calls[gCurrentJni]); jdouble a = 3.14159265358979323846; jdouble b = 0.69314718055994530942; result = env_->CallStaticDoubleMethod(jklass_, jmethod_, a, b); EXPECT_DOUBLE_EQ(a - b, result); - EXPECT_DOUBLE_EQ(2, gJava_MyClassNatives_fooSDD_calls); + EXPECT_DOUBLE_EQ(2, gJava_MyClassNatives_fooSDD_calls[gCurrentJni]); - gJava_MyClassNatives_fooSDD_calls = 0; + gJava_MyClassNatives_fooSDD_calls[gCurrentJni] = 0; } -JNI_TEST(CompileAndRunStaticDoubleDoubleMethod) +JNI_TEST_CRITICAL(CompileAndRunStaticDoubleDoubleMethod) // The x86 generic JNI code had a bug where it assumed a floating // point return value would be in xmm0. 
We use log, to somehow ensure @@ -534,27 +873,47 @@ jdouble Java_MyClassNatives_logD(JNIEnv*, jclass, jdouble x) { return log(x); } +jdouble Java_MyClassNatives_logD_notNormal(JNIEnv*, jclass, jdouble x) { + EXPECT_DOUBLE_EQ(2.0, x); + return log(x); +} + void JniCompilerTest::RunStaticLogDoubleMethodImpl() { - SetUpForTest(true, "logD", "(D)D", reinterpret_cast(&Java_MyClassNatives_logD)); + void* jni_handler; + if (IsCurrentJniNormal()) { + // This test seems a bit special, don't use a JNI wrapper here. + jni_handler = NORMAL_JNI_ONLY_NOWRAP(Java_MyClassNatives_logD); + } else { + jni_handler = CURRENT_JNI_WRAPPER(Java_MyClassNatives_logD_notNormal); + } + SetUpForTest(true, "logD", "(D)D", jni_handler); jdouble result = env_->CallStaticDoubleMethod(jklass_, jmethod_, 2.0); EXPECT_DOUBLE_EQ(log(2.0), result); } -JNI_TEST(RunStaticLogDoubleMethod) +JNI_TEST_CRITICAL(RunStaticLogDoubleMethod) jfloat Java_MyClassNatives_logF(JNIEnv*, jclass, jfloat x) { return logf(x); } void JniCompilerTest::RunStaticLogFloatMethodImpl() { - SetUpForTest(true, "logF", "(F)F", reinterpret_cast(&Java_MyClassNatives_logF)); + void* jni_handler; + if (IsCurrentJniNormal()) { + // This test seems a bit special, don't use a JNI wrapper here. + jni_handler = NORMAL_JNI_ONLY_NOWRAP(Java_MyClassNatives_logF); + } else { + jni_handler = CURRENT_JNI_WRAPPER(Java_MyClassNatives_logF); + } + + SetUpForTest(true, "logF", "(F)F", jni_handler); jfloat result = env_->CallStaticFloatMethod(jklass_, jmethod_, 2.0); EXPECT_FLOAT_EQ(logf(2.0), result); } -JNI_TEST(RunStaticLogFloatMethod) +JNI_TEST_CRITICAL(RunStaticLogFloatMethod) jboolean Java_MyClassNatives_returnTrue(JNIEnv*, jclass) { return JNI_TRUE; @@ -569,46 +928,67 @@ jint Java_MyClassNatives_returnInt(JNIEnv*, jclass) { } void JniCompilerTest::RunStaticReturnTrueImpl() { - SetUpForTest(true, "returnTrue", "()Z", reinterpret_cast(&Java_MyClassNatives_returnTrue)); + SetUpForTest(true, "returnTrue", "()Z", CURRENT_JNI_WRAPPER(Java_MyClassNatives_returnTrue)); jboolean result = env_->CallStaticBooleanMethod(jklass_, jmethod_); EXPECT_TRUE(result); } -JNI_TEST(RunStaticReturnTrue) +JNI_TEST_CRITICAL(RunStaticReturnTrue) void JniCompilerTest::RunStaticReturnFalseImpl() { SetUpForTest(true, "returnFalse", "()Z", - reinterpret_cast(&Java_MyClassNatives_returnFalse)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_returnFalse)); jboolean result = env_->CallStaticBooleanMethod(jklass_, jmethod_); EXPECT_FALSE(result); } -JNI_TEST(RunStaticReturnFalse) +JNI_TEST_CRITICAL(RunStaticReturnFalse) void JniCompilerTest::RunGenericStaticReturnIntImpl() { - SetUpForTest(true, "returnInt", "()I", reinterpret_cast(&Java_MyClassNatives_returnInt)); + SetUpForTest(true, "returnInt", "()I", CURRENT_JNI_WRAPPER(Java_MyClassNatives_returnInt)); jint result = env_->CallStaticIntMethod(jklass_, jmethod_); EXPECT_EQ(42, result); } -JNI_TEST(RunGenericStaticReturnInt) +JNI_TEST_CRITICAL(RunGenericStaticReturnInt) -int gJava_MyClassNatives_fooSIOO_calls = 0; -jobject Java_MyClassNatives_fooSIOO(JNIEnv* env, jclass klass, jint x, jobject y, - jobject z) { - // 3 = klass + y + z - EXPECT_EQ(kNative, Thread::Current()->GetState()); - EXPECT_EQ(Thread::Current()->GetJniEnv(), env); - EXPECT_TRUE(klass != nullptr); - EXPECT_TRUE(env->IsInstanceOf(JniCompilerTest::jobj_, klass)); - gJava_MyClassNatives_fooSIOO_calls++; - ScopedObjectAccess soa(Thread::Current()); - size_t null_args = (y == nullptr ? 1 : 0) + (z == nullptr ? 
1 : 0); - EXPECT_TRUE(3U == Thread::Current()->NumStackReferences() || - (3U - null_args) == Thread::Current()->NumStackReferences()); +int gJava_MyClassNatives_returnDouble_calls[kJniKindCount] = {}; +jdouble Java_MyClassNatives_returnDouble(JNIEnv*, jclass) { + gJava_MyClassNatives_returnDouble_calls[gCurrentJni]++; + return 4.0; +} + +void JniCompilerTest::RunGenericStaticReturnDoubleImpl() { + SetUpForTest(true, "returnDouble", "()D", CURRENT_JNI_WRAPPER(Java_MyClassNatives_returnDouble)); + + jdouble result = env_->CallStaticDoubleMethod(jklass_, jmethod_); + EXPECT_DOUBLE_EQ(4.0, result); + EXPECT_EQ(1, gJava_MyClassNatives_returnDouble_calls[gCurrentJni]); + + gJava_MyClassNatives_returnDouble_calls[gCurrentJni] = 0; +} + +JNI_TEST_CRITICAL(RunGenericStaticReturnDouble) + +jlong Java_MyClassNatives_returnLong(JNIEnv*, jclass) { + return 0xFEEDDEADFEEDL; +} + +void JniCompilerTest::RunGenericStaticReturnLongImpl() { + SetUpForTest(true, "returnLong", "()J", CURRENT_JNI_WRAPPER(Java_MyClassNatives_returnLong)); + + jlong result = env_->CallStaticLongMethod(jklass_, jmethod_); + EXPECT_EQ(0xFEEDDEADFEEDL, result); +} + +JNI_TEST_CRITICAL(RunGenericStaticReturnLong) + +int gJava_MyClassNatives_fooSIOO_calls[kJniKindCount] = {}; +jobject Java_MyClassNatives_fooSIOO(JNIEnv*, jclass klass, jint x, jobject y, jobject z) { + gJava_MyClassNatives_fooSIOO_calls[gCurrentJni]++; switch (x) { case 1: return y; @@ -619,54 +999,45 @@ jobject Java_MyClassNatives_fooSIOO(JNIEnv* env, jclass klass, jint x, jobject y } } - void JniCompilerTest::CompileAndRunStaticIntObjectObjectMethodImpl() { SetUpForTest(true, "fooSIOO", "(ILjava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;", - reinterpret_cast(&Java_MyClassNatives_fooSIOO)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_fooSIOO)); - EXPECT_EQ(0, gJava_MyClassNatives_fooSIOO_calls); + EXPECT_EQ(0, gJava_MyClassNatives_fooSIOO_calls[gCurrentJni]); jobject result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, nullptr, nullptr); EXPECT_TRUE(env_->IsSameObject(jklass_, result)); - EXPECT_EQ(1, gJava_MyClassNatives_fooSIOO_calls); + EXPECT_EQ(1, gJava_MyClassNatives_fooSIOO_calls[gCurrentJni]); result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, nullptr, jobj_); EXPECT_TRUE(env_->IsSameObject(jklass_, result)); - EXPECT_EQ(2, gJava_MyClassNatives_fooSIOO_calls); + EXPECT_EQ(2, gJava_MyClassNatives_fooSIOO_calls[gCurrentJni]); result = env_->CallStaticObjectMethod(jklass_, jmethod_, 1, nullptr, jobj_); EXPECT_TRUE(env_->IsSameObject(nullptr, result)); - EXPECT_EQ(3, gJava_MyClassNatives_fooSIOO_calls); + EXPECT_EQ(3, gJava_MyClassNatives_fooSIOO_calls[gCurrentJni]); result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, nullptr, jobj_); EXPECT_TRUE(env_->IsSameObject(jobj_, result)); - EXPECT_EQ(4, gJava_MyClassNatives_fooSIOO_calls); + EXPECT_EQ(4, gJava_MyClassNatives_fooSIOO_calls[gCurrentJni]); result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, jobj_, nullptr); EXPECT_TRUE(env_->IsSameObject(jklass_, result)); - EXPECT_EQ(5, gJava_MyClassNatives_fooSIOO_calls); + EXPECT_EQ(5, gJava_MyClassNatives_fooSIOO_calls[gCurrentJni]); result = env_->CallStaticObjectMethod(jklass_, jmethod_, 1, jobj_, nullptr); EXPECT_TRUE(env_->IsSameObject(jobj_, result)); - EXPECT_EQ(6, gJava_MyClassNatives_fooSIOO_calls); + EXPECT_EQ(6, gJava_MyClassNatives_fooSIOO_calls[gCurrentJni]); result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, jobj_, nullptr); EXPECT_TRUE(env_->IsSameObject(nullptr, result)); - EXPECT_EQ(7, 
gJava_MyClassNatives_fooSIOO_calls); + EXPECT_EQ(7, gJava_MyClassNatives_fooSIOO_calls[gCurrentJni]); - gJava_MyClassNatives_fooSIOO_calls = 0; + gJava_MyClassNatives_fooSIOO_calls[gCurrentJni] = 0; } -JNI_TEST(CompileAndRunStaticIntObjectObjectMethod) +// TODO: Maybe. @FastNative support for returning Objects? +JNI_TEST_NORMAL_ONLY(CompileAndRunStaticIntObjectObjectMethod) -int gJava_MyClassNatives_fooSSIOO_calls = 0; -jobject Java_MyClassNatives_fooSSIOO(JNIEnv* env, jclass klass, jint x, jobject y, jobject z) { - // 3 = klass + y + z - EXPECT_EQ(kNative, Thread::Current()->GetState()); - EXPECT_EQ(Thread::Current()->GetJniEnv(), env); - EXPECT_TRUE(klass != nullptr); - EXPECT_TRUE(env->IsInstanceOf(JniCompilerTest::jobj_, klass)); - gJava_MyClassNatives_fooSSIOO_calls++; - ScopedObjectAccess soa(Thread::Current()); - size_t null_args = (y == nullptr ? 1 : 0) + (z == nullptr ? 1 : 0); - EXPECT_TRUE(3U == Thread::Current()->NumStackReferences() || - (3U - null_args) == Thread::Current()->NumStackReferences()); +int gJava_MyClassNatives_fooSSIOO_calls[kJniKindCount] = {}; +jobject Java_MyClassNatives_fooSSIOO(JNIEnv*, jclass klass, jint x, jobject y, jobject z) { + gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni]++; switch (x) { case 1: return y; @@ -680,37 +1051,38 @@ jobject Java_MyClassNatives_fooSSIOO(JNIEnv* env, jclass klass, jint x, jobject void JniCompilerTest::CompileAndRunStaticSynchronizedIntObjectObjectMethodImpl() { SetUpForTest(true, "fooSSIOO", "(ILjava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;", - reinterpret_cast(&Java_MyClassNatives_fooSSIOO)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_fooSSIOO)); - EXPECT_EQ(0, gJava_MyClassNatives_fooSSIOO_calls); + EXPECT_EQ(0, gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni]); jobject result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, nullptr, nullptr); EXPECT_TRUE(env_->IsSameObject(jklass_, result)); - EXPECT_EQ(1, gJava_MyClassNatives_fooSSIOO_calls); + EXPECT_EQ(1, gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni]); result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, nullptr, jobj_); EXPECT_TRUE(env_->IsSameObject(jklass_, result)); - EXPECT_EQ(2, gJava_MyClassNatives_fooSSIOO_calls); + EXPECT_EQ(2, gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni]); result = env_->CallStaticObjectMethod(jklass_, jmethod_, 1, nullptr, jobj_); EXPECT_TRUE(env_->IsSameObject(nullptr, result)); - EXPECT_EQ(3, gJava_MyClassNatives_fooSSIOO_calls); + EXPECT_EQ(3, gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni]); result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, nullptr, jobj_); EXPECT_TRUE(env_->IsSameObject(jobj_, result)); - EXPECT_EQ(4, gJava_MyClassNatives_fooSSIOO_calls); + EXPECT_EQ(4, gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni]); result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, jobj_, nullptr); EXPECT_TRUE(env_->IsSameObject(jklass_, result)); - EXPECT_EQ(5, gJava_MyClassNatives_fooSSIOO_calls); + EXPECT_EQ(5, gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni]); result = env_->CallStaticObjectMethod(jklass_, jmethod_, 1, jobj_, nullptr); EXPECT_TRUE(env_->IsSameObject(jobj_, result)); - EXPECT_EQ(6, gJava_MyClassNatives_fooSSIOO_calls); + EXPECT_EQ(6, gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni]); result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, jobj_, nullptr); EXPECT_TRUE(env_->IsSameObject(nullptr, result)); - EXPECT_EQ(7, gJava_MyClassNatives_fooSSIOO_calls); + EXPECT_EQ(7, gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni]); - gJava_MyClassNatives_fooSSIOO_calls = 0; + 
gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni] = 0; } -JNI_TEST(CompileAndRunStaticSynchronizedIntObjectObjectMethod) +// TODO: Maybe. @FastNative support for returning Objects? +JNI_TEST_NORMAL_ONLY(CompileAndRunStaticSynchronizedIntObjectObjectMethod) void Java_MyClassNatives_throwException(JNIEnv* env, jobject) { jclass c = env->FindClass("java/lang/RuntimeException"); @@ -724,30 +1096,30 @@ void JniCompilerTest::ExceptionHandlingImpl() { class_loader_ = LoadDex("MyClassNatives"); // all compilation needs to happen before Runtime::Start - CompileForTest(class_loader_, false, "foo", "()V"); - CompileForTest(class_loader_, false, "throwException", "()V"); - CompileForTest(class_loader_, false, "foo", "()V"); + CompileForTestWithCurrentJni(class_loader_, false, "foo", "()V"); + CompileForTestWithCurrentJni(class_loader_, false, "throwException", "()V"); + CompileForTestWithCurrentJni(class_loader_, false, "foo", "()V"); } // Start runtime to avoid re-initialization in SetupForTest. Thread::Current()->TransitionFromSuspendedToRunnable(); bool started = runtime_->Start(); CHECK(started); - gJava_MyClassNatives_foo_calls = 0; + gJava_MyClassNatives_foo_calls[gCurrentJni] = 0; // Check a single call of a JNI method is ok - SetUpForTest(false, "foo", "()V", reinterpret_cast(&Java_MyClassNatives_foo)); + SetUpForTest(false, "foo", "()V", CURRENT_JNI_WRAPPER(Java_MyClassNatives_foo)); env_->CallNonvirtualVoidMethod(jobj_, jklass_, jmethod_); - EXPECT_EQ(1, gJava_MyClassNatives_foo_calls); + EXPECT_EQ(1, gJava_MyClassNatives_foo_calls[gCurrentJni]); EXPECT_FALSE(Thread::Current()->IsExceptionPending()); // Get class for exception we expect to be thrown ScopedLocalRef jlre(env_, env_->FindClass("java/lang/RuntimeException")); SetUpForTest(false, "throwException", "()V", - reinterpret_cast(&Java_MyClassNatives_throwException)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_throwException)); // Call Java_MyClassNatives_throwException (JNI method that throws exception) env_->CallNonvirtualVoidMethod(jobj_, jklass_, jmethod_); - EXPECT_EQ(1, gJava_MyClassNatives_foo_calls); + EXPECT_EQ(1, gJava_MyClassNatives_foo_calls[gCurrentJni]); EXPECT_TRUE(env_->ExceptionCheck() == JNI_TRUE); ScopedLocalRef exception(env_, env_->ExceptionOccurred()); env_->ExceptionClear(); @@ -756,9 +1128,9 @@ void JniCompilerTest::ExceptionHandlingImpl() { // Check a single call of a JNI method is ok SetUpForTest(false, "foo", "()V", reinterpret_cast(&Java_MyClassNatives_foo)); env_->CallNonvirtualVoidMethod(jobj_, jklass_, jmethod_); - EXPECT_EQ(2, gJava_MyClassNatives_foo_calls); + EXPECT_EQ(2, gJava_MyClassNatives_foo_calls[gCurrentJni]); - gJava_MyClassNatives_foo_calls = 0; + gJava_MyClassNatives_foo_calls[gCurrentJni] = 0; } JNI_TEST(ExceptionHandling) @@ -782,7 +1154,7 @@ jint Java_MyClassNatives_nativeUpCall(JNIEnv* env, jobject thisObj, jint i) { mirror::StackTraceElement* ste = trace_array->Get(j); EXPECT_STREQ("MyClassNatives.java", ste->GetFileName()->ToModifiedUtf8().c_str()); EXPECT_STREQ("MyClassNatives", ste->GetDeclaringClass()->ToModifiedUtf8().c_str()); - EXPECT_STREQ("fooI", ste->GetMethodName()->ToModifiedUtf8().c_str()); + EXPECT_EQ(("fooI" + CurrentJniStringSuffix()), ste->GetMethodName()->ToModifiedUtf8()); } // end recursion @@ -790,7 +1162,9 @@ jint Java_MyClassNatives_nativeUpCall(JNIEnv* env, jobject thisObj, jint i) { } else { jclass jklass = env->FindClass("MyClassNatives"); EXPECT_TRUE(jklass != nullptr); - jmethodID jmethod = env->GetMethodID(jklass, "fooI", "(I)I"); + jmethodID jmethod = 
env->GetMethodID(jklass, + ("fooI" + CurrentJniStringSuffix()).c_str(), + "(I)I"); EXPECT_TRUE(jmethod != nullptr); // Recurse with i - 1 @@ -803,8 +1177,13 @@ jint Java_MyClassNatives_nativeUpCall(JNIEnv* env, jobject thisObj, jint i) { void JniCompilerTest::NativeStackTraceElementImpl() { SetUpForTest(false, "fooI", "(I)I", - reinterpret_cast(&Java_MyClassNatives_nativeUpCall)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_nativeUpCall)); + + // Usual # local references on stack check fails because nativeUpCall calls itself recursively, + // each time the # of local references will therefore go up. + ScopedDisableCheckNumStackReferences disable_num_stack_check; jint result = env_->CallNonvirtualIntMethod(jobj_, jklass_, jmethod_, 10); + EXPECT_EQ(10+9+8+7+6+5+4+3+2+1, result); } @@ -816,13 +1195,14 @@ jobject Java_MyClassNatives_fooO(JNIEnv* env, jobject, jobject x) { void JniCompilerTest::ReturnGlobalRefImpl() { SetUpForTest(false, "fooO", "(Ljava/lang/Object;)Ljava/lang/Object;", - reinterpret_cast(&Java_MyClassNatives_fooO)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_fooO)); jobject result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, jobj_); EXPECT_EQ(JNILocalRefType, env_->GetObjectRefType(result)); EXPECT_TRUE(env_->IsSameObject(result, jobj_)); } -JNI_TEST(ReturnGlobalRef) +// TODO: Maybe. @FastNative support for returning objects? +JNI_TEST_NORMAL_ONLY(ReturnGlobalRef) jint local_ref_test(JNIEnv* env, jobject thisObj, jint x) { // Add 10 local references @@ -834,7 +1214,7 @@ jint local_ref_test(JNIEnv* env, jobject thisObj, jint x) { } void JniCompilerTest::LocalReferenceTableClearingTestImpl() { - SetUpForTest(false, "fooI", "(I)I", reinterpret_cast(&local_ref_test)); + SetUpForTest(false, "fooI", "(I)I", CURRENT_JNI_WRAPPER(local_ref_test)); // 1000 invocations of a method that adds 10 local references for (int i = 0; i < 1000; i++) { jint result = env_->CallIntMethod(jobj_, jmethod_, i); @@ -855,7 +1235,7 @@ void my_arraycopy(JNIEnv* env, jclass klass, jobject src, jint src_pos, jobject void JniCompilerTest::JavaLangSystemArrayCopyImpl() { SetUpForTest(true, "arraycopy", "(Ljava/lang/Object;ILjava/lang/Object;II)V", - reinterpret_cast(&my_arraycopy)); + CURRENT_JNI_WRAPPER(my_arraycopy)); env_->CallStaticVoidMethod(jklass_, jmethod_, jobj_, 1234, jklass_, 5678, 9876); } @@ -872,7 +1252,7 @@ jboolean my_casi(JNIEnv* env, jobject unsafe, jobject obj, jlong offset, jint ex void JniCompilerTest::CompareAndSwapIntImpl() { SetUpForTest(false, "compareAndSwapInt", "(Ljava/lang/Object;JII)Z", - reinterpret_cast(&my_casi)); + CURRENT_JNI_WRAPPER(my_casi)); jboolean result = env_->CallBooleanMethod(jobj_, jmethod_, jobj_, INT64_C(0x12345678ABCDEF88), 0xCAFEF00D, 0xEBADF00D); EXPECT_EQ(result, JNI_TRUE); @@ -891,7 +1271,7 @@ jint my_gettext(JNIEnv* env, jclass klass, jlong val1, jobject obj1, jlong val2, void JniCompilerTest::GetTextImpl() { SetUpForTest(true, "getText", "(JLjava/lang/Object;JLjava/lang/Object;)I", - reinterpret_cast(&my_gettext)); + CURRENT_JNI_WRAPPER(my_gettext)); jint result = env_->CallStaticIntMethod(jklass_, jmethod_, 0x12345678ABCDEF88ll, jobj_, INT64_C(0x7FEDCBA987654321), jobj_); EXPECT_EQ(result, 42); @@ -899,37 +1279,33 @@ void JniCompilerTest::GetTextImpl() { JNI_TEST(GetText) -int gJava_MyClassNatives_GetSinkProperties_calls = 0; -jarray Java_MyClassNatives_GetSinkProperties(JNIEnv* env, jobject thisObj, jstring s) { - // 1 = thisObj - Thread* self = Thread::Current(); - EXPECT_EQ(kNative, self->GetState()); - 
Locks::mutator_lock_->AssertNotHeld(self);
-  EXPECT_EQ(self->GetJniEnv(), env);
-  EXPECT_TRUE(thisObj != nullptr);
-  EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
+int gJava_MyClassNatives_GetSinkProperties_calls[kJniKindCount] = {};
+jarray Java_MyClassNatives_GetSinkProperties(JNIEnv*, jobject thisObj, jstring s) {
   EXPECT_EQ(s, nullptr);
-  gJava_MyClassNatives_GetSinkProperties_calls++;
+  gJava_MyClassNatives_GetSinkProperties_calls[gCurrentJni]++;
+
+  Thread* self = Thread::Current();
   ScopedObjectAccess soa(self);
-  EXPECT_EQ(2U, self->NumStackReferences());
   EXPECT_TRUE(self->HoldsLock(soa.Decode<mirror::Object*>(thisObj)));
   return nullptr;
 }
 
 void JniCompilerTest::GetSinkPropertiesNativeImpl() {
   SetUpForTest(false, "getSinkPropertiesNative", "(Ljava/lang/String;)[Ljava/lang/Object;",
-               reinterpret_cast<void*>(&Java_MyClassNatives_GetSinkProperties));
+               CURRENT_JNI_WRAPPER(Java_MyClassNatives_GetSinkProperties));
 
-  EXPECT_EQ(0, gJava_MyClassNatives_GetSinkProperties_calls);
+  EXPECT_EQ(0, gJava_MyClassNatives_GetSinkProperties_calls[gCurrentJni]);
   jarray result = down_cast<jarray>(
       env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, nullptr));
   EXPECT_EQ(nullptr, result);
-  EXPECT_EQ(1, gJava_MyClassNatives_GetSinkProperties_calls);
+  EXPECT_EQ(1, gJava_MyClassNatives_GetSinkProperties_calls[gCurrentJni]);
 
-  gJava_MyClassNatives_GetSinkProperties_calls = 0;
+  gJava_MyClassNatives_GetSinkProperties_calls[gCurrentJni] = 0;
 }
 
-JNI_TEST(GetSinkPropertiesNative)
+// @FastNative doesn't support 'synchronized' keyword and
+// never will -- locking functions aren't fast.
+JNI_TEST_NORMAL_ONLY(GetSinkPropertiesNative)
 
 // This should return jclass, but we're imitating a bug pattern.
 jobject Java_MyClassNatives_instanceMethodThatShouldReturnClass(JNIEnv* env, jobject) {
@@ -943,39 +1319,59 @@
 void JniCompilerTest::UpcallReturnTypeChecking_InstanceImpl() {
   SetUpForTest(false, "instanceMethodThatShouldReturnClass", "()Ljava/lang/Class;",
-               reinterpret_cast<void*>(&Java_MyClassNatives_instanceMethodThatShouldReturnClass));
+               CURRENT_JNI_WRAPPER(Java_MyClassNatives_instanceMethodThatShouldReturnClass));
 
   CheckJniAbortCatcher check_jni_abort_catcher;
   // This native method is bad, and tries to return a jstring as a jclass.
   env_->CallObjectMethod(jobj_, jmethod_);
-  check_jni_abort_catcher.Check("attempt to return an instance of java.lang.String from java.lang.Class MyClassNatives.instanceMethodThatShouldReturnClass()");
+  check_jni_abort_catcher.Check(std::string() + "attempt to return an instance " +
+                                "of java.lang.String from java.lang.Class " +
+                                "MyClassNatives.instanceMethodThatShouldReturnClass" +
+                                CurrentJniStringSuffix() + "()");
 
   // Here, we just call the method incorrectly; we should catch that too.
env_->CallObjectMethod(jobj_, jmethod_); - check_jni_abort_catcher.Check("attempt to return an instance of java.lang.String from java.lang.Class MyClassNatives.instanceMethodThatShouldReturnClass()"); + check_jni_abort_catcher.Check(std::string() + "attempt to return an instance " + + "of java.lang.String from java.lang.Class " + + "MyClassNatives.instanceMethodThatShouldReturnClass" + + CurrentJniStringSuffix() + "()"); env_->CallStaticObjectMethod(jklass_, jmethod_); - check_jni_abort_catcher.Check("calling non-static method java.lang.Class MyClassNatives.instanceMethodThatShouldReturnClass() with CallStaticObjectMethodV"); + check_jni_abort_catcher.Check(std::string() + "calling non-static method " + + "java.lang.Class " + + "MyClassNatives.instanceMethodThatShouldReturnClass" + + CurrentJniStringSuffix() + "() with CallStaticObjectMethodV"); } -JNI_TEST(UpcallReturnTypeChecking_Instance) +// TODO: Maybe support returning objects for @FastNative? +JNI_TEST_NORMAL_ONLY(UpcallReturnTypeChecking_Instance) void JniCompilerTest::UpcallReturnTypeChecking_StaticImpl() { SetUpForTest(true, "staticMethodThatShouldReturnClass", "()Ljava/lang/Class;", - reinterpret_cast(&Java_MyClassNatives_staticMethodThatShouldReturnClass)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_staticMethodThatShouldReturnClass)); CheckJniAbortCatcher check_jni_abort_catcher; // This native method is bad, and tries to return a jstring as a jclass. env_->CallStaticObjectMethod(jklass_, jmethod_); - check_jni_abort_catcher.Check("attempt to return an instance of java.lang.String from java.lang.Class MyClassNatives.staticMethodThatShouldReturnClass()"); + check_jni_abort_catcher.Check(std::string() + "attempt to return an instance " + + "of java.lang.String from java.lang.Class " + + "MyClassNatives.staticMethodThatShouldReturnClass" + + CurrentJniStringSuffix() + "()"); // Here, we just call the method incorrectly; we should catch that too. env_->CallStaticObjectMethod(jklass_, jmethod_); - check_jni_abort_catcher.Check("attempt to return an instance of java.lang.String from java.lang.Class MyClassNatives.staticMethodThatShouldReturnClass()"); + check_jni_abort_catcher.Check(std::string() + "attempt to return an instance " + + "of java.lang.String from java.lang.Class " + + "MyClassNatives.staticMethodThatShouldReturnClass" + + CurrentJniStringSuffix() + "()"); env_->CallObjectMethod(jobj_, jmethod_); - check_jni_abort_catcher.Check("calling static method java.lang.Class MyClassNatives.staticMethodThatShouldReturnClass() with CallObjectMethodV"); + check_jni_abort_catcher.Check(std::string() + "calling static method " + + "java.lang.Class " + + "MyClassNatives.staticMethodThatShouldReturnClass" + + CurrentJniStringSuffix() + "() with CallObjectMethodV"); } -JNI_TEST(UpcallReturnTypeChecking_Static) +// TODO: Maybe support returning objects for @FastNative? +JNI_TEST_NORMAL_ONLY(UpcallReturnTypeChecking_Static) // This should take jclass, but we're imitating a bug pattern. void Java_MyClassNatives_instanceMethodThatShouldTakeClass(JNIEnv*, jobject, jclass) { @@ -990,12 +1386,14 @@ void JniCompilerTest::UpcallArgumentTypeChecking_InstanceImpl() { ScopedLogSeverity sls(LogSeverity::FATAL); SetUpForTest(false, "instanceMethodThatShouldTakeClass", "(ILjava/lang/Class;)V", - reinterpret_cast(&Java_MyClassNatives_instanceMethodThatShouldTakeClass)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_instanceMethodThatShouldTakeClass)); CheckJniAbortCatcher check_jni_abort_catcher; // We deliberately pass a bad second argument here. 
env_->CallVoidMethod(jobj_, jmethod_, 123, env_->NewStringUTF("not a class!")); - check_jni_abort_catcher.Check("bad arguments passed to void MyClassNatives.instanceMethodThatShouldTakeClass(int, java.lang.Class)"); + check_jni_abort_catcher.Check(std::string() + "bad arguments passed to void " + + "MyClassNatives.instanceMethodThatShouldTakeClass" + + CurrentJniStringSuffix() + "(int, java.lang.Class)"); } JNI_TEST(UpcallArgumentTypeChecking_Instance) @@ -1005,29 +1403,25 @@ void JniCompilerTest::UpcallArgumentTypeChecking_StaticImpl() { ScopedLogSeverity sls(LogSeverity::FATAL); SetUpForTest(true, "staticMethodThatShouldTakeClass", "(ILjava/lang/Class;)V", - reinterpret_cast(&Java_MyClassNatives_staticMethodThatShouldTakeClass)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_staticMethodThatShouldTakeClass)); CheckJniAbortCatcher check_jni_abort_catcher; // We deliberately pass a bad second argument here. env_->CallStaticVoidMethod(jklass_, jmethod_, 123, env_->NewStringUTF("not a class!")); - check_jni_abort_catcher.Check("bad arguments passed to void MyClassNatives.staticMethodThatShouldTakeClass(int, java.lang.Class)"); + check_jni_abort_catcher.Check(std::string() + "bad arguments passed to void " + + "MyClassNatives.staticMethodThatShouldTakeClass" + + CurrentJniStringSuffix() + "(int, java.lang.Class)"); } JNI_TEST(UpcallArgumentTypeChecking_Static) -jfloat Java_MyClassNatives_checkFloats(JNIEnv* env, jobject thisObj, jfloat f1, jfloat f2) { - EXPECT_EQ(kNative, Thread::Current()->GetState()); - EXPECT_EQ(Thread::Current()->GetJniEnv(), env); - EXPECT_TRUE(thisObj != nullptr); - EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_)); - ScopedObjectAccess soa(Thread::Current()); - EXPECT_EQ(1U, Thread::Current()->NumStackReferences()); +jfloat Java_MyClassNatives_checkFloats(JNIEnv*, jobject, jfloat f1, jfloat f2) { return f1 - f2; // non-commutative operator } void JniCompilerTest::CompileAndRunFloatFloatMethodImpl() { SetUpForTest(false, "checkFloats", "(FF)F", - reinterpret_cast(&Java_MyClassNatives_checkFloats)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_checkFloats)); jfloat result = env_->CallNonvirtualFloatMethod(jobj_, jklass_, jmethod_, 99.0F, 10.0F); @@ -1042,28 +1436,22 @@ JNI_TEST(CompileAndRunFloatFloatMethod) void Java_MyClassNatives_checkParameterAlign(JNIEnv* env ATTRIBUTE_UNUSED, jobject thisObj ATTRIBUTE_UNUSED, - jint i1 ATTRIBUTE_UNUSED, - jlong l1 ATTRIBUTE_UNUSED) { -// EXPECT_EQ(kNative, Thread::Current()->GetState()); -// EXPECT_EQ(Thread::Current()->GetJniEnv(), env); -// EXPECT_TRUE(thisObj != nullptr); -// EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_)); -// ScopedObjectAccess soa(Thread::Current()); -// EXPECT_EQ(1U, Thread::Current()->NumStackReferences()); + jint i1, + jlong l1) { EXPECT_EQ(i1, 1234); EXPECT_EQ(l1, INT64_C(0x12345678ABCDEF0)); } void JniCompilerTest::CheckParameterAlignImpl() { SetUpForTest(false, "checkParameterAlign", "(IJ)V", - reinterpret_cast(&Java_MyClassNatives_checkParameterAlign)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_checkParameterAlign)); env_->CallNonvirtualVoidMethod(jobj_, jklass_, jmethod_, 1234, INT64_C(0x12345678ABCDEF0)); } JNI_TEST(CheckParameterAlign) -void Java_MyClassNatives_maxParamNumber(JNIEnv* env, jobject thisObj, +void Java_MyClassNatives_maxParamNumber(JNIEnv* env, jobject, jobject o0, jobject o1, jobject o2, jobject o3, jobject o4, jobject o5, jobject o6, jobject o7, jobject o8, jobject o9, jobject o10, jobject o11, jobject o12, jobject o13, jobject o14, jobject o15, jobject 
o16, jobject o17, jobject o18, jobject o19, jobject o20, jobject o21, jobject o22, jobject o23, @@ -1096,13 +1484,6 @@ void Java_MyClassNatives_maxParamNumber(JNIEnv* env, jobject thisObj, jobject o232, jobject o233, jobject o234, jobject o235, jobject o236, jobject o237, jobject o238, jobject o239, jobject o240, jobject o241, jobject o242, jobject o243, jobject o244, jobject o245, jobject o246, jobject o247, jobject o248, jobject o249, jobject o250, jobject o251, jobject o252, jobject o253) { - EXPECT_EQ(kNative, Thread::Current()->GetState()); - EXPECT_EQ(Thread::Current()->GetJniEnv(), env); - EXPECT_TRUE(thisObj != nullptr); - EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_)); - ScopedObjectAccess soa(Thread::Current()); - EXPECT_GE(255U, Thread::Current()->NumStackReferences()); - // two tests possible if (o0 == nullptr) { // 1) everything is null @@ -1470,7 +1851,7 @@ const char* longSig = void JniCompilerTest::MaxParamNumberImpl() { SetUpForTest(false, "maxParamNumber", longSig, - reinterpret_cast(&Java_MyClassNatives_maxParamNumber)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_maxParamNumber)); jvalue args[254]; @@ -1497,7 +1878,7 @@ void JniCompilerTest::WithoutImplementationImpl() { // This will lead to error messages in the log. ScopedLogSeverity sls(LogSeverity::FATAL); - SetUpForTest(false, "withoutImplementation", "()V", nullptr); + SetUpForTest(false, "withoutImplementation", "()V", NORMAL_JNI_ONLY_NULLPTR); env_->CallVoidMethod(jobj_, jmethod_); @@ -1505,13 +1886,18 @@ void JniCompilerTest::WithoutImplementationImpl() { EXPECT_TRUE(env_->ExceptionCheck() == JNI_TRUE); } -JNI_TEST(WithoutImplementation) +// TODO: Don't test @FastNative here since it goes through a stub lookup (unsupported) which would +// normally fail with an exception, but fails with an assert. +JNI_TEST_NORMAL_ONLY(WithoutImplementation) void JniCompilerTest::WithoutImplementationRefReturnImpl() { // This will lead to error messages in the log. ScopedLogSeverity sls(LogSeverity::FATAL); - SetUpForTest(false, "withoutImplementationRefReturn", "()Ljava/lang/Object;", nullptr); + SetUpForTest(false, + "withoutImplementationRefReturn", + "()Ljava/lang/Object;", + NORMAL_JNI_ONLY_NULLPTR); env_->CallObjectMethod(jobj_, jmethod_); @@ -1519,7 +1905,8 @@ void JniCompilerTest::WithoutImplementationRefReturnImpl() { EXPECT_TRUE(env_->ExceptionCheck() == JNI_TRUE); } -JNI_TEST(WithoutImplementationRefReturn) +// TODO: Should work for @FastNative too. 
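+// (The missing-implementation stub lookup currently assumes a normal JNI
+// transition; as noted for WithoutImplementation above, under @FastNative it
+// fails with an assert rather than a pending exception.)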
+JNI_TEST_NORMAL_ONLY(WithoutImplementationRefReturn) void Java_MyClassNatives_stackArgsIntsFirst(JNIEnv*, jclass, jint i1, jint i2, jint i3, jint i4, jint i5, jint i6, jint i7, jint i8, jint i9, @@ -1561,7 +1948,7 @@ void Java_MyClassNatives_stackArgsIntsFirst(JNIEnv*, jclass, jint i1, jint i2, j void JniCompilerTest::StackArgsIntsFirstImpl() { SetUpForTest(true, "stackArgsIntsFirst", "(IIIIIIIIIIFFFFFFFFFF)V", - reinterpret_cast(&Java_MyClassNatives_stackArgsIntsFirst)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_stackArgsIntsFirst)); jint i1 = 1; jint i2 = 2; @@ -1589,7 +1976,7 @@ void JniCompilerTest::StackArgsIntsFirstImpl() { f3, f4, f5, f6, f7, f8, f9, f10); } -JNI_TEST(StackArgsIntsFirst) +JNI_TEST_CRITICAL(StackArgsIntsFirst) void Java_MyClassNatives_stackArgsFloatsFirst(JNIEnv*, jclass, jfloat f1, jfloat f2, jfloat f3, jfloat f4, jfloat f5, jfloat f6, jfloat f7, @@ -1631,7 +2018,7 @@ void Java_MyClassNatives_stackArgsFloatsFirst(JNIEnv*, jclass, jfloat f1, jfloat void JniCompilerTest::StackArgsFloatsFirstImpl() { SetUpForTest(true, "stackArgsFloatsFirst", "(FFFFFFFFFFIIIIIIIIII)V", - reinterpret_cast(&Java_MyClassNatives_stackArgsFloatsFirst)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_stackArgsFloatsFirst)); jint i1 = 1; jint i2 = 2; @@ -1659,7 +2046,7 @@ void JniCompilerTest::StackArgsFloatsFirstImpl() { i4, i5, i6, i7, i8, i9, i10); } -JNI_TEST(StackArgsFloatsFirst) +JNI_TEST_CRITICAL(StackArgsFloatsFirst) void Java_MyClassNatives_stackArgsMixed(JNIEnv*, jclass, jint i1, jfloat f1, jint i2, jfloat f2, jint i3, jfloat f3, jint i4, jfloat f4, jint i5, @@ -1700,7 +2087,7 @@ void Java_MyClassNatives_stackArgsMixed(JNIEnv*, jclass, jint i1, jfloat f1, jin void JniCompilerTest::StackArgsMixedImpl() { SetUpForTest(true, "stackArgsMixed", "(IFIFIFIFIFIFIFIFIFIF)V", - reinterpret_cast(&Java_MyClassNatives_stackArgsMixed)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_stackArgsMixed)); jint i1 = 1; jint i2 = 2; @@ -1728,7 +2115,7 @@ void JniCompilerTest::StackArgsMixedImpl() { f7, i8, f8, i9, f9, i10, f10); } -JNI_TEST(StackArgsMixed) +JNI_TEST_CRITICAL(StackArgsMixed) void Java_MyClassNatives_stackArgsSignExtendedMips64(JNIEnv*, jclass, jint i1, jint i2, jint i3, jint i4, jint i5, jint i6, jint i7, jint i8) { @@ -1760,7 +2147,7 @@ void Java_MyClassNatives_stackArgsSignExtendedMips64(JNIEnv*, jclass, jint i1, j void JniCompilerTest::StackArgsSignExtendedMips64Impl() { SetUpForTest(true, "stackArgsSignExtendedMips64", "(IIIIIIII)V", - reinterpret_cast(&Java_MyClassNatives_stackArgsSignExtendedMips64)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_stackArgsSignExtendedMips64)); jint i1 = 1; jint i2 = 2; jint i3 = 3; @@ -1773,7 +2160,7 @@ void JniCompilerTest::StackArgsSignExtendedMips64Impl() { env_->CallStaticVoidMethod(jklass_, jmethod_, i1, i2, i3, i4, i5, i6, i7, i8); } -JNI_TEST(StackArgsSignExtendedMips64) +JNI_TEST_CRITICAL(StackArgsSignExtendedMips64) void Java_MyClassNatives_normalNative(JNIEnv*, jclass) { // Intentionally left empty. 
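 // For comparison, the native-side entry points for the three kinds of methods
 // under test look roughly like this (illustrative sketch; the real
 // @CriticalNative handler is defined further down):
 //
 //   void Java_MyClassNatives_normalNative(JNIEnv*, jclass);   // normal JNI
 //   void Java_MyClassNatives_fastNative(JNIEnv*, jclass);     // @FastNative keeps both extras
 //   void Java_MyClassNatives_criticalNative();                // @CriticalNative strips JNIEnv*/jclass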
@@ -1785,15 +2172,18 @@ void JniCompilerTest::NormalNativeImpl() { SetUpForTest(/* direct */ true, "normalNative", "()V", - reinterpret_cast(&Java_MyClassNatives_normalNative)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_normalNative)); ScopedObjectAccess soa(Thread::Current()); ArtMethod* method = soa.DecodeMethod(jmethod_); ASSERT_TRUE(method != nullptr); + EXPECT_FALSE(method->IsAnnotatedWithCriticalNative()); EXPECT_FALSE(method->IsAnnotatedWithFastNative()); } -JNI_TEST(NormalNative) + +// TODO: just rename the java functions to the standard convention and remove duplicated tests +JNI_TEST_NORMAL_ONLY(NormalNative) // Methods annotated with @FastNative are considered "fast native" // -- Check that the annotation lookup succeeds. @@ -1805,14 +2195,53 @@ void JniCompilerTest::FastNativeImpl() { SetUpForTest(/* direct */ true, "fastNative", "()V", - reinterpret_cast(&Java_MyClassNatives_fastNative)); + CURRENT_JNI_WRAPPER(Java_MyClassNatives_fastNative)); ScopedObjectAccess soa(Thread::Current()); ArtMethod* method = soa.DecodeMethod(jmethod_); ASSERT_TRUE(method != nullptr); + EXPECT_FALSE(method->IsAnnotatedWithCriticalNative()); EXPECT_TRUE(method->IsAnnotatedWithFastNative()); } -JNI_TEST(FastNative) + +// TODO: just rename the java functions to the standard convention and remove duplicated tests +JNI_TEST_NORMAL_ONLY(FastNative) + +int gJava_myClassNatives_criticalNative_calls[kJniKindCount] = {}; +// Methods annotated with @CriticalNative are considered "critical native" +// -- Check that the annotation lookup succeeds. +void Java_MyClassNatives_criticalNative() { + gJava_myClassNatives_criticalNative_calls[gCurrentJni]++; +} + +void JniCompilerTest::CriticalNativeImpl() { + SetUpForTest(/* direct */ true, + // Important: Don't change the "current jni" yet to avoid a method name suffix. + "criticalNative", + "()V", + // TODO: Use CURRENT_JNI_WRAPPER instead which is more generic. + reinterpret_cast(&Java_MyClassNatives_criticalNative)); + + // TODO: remove this manual updating of the current JNI. Merge with the other tests. + UpdateCurrentJni(JniKind::kCritical); + ASSERT_TRUE(IsCurrentJniCritical()); + + ScopedObjectAccess soa(Thread::Current()); + ArtMethod* method = soa.DecodeMethod(jmethod_); + ASSERT_TRUE(method != nullptr); + + EXPECT_TRUE(method->IsAnnotatedWithCriticalNative()); + EXPECT_FALSE(method->IsAnnotatedWithFastNative()); + + EXPECT_EQ(0, gJava_myClassNatives_criticalNative_calls[gCurrentJni]); + env_->CallStaticVoidMethod(jklass_, jmethod_); + EXPECT_EQ(1, gJava_myClassNatives_criticalNative_calls[gCurrentJni]); + + gJava_myClassNatives_criticalNative_calls[gCurrentJni] = 0; +} + +// TODO: just rename the java functions to the standard convention and remove duplicated tests +JNI_TEST_NORMAL_ONLY(CriticalNative) } // namespace art diff --git a/compiler/jni/quick/arm/calling_convention_arm.cc b/compiler/jni/quick/arm/calling_convention_arm.cc index 0d16260f4..3f29ae5dc 100644 --- a/compiler/jni/quick/arm/calling_convention_arm.cc +++ b/compiler/jni/quick/arm/calling_convention_arm.cc @@ -24,15 +24,33 @@ namespace arm { static_assert(kArmPointerSize == PointerSize::k32, "Unexpected ARM pointer size"); -// Used by hard float. +// +// JNI calling convention constants. +// + +// List of parameters passed via registers for JNI. +// JNI uses soft-float, so there is only a GPR list. 
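+// (Per the base AAPCS, the first four 32-bit argument words go in r0-r3 and the
+// rest go on the stack; 64-bit values must start at an even register number.)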
+static const Register kJniArgumentRegisters[] = { + R0, R1, R2, R3 +}; + +static const size_t kJniArgumentRegisterCount = arraysize(kJniArgumentRegisters); + +// +// Managed calling convention constants. +// + +// Used by hard float. (General purpose registers.) static const Register kHFCoreArgumentRegisters[] = { R0, R1, R2, R3 }; +// (VFP single-precision registers.) static const SRegister kHFSArgumentRegisters[] = { S0, S1, S2, S3, S4, S5, S6, S7, S8, S9, S10, S11, S12, S13, S14, S15 }; +// (VFP double-precision registers.) static const DRegister kHFDArgumentRegisters[] = { D0, D1, D2, D3, D4, D5, D6, D7 }; @@ -40,6 +58,10 @@ static const DRegister kHFDArgumentRegisters[] = { static_assert(arraysize(kHFDArgumentRegisters) * 2 == arraysize(kHFSArgumentRegisters), "ks d argument registers mismatch"); +// +// Shared managed+JNI calling convention constants. +// + static constexpr ManagedRegister kCalleeSaveRegisters[] = { // Core registers. ArmManagedRegister::FromCoreRegister(R5), @@ -255,23 +277,95 @@ const ManagedRegisterEntrySpills& ArmManagedRuntimeCallingConvention::EntrySpill } // JNI calling convention -ArmJniCallingConvention::ArmJniCallingConvention(bool is_static, bool is_synchronized, +ArmJniCallingConvention::ArmJniCallingConvention(bool is_static, + bool is_synchronized, + bool is_critical_native, const char* shorty) - : JniCallingConvention(is_static, is_synchronized, shorty, kArmPointerSize) { - // Compute padding to ensure longs and doubles are not split in AAPCS. Ignore the 'this' jobject - // or jclass for static methods and the JNIEnv. We start at the aligned register r2. - size_t padding = 0; - for (size_t cur_arg = IsStatic() ? 0 : 1, cur_reg = 2; cur_arg < NumArgs(); cur_arg++) { + : JniCallingConvention(is_static, + is_synchronized, + is_critical_native, + shorty, + kArmPointerSize) { + // AAPCS 4.1 specifies fundamental alignments for each type. All of our stack arguments are + // usually 4-byte aligned, however longs and doubles must be 8 bytes aligned. Add padding to + // maintain 8-byte alignment invariant. + // + // Compute padding to ensure longs and doubles are not split in AAPCS. + size_t shift = 0; + + size_t cur_arg, cur_reg; + if (LIKELY(HasExtraArgumentsForJni())) { + // Ignore the 'this' jobject or jclass for static methods and the JNIEnv. + // We start at the aligned register r2. + // + // Ignore the first 2 parameters because they are guaranteed to be aligned. + cur_arg = NumImplicitArgs(); // skip the "this" arg. + cur_reg = 2; // skip {r0=JNIEnv, r1=jobject} / {r0=JNIEnv, r1=jclass} parameters (start at r2). + } else { + // Check every parameter. + cur_arg = 0; + cur_reg = 0; + } + + // TODO: Maybe should just use IsCurrentParamALongOrDouble instead to be cleaner? + // (this just seems like an unnecessary micro-optimization). + + // Shift across a logical register mapping that looks like: + // + // | r0 | r1 | r2 | r3 | SP | SP+4| SP+8 | SP+12 | ... | SP+n | SP+n+4 | + // + // (where SP is some arbitrary stack pointer that our 0th stack arg would go into). + // + // Any time there would normally be a long/double in an odd logical register, + // we have to push out the rest of the mappings by 4 bytes to maintain an 8-byte alignment. + // + // This works for both physical register pairs {r0, r1}, {r2, r3} and for when + // the value is on the stack. 
+ // + // For example: + // (a) long would normally go into r1, but we shift it into r2 + // | INT | (PAD) | LONG | + // | r0 | r1 | r2 | r3 | + // + // (b) long would normally go into r3, but we shift it into SP + // | INT | INT | INT | (PAD) | LONG | + // | r0 | r1 | r2 | r3 | SP+4 SP+8| + // + // where INT is any <=4 byte arg, and LONG is any 8-byte arg. + for (; cur_arg < NumArgs(); cur_arg++) { if (IsParamALongOrDouble(cur_arg)) { - if ((cur_reg & 1) != 0) { - padding += 4; + if ((cur_reg & 1) != 0) { // check that it's in a logical contiguous register pair + shift += 4; cur_reg++; // additional bump to ensure alignment } - cur_reg++; // additional bump to skip extra long word + cur_reg += 2; // bump the iterator twice for every long argument + } else { + cur_reg++; // bump the iterator for every non-long argument } - cur_reg++; // bump the iterator for every argument } - padding_ = padding; + + if (cur_reg < kJniArgumentRegisterCount) { + // As a special case when, as a result of shifting (or not) there are no arguments on the stack, + // we actually have 0 stack padding. + // + // For example with @CriticalNative and: + // (int, long) -> shifts the long but doesn't need to pad the stack + // + // shift + // \/ + // | INT | (PAD) | LONG | (EMPTY) ... + // | r0 | r1 | r2 | r3 | SP ... + // /\ + // no stack padding + padding_ = 0; + } else { + padding_ = shift; + } + + // TODO: add some new JNI tests for @CriticalNative that introduced new edge cases + // (a) Using r0,r1 pair = f(long,...) + // (b) Shifting r1 long into r2,r3 pair = f(int, long, int, ...); + // (c) Shifting but not introducing a stack padding = f(int, long); } uint32_t ArmJniCallingConvention::CoreSpillMask() const { @@ -289,15 +383,34 @@ ManagedRegister ArmJniCallingConvention::ReturnScratchRegister() const { size_t ArmJniCallingConvention::FrameSize() { // Method*, LR and callee save area size, local reference segment state - size_t frame_data_size = static_cast(kArmPointerSize) - + (2 + CalleeSaveRegisters().size()) * kFramePointerSize; - // References plus 2 words for HandleScope header - size_t handle_scope_size = HandleScope::SizeOf(kArmPointerSize, ReferenceCount()); + const size_t method_ptr_size = static_cast(kArmPointerSize); + const size_t lr_return_addr_size = kFramePointerSize; + const size_t callee_save_area_size = CalleeSaveRegisters().size() * kFramePointerSize; + size_t frame_data_size = method_ptr_size + lr_return_addr_size + callee_save_area_size; + + if (LIKELY(HasLocalReferenceSegmentState())) { + // local reference segment state + frame_data_size += kFramePointerSize; + // TODO: Probably better to use sizeof(IRTSegmentState) here... + } + + // References plus link_ (pointer) and number_of_references_ (uint32_t) for HandleScope header + const size_t handle_scope_size = HandleScope::SizeOf(kArmPointerSize, ReferenceCount()); + + size_t total_size = frame_data_size; + if (LIKELY(HasHandleScope())) { + // HandleScope is sometimes excluded. + total_size += handle_scope_size; // handle scope size + } + // Plus return value spill area size - return RoundUp(frame_data_size + handle_scope_size + SizeOfReturnValue(), kStackAlignment); + total_size += SizeOfReturnValue(); + + return RoundUp(total_size, kStackAlignment); } size_t ArmJniCallingConvention::OutArgSize() { + // TODO: Identical to x86_64 except for also adding additional padding. 
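+  // Worked example (illustrative, assuming a 16-byte kStackAlignment):
+  // for a @CriticalNative f(int, long, int, long) the constructor computes
+  //
+  //   | INT | (PAD) | LONG  | INT  | (PAD) | LONG  |
+  //   | r0  |  r1   | r2,r3 | SP+0 | SP+4  | SP+8  |
+  //
+  // i.e. NumberOfOutgoingStackArgs() = 6 slots - 4 register slots = 2 and
+  // padding_ = 8, so OutArgSize() = RoundUp(2 * 4 + 8, 16) = 16 bytes.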
   return RoundUp(NumberOfOutgoingStackArgs() * kFramePointerSize + padding_, kStackAlignment);
 }
 
@@ -309,55 +422,70 @@ ArrayRef<const ManagedRegister> ArmJniCallingConvention::CalleeSaveRegisters() c
 // JniCallingConvention ABI follows AAPCS where longs and doubles must occur
 // in even register numbers and stack slots
 void ArmJniCallingConvention::Next() {
+  // Update the iterator by usual JNI rules.
   JniCallingConvention::Next();
-  size_t arg_pos = itr_args_ - NumberOfExtraArgumentsForJni();
-  if ((itr_args_ >= 2) &&
-      (arg_pos < NumArgs()) &&
-      IsParamALongOrDouble(arg_pos)) {
-    // itr_slots_ needs to be an even number, according to AAPCS.
-    if ((itr_slots_ & 0x1u) != 0) {
+
+  if (LIKELY(HasNext())) {  // Avoid CHECK failure for IsCurrentParam
+    // Ensure slot is 8-byte aligned for longs/doubles (AAPCS).
+    if (IsCurrentParamALongOrDouble() && ((itr_slots_ & 0x1u) != 0)) {
+      // itr_slots_ needs to be an even number, according to AAPCS.
       itr_slots_++;
     }
   }
 }
 
 bool ArmJniCallingConvention::IsCurrentParamInRegister() {
-  return itr_slots_ < 4;
+  return itr_slots_ < kJniArgumentRegisterCount;
 }
 
 bool ArmJniCallingConvention::IsCurrentParamOnStack() {
   return !IsCurrentParamInRegister();
 }
 
-static const Register kJniArgumentRegisters[] = {
-  R0, R1, R2, R3
-};
 ManagedRegister ArmJniCallingConvention::CurrentParamRegister() {
-  CHECK_LT(itr_slots_, 4u);
-  int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni();
-  if ((itr_args_ >= 2) && IsParamALongOrDouble(arg_pos)) {
-    CHECK_EQ(itr_slots_, 2u);
-    return ArmManagedRegister::FromRegisterPair(R2_R3);
+  CHECK_LT(itr_slots_, kJniArgumentRegisterCount);
+  if (IsCurrentParamALongOrDouble()) {
+    // AAPCS 5.1.1 requires 64-bit values to be in a consecutive register pair:
+    // "A double-word sized type is passed in two consecutive registers (e.g., r0 and r1, or r2 and
+    // r3). The content of the registers is as if the value had been loaded from memory
+    // representation with a single LDM instruction."
+    if (itr_slots_ == 0u) {
+      return ArmManagedRegister::FromRegisterPair(R0_R1);
+    } else if (itr_slots_ == 2u) {
+      return ArmManagedRegister::FromRegisterPair(R2_R3);
+    } else {
+      // The register can either be R0 (+R1) or R2 (+R3). Cannot be other values.
+      LOG(FATAL) << "Invalid iterator register position for a long/double " << itr_args_;
+      UNREACHABLE();
+    }
   } else {
-    return
-      ArmManagedRegister::FromCoreRegister(kJniArgumentRegisters[itr_slots_]);
+    // All other types can fit into one register.
+    return ArmManagedRegister::FromCoreRegister(kJniArgumentRegisters[itr_slots_]);
  }
 }
 
 FrameOffset ArmJniCallingConvention::CurrentParamStackOffset() {
-  CHECK_GE(itr_slots_, 4u);
+  CHECK_GE(itr_slots_, kJniArgumentRegisterCount);
   size_t offset =
-      displacement_.Int32Value() - OutArgSize() + ((itr_slots_ - 4) * kFramePointerSize);
+      displacement_.Int32Value()
+          - OutArgSize()
+          + ((itr_slots_ - kJniArgumentRegisterCount) * kFramePointerSize);
   CHECK_LT(offset, OutArgSize());
   return FrameOffset(offset);
 }
 
 size_t ArmJniCallingConvention::NumberOfOutgoingStackArgs() {
-  size_t static_args = IsStatic() ? 1 : 0;  // count jclass
+  size_t static_args = HasSelfClass() ? 1 : 0;  // count jclass
   // regular argument parameters and this
-  size_t param_args = NumArgs() + NumLongOrDoubleArgs();
+  size_t param_args = NumArgs() + NumLongOrDoubleArgs();  // count 8-byte args twice
+  // TODO: Why are longs/doubles counted twice here, but not the JNIEnv*?
   // count JNIEnv* less arguments in registers
-  return static_args + param_args + 1 - 4;
+  size_t internal_args = (HasJniEnv() ?
1 : 0 /* jni env */); + size_t total_args = static_args + param_args + internal_args; + + return total_args - std::min(kJniArgumentRegisterCount, static_cast(total_args)); + + // TODO: Very similar to x86_64 except for the return pc. } } // namespace arm diff --git a/compiler/jni/quick/arm/calling_convention_arm.h b/compiler/jni/quick/arm/calling_convention_arm.h index 7c717cc6b..249f20225 100644 --- a/compiler/jni/quick/arm/calling_convention_arm.h +++ b/compiler/jni/quick/arm/calling_convention_arm.h @@ -52,7 +52,10 @@ class ArmManagedRuntimeCallingConvention FINAL : public ManagedRuntimeCallingCon class ArmJniCallingConvention FINAL : public JniCallingConvention { public: - ArmJniCallingConvention(bool is_static, bool is_synchronized, const char* shorty); + ArmJniCallingConvention(bool is_static, + bool is_synchronized, + bool is_critical_native, + const char* shorty); ~ArmJniCallingConvention() OVERRIDE {} // Calling convention ManagedRegister ReturnRegister() OVERRIDE; diff --git a/compiler/jni/quick/arm64/calling_convention_arm64.cc b/compiler/jni/quick/arm64/calling_convention_arm64.cc index afa707d2a..3fb7b5628 100644 --- a/compiler/jni/quick/arm64/calling_convention_arm64.cc +++ b/compiler/jni/quick/arm64/calling_convention_arm64.cc @@ -24,6 +24,13 @@ namespace arm64 { static_assert(kArm64PointerSize == PointerSize::k64, "Unexpected ARM64 pointer size"); +// Up to how many float-like (float, double) args can be enregistered. +// The rest of the args must go on the stack. +constexpr size_t kMaxFloatOrDoubleRegisterArguments = 8u; +// Up to how many integer-like (pointers, objects, longs, int, short, bool, etc) args can be +// enregistered. The rest of the args must go on the stack. +constexpr size_t kMaxIntLikeRegisterArguments = 8u; + static const XRegister kXArgumentRegisters[] = { X0, X1, X2, X3, X4, X5, X6, X7 }; @@ -211,9 +218,11 @@ const ManagedRegisterEntrySpills& Arm64ManagedRuntimeCallingConvention::EntrySpi } // JNI calling convention -Arm64JniCallingConvention::Arm64JniCallingConvention(bool is_static, bool is_synchronized, +Arm64JniCallingConvention::Arm64JniCallingConvention(bool is_static, + bool is_synchronized, + bool is_critical_native, const char* shorty) - : JniCallingConvention(is_static, is_synchronized, shorty, kArm64PointerSize) { + : JniCallingConvention(is_static, is_synchronized, is_critical_native, shorty, kArm64PointerSize) { } uint32_t Arm64JniCallingConvention::CoreSpillMask() const { @@ -230,38 +239,59 @@ ManagedRegister Arm64JniCallingConvention::ReturnScratchRegister() const { size_t Arm64JniCallingConvention::FrameSize() { // Method*, callee save area size, local reference segment state - size_t frame_data_size = kFramePointerSize + - CalleeSaveRegisters().size() * kFramePointerSize + sizeof(uint32_t); + // + // (Unlike x86_64, do not include return address, and the segment state is uint32 + // instead of pointer). + size_t method_ptr_size = static_cast(kFramePointerSize); + size_t callee_save_area_size = CalleeSaveRegisters().size() * kFramePointerSize; + + size_t frame_data_size = method_ptr_size + callee_save_area_size; + if (LIKELY(HasLocalReferenceSegmentState())) { + frame_data_size += sizeof(uint32_t); + } // References plus 2 words for HandleScope header size_t handle_scope_size = HandleScope::SizeOf(kArm64PointerSize, ReferenceCount()); + + size_t total_size = frame_data_size; + if (LIKELY(HasHandleScope())) { + // HandleScope is sometimes excluded. 
+ total_size += handle_scope_size; // handle scope size + } + // Plus return value spill area size - return RoundUp(frame_data_size + handle_scope_size + SizeOfReturnValue(), kStackAlignment); + total_size += SizeOfReturnValue(); + + return RoundUp(total_size, kStackAlignment); } size_t Arm64JniCallingConvention::OutArgSize() { + // Same as X86_64 return RoundUp(NumberOfOutgoingStackArgs() * kFramePointerSize, kStackAlignment); } ArrayRef Arm64JniCallingConvention::CalleeSaveRegisters() const { + // Same as X86_64 return ArrayRef(kCalleeSaveRegisters); } bool Arm64JniCallingConvention::IsCurrentParamInRegister() { if (IsCurrentParamAFloatOrDouble()) { - return (itr_float_and_doubles_ < 8); + return (itr_float_and_doubles_ < kMaxFloatOrDoubleRegisterArguments); } else { - return ((itr_args_ - itr_float_and_doubles_) < 8); + return ((itr_args_ - itr_float_and_doubles_) < kMaxIntLikeRegisterArguments); } + // TODO: Can we just call CurrentParamRegister to figure this out? } bool Arm64JniCallingConvention::IsCurrentParamOnStack() { + // Is this ever not the same for all the architectures? return !IsCurrentParamInRegister(); } ManagedRegister Arm64JniCallingConvention::CurrentParamRegister() { CHECK(IsCurrentParamInRegister()); if (IsCurrentParamAFloatOrDouble()) { - CHECK_LT(itr_float_and_doubles_, 8u); + CHECK_LT(itr_float_and_doubles_, kMaxFloatOrDoubleRegisterArguments); if (IsCurrentParamADouble()) { return Arm64ManagedRegister::FromDRegister(kDArgumentRegisters[itr_float_and_doubles_]); } else { @@ -269,7 +299,7 @@ ManagedRegister Arm64JniCallingConvention::CurrentParamRegister() { } } else { int gp_reg = itr_args_ - itr_float_and_doubles_; - CHECK_LT(static_cast(gp_reg), 8u); + CHECK_LT(static_cast(gp_reg), kMaxIntLikeRegisterArguments); if (IsCurrentParamALong() || IsCurrentParamAReference() || IsCurrentParamJniEnv()) { return Arm64ManagedRegister::FromXRegister(kXArgumentRegisters[gp_reg]); } else { @@ -281,20 +311,30 @@ ManagedRegister Arm64JniCallingConvention::CurrentParamRegister() { FrameOffset Arm64JniCallingConvention::CurrentParamStackOffset() { CHECK(IsCurrentParamOnStack()); size_t args_on_stack = itr_args_ - - std::min(8u, itr_float_and_doubles_) - - std::min(8u, (itr_args_ - itr_float_and_doubles_)); + - std::min(kMaxFloatOrDoubleRegisterArguments, + static_cast(itr_float_and_doubles_)) + - std::min(kMaxIntLikeRegisterArguments, + static_cast(itr_args_ - itr_float_and_doubles_)); size_t offset = displacement_.Int32Value() - OutArgSize() + (args_on_stack * kFramePointerSize); CHECK_LT(offset, OutArgSize()); return FrameOffset(offset); + // TODO: Seems identical to X86_64 code. } size_t Arm64JniCallingConvention::NumberOfOutgoingStackArgs() { // all arguments including JNI args size_t all_args = NumArgs() + NumberOfExtraArgumentsForJni(); - size_t all_stack_args = all_args - - std::min(8u, static_cast(NumFloatOrDoubleArgs())) - - std::min(8u, static_cast((all_args - NumFloatOrDoubleArgs()))); + DCHECK_GE(all_args, NumFloatOrDoubleArgs()); + + size_t all_stack_args = + all_args + - std::min(kMaxFloatOrDoubleRegisterArguments, + static_cast(NumFloatOrDoubleArgs())) + - std::min(kMaxIntLikeRegisterArguments, + static_cast((all_args - NumFloatOrDoubleArgs()))); + + // TODO: Seems similar to X86_64 code except it doesn't count return pc. 
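+  // Worked example (illustrative): for the static stackArgsIntsFirst test,
+  // i.e. (10 ints, 10 floats), all_args = 20 + 2 (JNIEnv*, jclass) = 22.
+  // 8 of the 12 integer-like args are enregistered in x0-x7 and 8 of the
+  // 10 floats in s0-s7, leaving all_stack_args = 22 - 8 - 8 = 6.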
return all_stack_args; } diff --git a/compiler/jni/quick/arm64/calling_convention_arm64.h b/compiler/jni/quick/arm64/calling_convention_arm64.h index 90b12e546..56189427b 100644 --- a/compiler/jni/quick/arm64/calling_convention_arm64.h +++ b/compiler/jni/quick/arm64/calling_convention_arm64.h @@ -52,7 +52,10 @@ class Arm64ManagedRuntimeCallingConvention FINAL : public ManagedRuntimeCallingC class Arm64JniCallingConvention FINAL : public JniCallingConvention { public: - Arm64JniCallingConvention(bool is_static, bool is_synchronized, const char* shorty); + Arm64JniCallingConvention(bool is_static, + bool is_synchronized, + bool is_critical_native, + const char* shorty); ~Arm64JniCallingConvention() OVERRIDE {} // Calling convention ManagedRegister ReturnRegister() OVERRIDE; diff --git a/compiler/jni/quick/calling_convention.cc b/compiler/jni/quick/calling_convention.cc index c7ed9c92a..9859b5da3 100644 --- a/compiler/jni/quick/calling_convention.cc +++ b/compiler/jni/quick/calling_convention.cc @@ -149,19 +149,44 @@ bool ManagedRuntimeCallingConvention::IsCurrentParamALong() { std::unique_ptr JniCallingConvention::Create(ArenaAllocator* arena, bool is_static, bool is_synchronized, + bool is_critical_native, const char* shorty, InstructionSet instruction_set) { + if (UNLIKELY(is_critical_native)) { + // Sanity check that the requested JNI instruction set + // is supported for critical natives. Not every one is. + switch (instruction_set) { + case kX86_64: + case kX86: + case kArm64: + case kArm: + case kThumb2: + break; + default: + is_critical_native = false; + LOG(WARNING) << "@CriticalNative support not implemented for " << instruction_set + << "; will crash at runtime if trying to invoke such a method."; + // TODO: implement for MIPS/MIPS64 + } + } + switch (instruction_set) { #ifdef ART_ENABLE_CODEGEN_arm case kArm: case kThumb2: return std::unique_ptr( - new (arena) arm::ArmJniCallingConvention(is_static, is_synchronized, shorty)); + new (arena) arm::ArmJniCallingConvention(is_static, + is_synchronized, + is_critical_native, + shorty)); #endif #ifdef ART_ENABLE_CODEGEN_arm64 case kArm64: return std::unique_ptr( - new (arena) arm64::Arm64JniCallingConvention(is_static, is_synchronized, shorty)); + new (arena) arm64::Arm64JniCallingConvention(is_static, + is_synchronized, + is_critical_native, + shorty)); #endif #ifdef ART_ENABLE_CODEGEN_mips case kMips: @@ -176,12 +201,18 @@ std::unique_ptr JniCallingConvention::Create(ArenaAllocato #ifdef ART_ENABLE_CODEGEN_x86 case kX86: return std::unique_ptr( - new (arena) x86::X86JniCallingConvention(is_static, is_synchronized, shorty)); + new (arena) x86::X86JniCallingConvention(is_static, + is_synchronized, + is_critical_native, + shorty)); #endif #ifdef ART_ENABLE_CODEGEN_x86_64 case kX86_64: return std::unique_ptr( - new (arena) x86_64::X86_64JniCallingConvention(is_static, is_synchronized, shorty)); + new (arena) x86_64::X86_64JniCallingConvention(is_static, + is_synchronized, + is_critical_native, + shorty)); #endif default: LOG(FATAL) << "Unknown InstructionSet: " << instruction_set; @@ -199,27 +230,36 @@ FrameOffset JniCallingConvention::SavedLocalReferenceCookieOffset() const { } FrameOffset JniCallingConvention::ReturnValueSaveLocation() const { - // Segment state is 4 bytes long - return FrameOffset(SavedLocalReferenceCookieOffset().Int32Value() + 4); + if (LIKELY(HasHandleScope())) { + // Initial offset already includes the displacement. + // -- Remove the additional local reference cookie offset if we don't have a handle scope. 
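+    // Resulting layout: the return value is spilled immediately after the
+    // 4-byte cookie, i.e. [ ... | saved cookie | return value | ... ].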
+ const size_t saved_local_reference_cookie_offset = + SavedLocalReferenceCookieOffset().Int32Value(); + // Segment state is 4 bytes long + const size_t segment_state_size = 4; + return FrameOffset(saved_local_reference_cookie_offset + segment_state_size); + } else { + // Include only the initial Method* as part of the offset. + CHECK_LT(displacement_.SizeValue(), + static_cast(std::numeric_limits::max())); + return FrameOffset(displacement_.Int32Value() + static_cast(frame_pointer_size_)); + } } bool JniCallingConvention::HasNext() { - if (itr_args_ <= kObjectOrClass) { + if (IsCurrentArgExtraForJni()) { return true; } else { - unsigned int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni(); + unsigned int arg_pos = GetIteratorPositionWithinShorty(); return arg_pos < NumArgs(); } } void JniCallingConvention::Next() { CHECK(HasNext()); - if (itr_args_ > kObjectOrClass) { - int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni(); - if (IsParamALongOrDouble(arg_pos)) { - itr_longs_and_doubles_++; - itr_slots_++; - } + if (IsCurrentParamALong() || IsCurrentParamADouble()) { + itr_longs_and_doubles_++; + itr_slots_++; } if (IsCurrentParamAFloatOrDouble()) { itr_float_and_doubles_++; @@ -227,63 +267,73 @@ void JniCallingConvention::Next() { if (IsCurrentParamAReference()) { itr_refs_++; } + // This default/fallthrough case also covers the extra JNIEnv* argument, + // as well as any other single-slot primitives. itr_args_++; itr_slots_++; } bool JniCallingConvention::IsCurrentParamAReference() { - switch (itr_args_) { - case kJniEnv: - return false; // JNIEnv* - case kObjectOrClass: - return true; // jobject or jclass - default: { - int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni(); - return IsParamAReference(arg_pos); - } + bool return_value; + if (SwitchExtraJniArguments(itr_args_, + false, // JNIEnv* + true, // jobject or jclass + /* out parameters */ + &return_value)) { + return return_value; + } else { + int arg_pos = GetIteratorPositionWithinShorty(); + return IsParamAReference(arg_pos); } } + bool JniCallingConvention::IsCurrentParamJniEnv() { + if (UNLIKELY(!HasJniEnv())) { + return false; + } return (itr_args_ == kJniEnv); } bool JniCallingConvention::IsCurrentParamAFloatOrDouble() { - switch (itr_args_) { - case kJniEnv: - return false; // JNIEnv* - case kObjectOrClass: - return false; // jobject or jclass - default: { - int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni(); - return IsParamAFloatOrDouble(arg_pos); - } + bool return_value; + if (SwitchExtraJniArguments(itr_args_, + false, // jnienv* + false, // jobject or jclass + /* out parameters */ + &return_value)) { + return return_value; + } else { + int arg_pos = GetIteratorPositionWithinShorty(); + return IsParamAFloatOrDouble(arg_pos); } } bool JniCallingConvention::IsCurrentParamADouble() { - switch (itr_args_) { - case kJniEnv: - return false; // JNIEnv* - case kObjectOrClass: - return false; // jobject or jclass - default: { - int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni(); - return IsParamADouble(arg_pos); - } + bool return_value; + if (SwitchExtraJniArguments(itr_args_, + false, // jnienv* + false, // jobject or jclass + /* out parameters */ + &return_value)) { + return return_value; + } else { + int arg_pos = GetIteratorPositionWithinShorty(); + return IsParamADouble(arg_pos); } } bool JniCallingConvention::IsCurrentParamALong() { - switch (itr_args_) { - case kJniEnv: - return false; // JNIEnv* - case kObjectOrClass: - return false; // jobject or jclass - default: { - int arg_pos = itr_args_ - 
NumberOfExtraArgumentsForJni(); - return IsParamALong(arg_pos); - } + bool return_value; + if (SwitchExtraJniArguments(itr_args_, + false, // jnienv* + false, // jobject or jclass + /* out parameters */ + &return_value)) { + return return_value; + } else { + int arg_pos = GetIteratorPositionWithinShorty(); + return IsParamALong(arg_pos); } } @@ -297,19 +347,93 @@ FrameOffset JniCallingConvention::CurrentParamHandleScopeEntryOffset() { return FrameOffset(result); } -size_t JniCallingConvention::CurrentParamSize() { - if (itr_args_ <= kObjectOrClass) { +size_t JniCallingConvention::CurrentParamSize() const { + if (IsCurrentArgExtraForJni()) { return static_cast(frame_pointer_size_); // JNIEnv or jobject/jclass } else { - int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni(); + int arg_pos = GetIteratorPositionWithinShorty(); return ParamSize(arg_pos); } } -size_t JniCallingConvention::NumberOfExtraArgumentsForJni() { - // The first argument is the JNIEnv*. - // Static methods have an extra argument which is the jclass. - return IsStatic() ? 2 : 1; +size_t JniCallingConvention::NumberOfExtraArgumentsForJni() const { + if (LIKELY(HasExtraArgumentsForJni())) { + // The first argument is the JNIEnv*. + // Static methods have an extra argument which is the jclass. + return IsStatic() ? 2 : 1; + } else { + // Critical natives exclude the JNIEnv and the jclass/this parameters. + return 0; + } +} + +bool JniCallingConvention::HasHandleScope() const { + // Exclude HandleScope for @CriticalNative methods for optimization speed. + return is_critical_native_ == false; +} + +bool JniCallingConvention::HasLocalReferenceSegmentState() const { + // Exclude local reference segment states for @CriticalNative methods for optimization speed. + return is_critical_native_ == false; +} + +bool JniCallingConvention::HasJniEnv() const { + // Exclude "JNIEnv*" parameter for @CriticalNative methods. + return HasExtraArgumentsForJni(); +} + +bool JniCallingConvention::HasSelfClass() const { + if (!IsStatic()) { + // Virtual functions: There is never an implicit jclass parameter. + return false; + } else { + // Static functions: There is an implicit jclass parameter unless it's @CriticalNative. + return HasExtraArgumentsForJni(); + } +} + +bool JniCallingConvention::HasExtraArgumentsForJni() const { + // @CriticalNative jni implementations exclude both JNIEnv* and the jclass/jobject parameters. + return is_critical_native_ == false; } +unsigned int JniCallingConvention::GetIteratorPositionWithinShorty() const { + // We need to subtract out the extra JNI arguments if we want to use this iterator position + // with the inherited CallingConvention member functions, which rely on scanning the shorty. + // Note that our shorty does *not* include the JNIEnv, jclass/jobject parameters. + DCHECK_GE(itr_args_, NumberOfExtraArgumentsForJni()); + return itr_args_ - NumberOfExtraArgumentsForJni(); +} + +bool JniCallingConvention::IsCurrentArgExtraForJni() const { + if (UNLIKELY(!HasExtraArgumentsForJni())) { + return false; // If there are no extra args, we can never be an extra. + } + // Only parameters kJniEnv and kObjectOrClass are considered extra. 
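+  // For example, for a normal instance method itr_args_ == 0 is the JNIEnv*,
+  // itr_args_ == 1 is the jobject, and positions >= 2 come from the shorty;
+  // a @CriticalNative method never reaches this line (no extra args at all).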
+ return itr_args_ <= kObjectOrClass; +} + +bool JniCallingConvention::SwitchExtraJniArguments(size_t switch_value, + bool case_jni_env, + bool case_object_or_class, + /* out parameters */ + bool* return_value) const { + DCHECK(return_value != nullptr); + if (UNLIKELY(!HasExtraArgumentsForJni())) { + return false; + } + + switch (switch_value) { + case kJniEnv: + *return_value = case_jni_env; + return true; + case kObjectOrClass: + *return_value = case_object_or_class; + return true; + default: + return false; + } +} + + } // namespace art diff --git a/compiler/jni/quick/calling_convention.h b/compiler/jni/quick/calling_convention.h index 995fa51d4..3d8914625 100644 --- a/compiler/jni/quick/calling_convention.h +++ b/compiler/jni/quick/calling_convention.h @@ -161,6 +161,12 @@ class CallingConvention : public DeletableArenaObject Create(ArenaAllocator* arena, bool is_static, bool is_synchronized, + bool is_critical_native, const char* shorty, InstructionSet instruction_set); @@ -288,7 +295,8 @@ class JniCallingConvention : public CallingConvention { // always at the bottom of a frame, but this doesn't work for outgoing // native args). Includes alignment. virtual size_t FrameSize() = 0; - // Size of outgoing arguments, including alignment + // Size of outgoing arguments (stack portion), including alignment. + // -- Arguments that are passed via registers are excluded from this size. virtual size_t OutArgSize() = 0; // Number of references in stack indirect reference table size_t ReferenceCount() const; @@ -319,8 +327,11 @@ class JniCallingConvention : public CallingConvention { bool IsCurrentParamAFloatOrDouble(); bool IsCurrentParamADouble(); bool IsCurrentParamALong(); + bool IsCurrentParamALongOrDouble() { + return IsCurrentParamALong() || IsCurrentParamADouble(); + } bool IsCurrentParamJniEnv(); - size_t CurrentParamSize(); + size_t CurrentParamSize() const; virtual bool IsCurrentParamInRegister() = 0; virtual bool IsCurrentParamOnStack() = 0; virtual ManagedRegister CurrentParamRegister() = 0; @@ -359,18 +370,62 @@ class JniCallingConvention : public CallingConvention { kObjectOrClass = 1 }; + // TODO: remove this constructor once all are changed to the below one. JniCallingConvention(bool is_static, bool is_synchronized, const char* shorty, PointerSize frame_pointer_size) - : CallingConvention(is_static, is_synchronized, shorty, frame_pointer_size) {} + : CallingConvention(is_static, is_synchronized, shorty, frame_pointer_size), + is_critical_native_(false) {} + + JniCallingConvention(bool is_static, + bool is_synchronized, + bool is_critical_native, + const char* shorty, + PointerSize frame_pointer_size) + : CallingConvention(is_static, is_synchronized, shorty, frame_pointer_size), + is_critical_native_(is_critical_native) {} // Number of stack slots for outgoing arguments, above which the handle scope is // located virtual size_t NumberOfOutgoingStackArgs() = 0; protected: - size_t NumberOfExtraArgumentsForJni(); + size_t NumberOfExtraArgumentsForJni() const; + + // Does the transition have a StackHandleScope? + bool HasHandleScope() const; + // Does the transition have a local reference segment state? + bool HasLocalReferenceSegmentState() const; + // Has a JNIEnv* parameter implicitly? + bool HasJniEnv() const; + // Has a 'jclass' parameter implicitly? + bool HasSelfClass() const; + + // Are there extra JNI arguments (JNIEnv* and maybe jclass)? + bool HasExtraArgumentsForJni() const; + + // Returns the position of itr_args_, fixed up by removing the offset of extra JNI arguments. 
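+  // (e.g. with a JNIEnv* and a jclass present, itr_args_ == 2 maps to shorty
+  // argument 0; for @CriticalNative, itr_args_ is already the shorty position.)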
+ unsigned int GetIteratorPositionWithinShorty() const; + + // Is the current argument (at the iterator) an extra argument for JNI? + bool IsCurrentArgExtraForJni() const; + + const bool is_critical_native_; + + private: + // Shorthand for switching on the switch value but only IF there are extra JNI arguments. + // + // Puts the case value into return_value. + // * (switch_value == kJniEnv) => case_jni_env + // * (switch_value == kObjectOrClass) => case_object_or_class + // + // Returns false otherwise (or if there are no extra JNI arguments). + bool SwitchExtraJniArguments(size_t switch_value, + bool case_jni_env, + bool case_object_or_class, + /* out parameters */ + bool* return_value) const; }; } // namespace art diff --git a/compiler/jni/quick/jni_compiler.cc b/compiler/jni/quick/jni_compiler.cc index d092c3f1f..7e58d789d 100644 --- a/compiler/jni/quick/jni_compiler.cc +++ b/compiler/jni/quick/jni_compiler.cc @@ -90,8 +90,10 @@ static CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver* driver, const InstructionSetFeatures* instruction_set_features = driver->GetInstructionSetFeatures(); // i.e. if the method was annotated with @FastNative - const bool is_fast_native = - (static_cast(optimization_flags) & Compiler::kFastNative) != 0; + const bool is_fast_native = (optimization_flags == Compiler::kFastNative); + + // i.e. if the method was annotated with @CriticalNative + bool is_critical_native = (optimization_flags == Compiler::kCriticalNative); VLOG(jni) << "JniCompile: Method :: " << art::PrettyMethod(method_idx, dex_file, /* with signature */ true) @@ -102,12 +104,50 @@ static CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver* driver, << art::PrettyMethod(method_idx, dex_file, /* with signature */ true); } + if (UNLIKELY(is_critical_native)) { + VLOG(jni) << "JniCompile: Critical native method detected :: " + << art::PrettyMethod(method_idx, dex_file, /* with signature */ true); + } + + if (kIsDebugBuild) { + // Don't allow both @FastNative and @CriticalNative. They are mutually exclusive. 
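+    // A sketch of an offending declaration that this check catches (hypothetical):
+    //   @FastNative @CriticalNative static native void bogus();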
+    if (UNLIKELY(is_fast_native && is_critical_native)) {
+      LOG(FATAL) << "JniCompile: Method cannot be both @CriticalNative and @FastNative "
+                 << art::PrettyMethod(method_idx, dex_file, /* with_signature */ true);
+    }
+
+    // @CriticalNative - extra checks:
+    // -- Don't allow virtual criticals
+    // -- Don't allow synchronized criticals
+    // -- Don't allow any objects as parameter or return value
+    if (UNLIKELY(is_critical_native)) {
+      CHECK(is_static)
+          << "@CriticalNative functions cannot be virtual since that would "
+          << "require passing a reference parameter (this), which is illegal "
+          << art::PrettyMethod(method_idx, dex_file, /* with_signature */ true);
+      CHECK(!is_synchronized)
+          << "@CriticalNative functions cannot be synchronized since that would "
+          << "require passing a (class and/or this) reference parameter, which is illegal "
+          << art::PrettyMethod(method_idx, dex_file, /* with_signature */ true);
+      for (size_t i = 0; i < strlen(shorty); ++i) {
+        CHECK_NE(Primitive::kPrimNot, Primitive::GetType(shorty[i]))
+            << "@CriticalNative methods' shorty types must not contain references "
+            << art::PrettyMethod(method_idx, dex_file, /* with_signature */ true);
+      }
+    }
+  }
+
   ArenaPool pool;
   ArenaAllocator arena(&pool);
 
   // Calling conventions used to iterate over parameters to method
-  std::unique_ptr<JniCallingConvention> main_jni_conv(
-      JniCallingConvention::Create(&arena, is_static, is_synchronized, shorty, instruction_set));
+  std::unique_ptr<JniCallingConvention> main_jni_conv =
+      JniCallingConvention::Create(&arena,
+                                   is_static,
+                                   is_synchronized,
+                                   is_critical_native,
+                                   shorty,
+                                   instruction_set);
   bool reference_return = main_jni_conv->IsReturnAReference();
 
   std::unique_ptr<ManagedRuntimeCallingConvention> mr_conv(
@@ -127,8 +167,13 @@ static CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver* driver,
     jni_end_shorty = "V";
   }
 
-  std::unique_ptr<JniCallingConvention> end_jni_conv(JniCallingConvention::Create(
-      &arena, is_static, is_synchronized, jni_end_shorty, instruction_set));
+  std::unique_ptr<JniCallingConvention> end_jni_conv(
+      JniCallingConvention::Create(&arena,
+                                   is_static,
+                                   is_synchronized,
+                                   is_critical_native,
+                                   jni_end_shorty,
+                                   instruction_set));
 
   // Assembler that holds generated instructions
   std::unique_ptr<JNIMacroAssembler<kPointerSize>> jni_asm =
@@ -141,75 +186,89 @@ static CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver* driver,
   const Offset monitor_enter(OFFSETOF_MEMBER(JNINativeInterface, MonitorEnter));
   const Offset monitor_exit(OFFSETOF_MEMBER(JNINativeInterface, MonitorExit));
 
-  // 1. Build the frame saving all callee saves
-  const size_t frame_size(main_jni_conv->FrameSize());
+  // 1. Build the frame saving all callee saves, Method*, and PC return address.
+  const size_t frame_size(main_jni_conv->FrameSize());  // Excludes outgoing args.
   ArrayRef<const ManagedRegister> callee_save_regs = main_jni_conv->CalleeSaveRegisters();
   __ BuildFrame(frame_size, mr_conv->MethodRegister(), callee_save_regs, mr_conv->EntrySpills());
   DCHECK_EQ(jni_asm->cfi().GetCurrentCFAOffset(), static_cast<int>(frame_size));
 
-  // 2. Set up the HandleScope
-  mr_conv->ResetIterator(FrameOffset(frame_size));
-  main_jni_conv->ResetIterator(FrameOffset(0));
-  __ StoreImmediateToFrame(main_jni_conv->HandleScopeNumRefsOffset(),
-                           main_jni_conv->ReferenceCount(),
-                           mr_conv->InterproceduralScratchRegister());
-
-  __ CopyRawPtrFromThread(main_jni_conv->HandleScopeLinkOffset(),
-                          Thread::TopHandleScopeOffset(),
-                          mr_conv->InterproceduralScratchRegister());
-  __ StoreStackOffsetToThread(Thread::TopHandleScopeOffset(),
-                              main_jni_conv->HandleScopeOffset(),
-                              mr_conv->InterproceduralScratchRegister());
-
-  // 3.
Place incoming reference arguments into handle scope - main_jni_conv->Next(); // Skip JNIEnv* - // 3.5. Create Class argument for static methods out of passed method - if (is_static) { - FrameOffset handle_scope_offset = main_jni_conv->CurrentParamHandleScopeEntryOffset(); - // Check handle scope offset is within frame - CHECK_LT(handle_scope_offset.Uint32Value(), frame_size); - // Note this LoadRef() doesn't need heap unpoisoning since it's from the ArtMethod. - // Note this LoadRef() does not include read barrier. It will be handled below. - __ LoadRef(main_jni_conv->InterproceduralScratchRegister(), - mr_conv->MethodRegister(), ArtMethod::DeclaringClassOffset(), false); - __ VerifyObject(main_jni_conv->InterproceduralScratchRegister(), false); - __ StoreRef(handle_scope_offset, main_jni_conv->InterproceduralScratchRegister()); - main_jni_conv->Next(); // in handle scope so move to next argument - } - while (mr_conv->HasNext()) { - CHECK(main_jni_conv->HasNext()); - bool ref_param = main_jni_conv->IsCurrentParamAReference(); - CHECK(!ref_param || mr_conv->IsCurrentParamAReference()); - // References need placing in handle scope and the entry value passing - if (ref_param) { - // Compute handle scope entry, note null is placed in the handle scope but its boxed value - // must be null. + if (LIKELY(!is_critical_native)) { + // NOTE: @CriticalNative methods don't have a HandleScope + // because they can't have any reference parameters or return values. + + // 2. Set up the HandleScope + mr_conv->ResetIterator(FrameOffset(frame_size)); + main_jni_conv->ResetIterator(FrameOffset(0)); + __ StoreImmediateToFrame(main_jni_conv->HandleScopeNumRefsOffset(), + main_jni_conv->ReferenceCount(), + mr_conv->InterproceduralScratchRegister()); + + __ CopyRawPtrFromThread(main_jni_conv->HandleScopeLinkOffset(), + Thread::TopHandleScopeOffset(), + mr_conv->InterproceduralScratchRegister()); + __ StoreStackOffsetToThread(Thread::TopHandleScopeOffset(), + main_jni_conv->HandleScopeOffset(), + mr_conv->InterproceduralScratchRegister()); + + // 3. Place incoming reference arguments into handle scope + main_jni_conv->Next(); // Skip JNIEnv* + // 3.5. Create Class argument for static methods out of passed method + if (is_static) { FrameOffset handle_scope_offset = main_jni_conv->CurrentParamHandleScopeEntryOffset(); - // Check handle scope offset is within frame and doesn't run into the saved segment state. + // Check handle scope offset is within frame CHECK_LT(handle_scope_offset.Uint32Value(), frame_size); - CHECK_NE(handle_scope_offset.Uint32Value(), - main_jni_conv->SavedLocalReferenceCookieOffset().Uint32Value()); - bool input_in_reg = mr_conv->IsCurrentParamInRegister(); - bool input_on_stack = mr_conv->IsCurrentParamOnStack(); - CHECK(input_in_reg || input_on_stack); - - if (input_in_reg) { - ManagedRegister in_reg = mr_conv->CurrentParamRegister(); - __ VerifyObject(in_reg, mr_conv->IsCurrentArgPossiblyNull()); - __ StoreRef(handle_scope_offset, in_reg); - } else if (input_on_stack) { - FrameOffset in_off = mr_conv->CurrentParamStackOffset(); - __ VerifyObject(in_off, mr_conv->IsCurrentArgPossiblyNull()); - __ CopyRef(handle_scope_offset, in_off, - mr_conv->InterproceduralScratchRegister()); + // Note this LoadRef() doesn't need heap unpoisoning since it's from the ArtMethod. + // Note this LoadRef() does not include read barrier. It will be handled below. 
+ // + // scratchRegister = *method[DeclaringClassOffset()]; + __ LoadRef(main_jni_conv->InterproceduralScratchRegister(), + mr_conv->MethodRegister(), ArtMethod::DeclaringClassOffset(), false); + __ VerifyObject(main_jni_conv->InterproceduralScratchRegister(), false); + // *handleScopeOffset = scratchRegister + __ StoreRef(handle_scope_offset, main_jni_conv->InterproceduralScratchRegister()); + main_jni_conv->Next(); // in handle scope so move to next argument + } + // Place every reference into the handle scope (ignore other parameters). + while (mr_conv->HasNext()) { + CHECK(main_jni_conv->HasNext()); + bool ref_param = main_jni_conv->IsCurrentParamAReference(); + CHECK(!ref_param || mr_conv->IsCurrentParamAReference()); + // References need placing in handle scope and the entry value passing + if (ref_param) { + // Compute handle scope entry, note null is placed in the handle scope but its boxed value + // must be null. + FrameOffset handle_scope_offset = main_jni_conv->CurrentParamHandleScopeEntryOffset(); + // Check handle scope offset is within frame and doesn't run into the saved segment state. + CHECK_LT(handle_scope_offset.Uint32Value(), frame_size); + CHECK_NE(handle_scope_offset.Uint32Value(), + main_jni_conv->SavedLocalReferenceCookieOffset().Uint32Value()); + bool input_in_reg = mr_conv->IsCurrentParamInRegister(); + bool input_on_stack = mr_conv->IsCurrentParamOnStack(); + CHECK(input_in_reg || input_on_stack); + + if (input_in_reg) { + ManagedRegister in_reg = mr_conv->CurrentParamRegister(); + __ VerifyObject(in_reg, mr_conv->IsCurrentArgPossiblyNull()); + __ StoreRef(handle_scope_offset, in_reg); + } else if (input_on_stack) { + FrameOffset in_off = mr_conv->CurrentParamStackOffset(); + __ VerifyObject(in_off, mr_conv->IsCurrentArgPossiblyNull()); + __ CopyRef(handle_scope_offset, in_off, + mr_conv->InterproceduralScratchRegister()); + } } + mr_conv->Next(); + main_jni_conv->Next(); } - mr_conv->Next(); - main_jni_conv->Next(); - } - // 4. Write out the end of the quick frames. - __ StoreStackPointerToThread(Thread::TopOfManagedStackOffset()); + // 4. Write out the end of the quick frames. + __ StoreStackPointerToThread(Thread::TopOfManagedStackOffset()); + + // NOTE: @CriticalNative does not need to store the stack pointer to the thread + // because garbage collections are disabled within the execution of a + // @CriticalNative method. + // (TODO: We could probably disable it for @FastNative too). + } // if (!is_critical_native) // 5. Move frame down to allow space for out going args. const size_t main_out_arg_size = main_jni_conv->OutArgSize(); @@ -218,7 +277,9 @@ static CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver* driver, // Call the read barrier for the declaring class loaded from the method for a static call. // Note that we always have outgoing param space available for at least two params. - if (kUseReadBarrier && is_static) { + if (kUseReadBarrier && is_static && !is_critical_native) { + // XX: Why is this necessary only for the jclass? Why not for every single object ref? + // Skip this for @CriticalNative because we didn't build a HandleScope to begin with. ThreadOffset read_barrier = QUICK_ENTRYPOINT_OFFSET(kPointerSize, pReadBarrierJni); main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size)); @@ -255,46 +316,56 @@ static CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver* driver, // can occur. The result is the saved JNI local state that is restored by the exit call. 
We // abuse the JNI calling convention here, that is guaranteed to support passing 2 pointer // arguments. - ThreadOffset jni_start = - is_synchronized - ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodStartSynchronized) - : (is_fast_native - ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodFastStart) - : QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodStart)); + FrameOffset locked_object_handle_scope_offset(0xBEEFDEAD); + if (LIKELY(!is_critical_native)) { + // Skip this for @CriticalNative methods. They do not call JniMethodStart. + ThreadOffset jni_start = + is_synchronized + ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodStartSynchronized) + : (is_fast_native + ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodFastStart) + : QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodStart)); - main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size)); - FrameOffset locked_object_handle_scope_offset(0); - if (is_synchronized) { - // Pass object for locking. - main_jni_conv->Next(); // Skip JNIEnv. - locked_object_handle_scope_offset = main_jni_conv->CurrentParamHandleScopeEntryOffset(); main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size)); - if (main_jni_conv->IsCurrentParamOnStack()) { - FrameOffset out_off = main_jni_conv->CurrentParamStackOffset(); - __ CreateHandleScopeEntry(out_off, locked_object_handle_scope_offset, - mr_conv->InterproceduralScratchRegister(), false); + locked_object_handle_scope_offset = FrameOffset(0); + if (is_synchronized) { + // Pass object for locking. + main_jni_conv->Next(); // Skip JNIEnv. + locked_object_handle_scope_offset = main_jni_conv->CurrentParamHandleScopeEntryOffset(); + main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size)); + if (main_jni_conv->IsCurrentParamOnStack()) { + FrameOffset out_off = main_jni_conv->CurrentParamStackOffset(); + __ CreateHandleScopeEntry(out_off, locked_object_handle_scope_offset, + mr_conv->InterproceduralScratchRegister(), false); + } else { + ManagedRegister out_reg = main_jni_conv->CurrentParamRegister(); + __ CreateHandleScopeEntry(out_reg, locked_object_handle_scope_offset, + ManagedRegister::NoRegister(), false); + } + main_jni_conv->Next(); + } + if (main_jni_conv->IsCurrentParamInRegister()) { + __ GetCurrentThread(main_jni_conv->CurrentParamRegister()); + __ Call(main_jni_conv->CurrentParamRegister(), + Offset(jni_start), + main_jni_conv->InterproceduralScratchRegister()); } else { - ManagedRegister out_reg = main_jni_conv->CurrentParamRegister(); - __ CreateHandleScopeEntry(out_reg, locked_object_handle_scope_offset, - ManagedRegister::NoRegister(), false); + __ GetCurrentThread(main_jni_conv->CurrentParamStackOffset(), + main_jni_conv->InterproceduralScratchRegister()); + __ CallFromThread(jni_start, main_jni_conv->InterproceduralScratchRegister()); + } + if (is_synchronized) { // Check for exceptions from monitor enter. + __ ExceptionPoll(main_jni_conv->InterproceduralScratchRegister(), main_out_arg_size); } - main_jni_conv->Next(); - } - if (main_jni_conv->IsCurrentParamInRegister()) { - __ GetCurrentThread(main_jni_conv->CurrentParamRegister()); - __ Call(main_jni_conv->CurrentParamRegister(), - Offset(jni_start), - main_jni_conv->InterproceduralScratchRegister()); - } else { - __ GetCurrentThread(main_jni_conv->CurrentParamStackOffset(), - main_jni_conv->InterproceduralScratchRegister()); - __ CallFromThread(jni_start, main_jni_conv->InterproceduralScratchRegister()); } - if (is_synchronized) { // Check for exceptions from monitor enter. 
- __ ExceptionPoll(main_jni_conv->InterproceduralScratchRegister(), main_out_arg_size); + + // Store into stack_frame[saved_cookie_offset] the return value of JniMethodStart. + FrameOffset saved_cookie_offset( + FrameOffset(0xDEADBEEFu)); // @CriticalNative - use obviously bad value for debugging + if (LIKELY(!is_critical_native)) { + saved_cookie_offset = main_jni_conv->SavedLocalReferenceCookieOffset(); + __ Store(saved_cookie_offset, main_jni_conv->IntReturnRegister(), 4 /* sizeof cookie */); } - FrameOffset saved_cookie_offset = main_jni_conv->SavedLocalReferenceCookieOffset(); - __ Store(saved_cookie_offset, main_jni_conv->IntReturnRegister(), 4); // 7. Iterate over arguments placing values from managed calling convention in // to the convention required for a native call (shuffling). For references @@ -315,9 +386,13 @@ static CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver* driver, for (uint32_t i = 0; i < args_count; ++i) { mr_conv->ResetIterator(FrameOffset(frame_size + main_out_arg_size)); main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size)); - main_jni_conv->Next(); // Skip JNIEnv*. - if (is_static) { - main_jni_conv->Next(); // Skip Class for now. + + // Skip the extra JNI parameters for now. + if (LIKELY(!is_critical_native)) { + main_jni_conv->Next(); // Skip JNIEnv*. + if (is_static) { + main_jni_conv->Next(); // Skip Class for now. + } } // Skip to the argument we're interested in. for (uint32_t j = 0; j < args_count - i - 1; ++j) { @@ -326,7 +401,7 @@ static CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver* driver, } CopyParameter(jni_asm.get(), mr_conv.get(), main_jni_conv.get(), frame_size, main_out_arg_size); } - if (is_static) { + if (is_static && !is_critical_native) { // Create argument for Class mr_conv->ResetIterator(FrameOffset(frame_size + main_out_arg_size)); main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size)); @@ -344,24 +419,30 @@ static CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver* driver, } } - // 8. Create 1st argument, the JNI environment ptr. + // Set the iterator back to the incoming Method*. main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size)); - // Register that will hold local indirect reference table - if (main_jni_conv->IsCurrentParamInRegister()) { - ManagedRegister jni_env = main_jni_conv->CurrentParamRegister(); - DCHECK(!jni_env.Equals(main_jni_conv->InterproceduralScratchRegister())); - __ LoadRawPtrFromThread(jni_env, Thread::JniEnvOffset()); - } else { - FrameOffset jni_env = main_jni_conv->CurrentParamStackOffset(); - __ CopyRawPtrFromThread(jni_env, - Thread::JniEnvOffset(), - main_jni_conv->InterproceduralScratchRegister()); + if (LIKELY(!is_critical_native)) { + // 8. Create 1st argument, the JNI environment ptr. + // Register that will hold local indirect reference table + if (main_jni_conv->IsCurrentParamInRegister()) { + ManagedRegister jni_env = main_jni_conv->CurrentParamRegister(); + DCHECK(!jni_env.Equals(main_jni_conv->InterproceduralScratchRegister())); + __ LoadRawPtrFromThread(jni_env, Thread::JniEnvOffset()); + } else { + FrameOffset jni_env = main_jni_conv->CurrentParamStackOffset(); + __ CopyRawPtrFromThread(jni_env, + Thread::JniEnvOffset(), + main_jni_conv->InterproceduralScratchRegister()); + } } // 9. Plant call to native code associated with method. 
- MemberOffset jni_entrypoint_offset = ArtMethod::EntryPointFromJniOffset( - InstructionSetPointerSize(instruction_set)); - __ Call(main_jni_conv->MethodStackOffset(), jni_entrypoint_offset, + MemberOffset jni_entrypoint_offset = + ArtMethod::EntryPointFromJniOffset(InstructionSetPointerSize(instruction_set)); + // FIXME: Not sure if MethodStackOffset will work here. What does it even do? + __ Call(main_jni_conv->MethodStackOffset(), + jni_entrypoint_offset, + // XX: Why not the jni conv scratch register? mr_conv->InterproceduralScratchRegister()); // 10. Fix differences in result widths. @@ -377,20 +458,45 @@ static CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver* driver, } } - // 11. Save return value + // 11. Process return value FrameOffset return_save_location = main_jni_conv->ReturnValueSaveLocation(); if (main_jni_conv->SizeOfReturnValue() != 0 && !reference_return) { - if ((instruction_set == kMips || instruction_set == kMips64) && - main_jni_conv->GetReturnType() == Primitive::kPrimDouble && - return_save_location.Uint32Value() % 8 != 0) { - // Ensure doubles are 8-byte aligned for MIPS - return_save_location = FrameOffset(return_save_location.Uint32Value() - + static_cast(kMipsPointerSize)); + if (LIKELY(!is_critical_native)) { + // For normal JNI, store the return value on the stack because the call to + // JniMethodEnd will clobber the return value. It will be restored in (13). + if ((instruction_set == kMips || instruction_set == kMips64) && + main_jni_conv->GetReturnType() == Primitive::kPrimDouble && + return_save_location.Uint32Value() % 8 != 0) { + // Ensure doubles are 8-byte aligned for MIPS + return_save_location = FrameOffset(return_save_location.Uint32Value() + + static_cast(kMipsPointerSize)); + // TODO: refactor this into the JniCallingConvention code + // as a return value alignment requirement. + } + CHECK_LT(return_save_location.Uint32Value(), frame_size + main_out_arg_size); + __ Store(return_save_location, + main_jni_conv->ReturnRegister(), + main_jni_conv->SizeOfReturnValue()); + } else { + // For @CriticalNative only, + // move the JNI return register into the managed return register (if they don't match). + ManagedRegister jni_return_reg = main_jni_conv->ReturnRegister(); + ManagedRegister mr_return_reg = mr_conv->ReturnRegister(); + + // Check if the JNI return register matches the managed return register. + // If they differ, only then do we have to do anything about it. + // Otherwise the return value is already in the right place when we return. + if (!jni_return_reg.Equals(mr_return_reg)) { + // This is typically only necessary on ARM32 due to native being softfloat + // while managed is hardfloat. + // -- For example VMOV {r0, r1} -> D0; VMOV r0 -> S0. + __ Move(mr_return_reg, jni_return_reg, main_jni_conv->SizeOfReturnValue()); + } else if (jni_return_reg.IsNoRegister() && mr_return_reg.IsNoRegister()) { + // Sanity check: If the return value is passed on the stack for some reason, + // then make sure the size matches. + CHECK_EQ(main_jni_conv->SizeOfReturnValue(), mr_conv->SizeOfReturnValue()); + } } - CHECK_LT(return_save_location.Uint32Value(), frame_size + main_out_arg_size); - __ Store(return_save_location, - main_jni_conv->ReturnRegister(), - main_jni_conv->SizeOfReturnValue()); } // Increase frame size for out args if needed by the end_jni_conv. 
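To make the @CriticalNative branch of step 11 concrete: on 32-bit ARM the native (softfloat AAPCS) convention returns a double in the core register pair {r0, r1}, while the managed (hardfloat) convention expects it in D0, so the Equals() check fails and a single move is emitted; the vmovdrr/vmovsr support for exactly this case is added to the ARM macro assembler later in this patch. A sketch of what the move amounts to for the test method fooSDD_Critical (illustrative, not literal compiler output):

// arm32, @CriticalNative method returning double (e.g. fooSDD_Critical):
//   JNI (softfloat) return location:     register pair {r0, r1}
//   managed (hardfloat) return location: D0
// jni_return_reg.Equals(mr_return_reg) is false, so the stub emits:
//   vmovdrr d0, r0, r1    // D0 = {r0, r1}
// For a float return the analogous move is:
//   vmovsr  s0, r0        // S0 = r0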
@@ -398,6 +504,8 @@ static CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver* driver, if (end_out_arg_size > current_out_arg_size) { size_t out_arg_size_diff = end_out_arg_size - current_out_arg_size; current_out_arg_size = end_out_arg_size; + // TODO: This is redundant for @CriticalNative but we need to + // conditionally do __DecreaseFrameSize below. __ IncreaseFrameSize(out_arg_size_diff); saved_cookie_offset = FrameOffset(saved_cookie_offset.SizeValue() + out_arg_size_diff); locked_object_handle_scope_offset = @@ -407,65 +515,71 @@ static CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver* driver, // thread. end_jni_conv->ResetIterator(FrameOffset(end_out_arg_size)); - ThreadOffset jni_end(-1); - if (reference_return) { - // Pass result. - jni_end = is_synchronized - ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEndWithReferenceSynchronized) - : QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEndWithReference); - SetNativeParameter(jni_asm.get(), end_jni_conv.get(), end_jni_conv->ReturnRegister()); - end_jni_conv->Next(); - } else { - jni_end = is_synchronized - ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEndSynchronized) - : (is_fast_native - ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodFastEnd) - : QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEnd)); - } - // Pass saved local reference state. - if (end_jni_conv->IsCurrentParamOnStack()) { - FrameOffset out_off = end_jni_conv->CurrentParamStackOffset(); - __ Copy(out_off, saved_cookie_offset, end_jni_conv->InterproceduralScratchRegister(), 4); - } else { - ManagedRegister out_reg = end_jni_conv->CurrentParamRegister(); - __ Load(out_reg, saved_cookie_offset, 4); - } - end_jni_conv->Next(); - if (is_synchronized) { - // Pass object for unlocking. + if (LIKELY(!is_critical_native)) { + // 12. Call JniMethodEnd + ThreadOffset jni_end(-1); + if (reference_return) { + // Pass result. + jni_end = is_synchronized + ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEndWithReferenceSynchronized) + : QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEndWithReference); + SetNativeParameter(jni_asm.get(), end_jni_conv.get(), end_jni_conv->ReturnRegister()); + end_jni_conv->Next(); + } else { + jni_end = is_synchronized + ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEndSynchronized) + : (is_fast_native + ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodFastEnd) + : QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEnd)); + } + // Pass saved local reference state. 
if (end_jni_conv->IsCurrentParamOnStack()) { FrameOffset out_off = end_jni_conv->CurrentParamStackOffset(); - __ CreateHandleScopeEntry(out_off, locked_object_handle_scope_offset, - end_jni_conv->InterproceduralScratchRegister(), - false); + __ Copy(out_off, saved_cookie_offset, end_jni_conv->InterproceduralScratchRegister(), 4); } else { ManagedRegister out_reg = end_jni_conv->CurrentParamRegister(); - __ CreateHandleScopeEntry(out_reg, locked_object_handle_scope_offset, - ManagedRegister::NoRegister(), false); + __ Load(out_reg, saved_cookie_offset, 4); } end_jni_conv->Next(); - } - if (end_jni_conv->IsCurrentParamInRegister()) { - __ GetCurrentThread(end_jni_conv->CurrentParamRegister()); - __ Call(end_jni_conv->CurrentParamRegister(), - Offset(jni_end), - end_jni_conv->InterproceduralScratchRegister()); - } else { - __ GetCurrentThread(end_jni_conv->CurrentParamStackOffset(), - end_jni_conv->InterproceduralScratchRegister()); - __ CallFromThread(jni_end, end_jni_conv->InterproceduralScratchRegister()); - } + if (is_synchronized) { + // Pass object for unlocking. + if (end_jni_conv->IsCurrentParamOnStack()) { + FrameOffset out_off = end_jni_conv->CurrentParamStackOffset(); + __ CreateHandleScopeEntry(out_off, locked_object_handle_scope_offset, + end_jni_conv->InterproceduralScratchRegister(), + false); + } else { + ManagedRegister out_reg = end_jni_conv->CurrentParamRegister(); + __ CreateHandleScopeEntry(out_reg, locked_object_handle_scope_offset, + ManagedRegister::NoRegister(), false); + } + end_jni_conv->Next(); + } + if (end_jni_conv->IsCurrentParamInRegister()) { + __ GetCurrentThread(end_jni_conv->CurrentParamRegister()); + __ Call(end_jni_conv->CurrentParamRegister(), + Offset(jni_end), + end_jni_conv->InterproceduralScratchRegister()); + } else { + __ GetCurrentThread(end_jni_conv->CurrentParamStackOffset(), + end_jni_conv->InterproceduralScratchRegister()); + __ CallFromThread(jni_end, end_jni_conv->InterproceduralScratchRegister()); + } - // 13. Reload return value - if (main_jni_conv->SizeOfReturnValue() != 0 && !reference_return) { - __ Load(mr_conv->ReturnRegister(), return_save_location, mr_conv->SizeOfReturnValue()); - } + // 13. Reload return value + if (main_jni_conv->SizeOfReturnValue() != 0 && !reference_return) { + __ Load(mr_conv->ReturnRegister(), return_save_location, mr_conv->SizeOfReturnValue()); + // NIT: If it's @CriticalNative then we actually only need to do this IF + // the calling convention's native return register doesn't match the managed convention's + // return register. + } + } // if (!is_critical_native) // 14. Move frame up now we're done with the out arg space. __ DecreaseFrameSize(current_out_arg_size); // 15. Process pending exceptions from JNI call or monitor exit. - __ ExceptionPoll(main_jni_conv->InterproceduralScratchRegister(), 0); + __ ExceptionPoll(main_jni_conv->InterproceduralScratchRegister(), 0 /* stack_adjust */); // 16. Remove activation - need to restore callee save registers since the GC may have changed // them. 
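Stepping back, the effect of the is_critical_native guards in steps 2 through 13 is that a @CriticalNative stub degenerates to little more than a frame push, an argument shuffle, and the call itself. A rough side-by-side of the emitted sequences (a schematic summary of the numbered steps above, not literal assembler output):

// Normal / @FastNative stub:                @CriticalNative stub:
//   1-4.   build frame; set up HandleScope;   build frame; spill callee-saves
//          store SP to Thread                 (no HandleScope, no SP store)
//   6.     call JniMethodStart*; save cookie  (skipped)
//   7-8.   shuffle args; prepend JNIEnv*      shuffle args only
//          (+ jclass for static methods)
//   9.     call native code                   call native code
//   11.    spill return value                 move return register only if the
//                                             native/managed conventions differ
//   12-13. call JniMethodEnd*; reload value   (skipped)
//   14-16. pop out-args; exception poll;      pop out-args; exception poll;
//          remove activation and return       remove activation and return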
@@ -497,7 +611,8 @@ template <PointerSize kPointerSize> static void CopyParameter(JNIMacroAssembler<kPointerSize>* jni_asm, ManagedRuntimeCallingConvention* mr_conv, JniCallingConvention* jni_conv, - size_t frame_size, size_t out_arg_size) { + size_t frame_size, + size_t out_arg_size) { bool input_in_reg = mr_conv->IsCurrentParamInRegister(); bool output_in_reg = jni_conv->IsCurrentParamInRegister(); FrameOffset handle_scope_offset(0); diff --git a/compiler/jni/quick/x86/calling_convention_x86.cc b/compiler/jni/quick/x86/calling_convention_x86.cc index 1d06f2685..0bfcc3fb4 100644 --- a/compiler/jni/quick/x86/calling_convention_x86.cc +++ b/compiler/jni/quick/x86/calling_convention_x86.cc @@ -24,6 +24,7 @@ namespace art { namespace x86 { static_assert(kX86PointerSize == PointerSize::k32, "Unexpected x86 pointer size"); +static_assert(kStackAlignment >= 16u, "IA-32 cdecl requires at least 16 byte stack alignment"); static constexpr ManagedRegister kCalleeSaveRegisters[] = { // Core registers. @@ -190,9 +191,15 @@ const ManagedRegisterEntrySpills& X86ManagedRuntimeCallingConvention::EntrySpill // JNI calling convention -X86JniCallingConvention::X86JniCallingConvention(bool is_static, bool is_synchronized, +X86JniCallingConvention::X86JniCallingConvention(bool is_static, + bool is_synchronized, + bool is_critical_native, const char* shorty) - : JniCallingConvention(is_static, is_synchronized, shorty, kX86PointerSize) { + : JniCallingConvention(is_static, + is_synchronized, + is_critical_native, + shorty, + kX86PointerSize) { } uint32_t X86JniCallingConvention::CoreSpillMask() const { @@ -204,13 +211,31 @@ uint32_t X86JniCallingConvention::FpSpillMask() const { } size_t X86JniCallingConvention::FrameSize() { - // Method*, return address and callee save area size, local reference segment state - size_t frame_data_size = static_cast<size_t>(kX86PointerSize) + - (2 + CalleeSaveRegisters().size()) * kFramePointerSize; - // References plus 2 words for HandleScope header - size_t handle_scope_size = HandleScope::SizeOf(kX86PointerSize, ReferenceCount()); + // Method*, PC return address and callee save area size, local reference segment state + const size_t method_ptr_size = static_cast<size_t>(kX86PointerSize); + const size_t pc_return_addr_size = kFramePointerSize; + const size_t callee_save_area_size = CalleeSaveRegisters().size() * kFramePointerSize; + size_t frame_data_size = method_ptr_size + pc_return_addr_size + callee_save_area_size; + + if (LIKELY(HasLocalReferenceSegmentState())) { // local ref. segment state + // Local reference segment state is sometimes excluded. + frame_data_size += kFramePointerSize; + } + + // References plus link_ (pointer) and number_of_references_ (uint32_t) for HandleScope header + const size_t handle_scope_size = HandleScope::SizeOf(kX86PointerSize, ReferenceCount()); + + size_t total_size = frame_data_size; + if (LIKELY(HasHandleScope())) { + // HandleScope is sometimes excluded. + total_size += handle_scope_size; // handle scope size + } + + // Plus return value spill area size - return RoundUp(frame_data_size + handle_scope_size + SizeOfReturnValue(), kStackAlignment); + total_size += SizeOfReturnValue(); + + return RoundUp(total_size, kStackAlignment); + // TODO: Same thing as x64 except using different pointer size. Refactor? } size_t X86JniCallingConvention::OutArgSize() { @@ -239,11 +264,13 @@ FrameOffset X86JniCallingConvention::CurrentParamStackOffset() { } size_t X86JniCallingConvention::NumberOfOutgoingStackArgs() { - size_t static_args = IsStatic() ? 
1 : 0; // count jclass + size_t static_args = HasSelfClass() ? 1 : 0; // count jclass // regular argument parameters and this size_t param_args = NumArgs() + NumLongOrDoubleArgs(); // count JNIEnv* and return pc (pushed after Method*) - size_t total_args = static_args + param_args + 2; + size_t internal_args = 1 /* return pc */ + (HasJniEnv() ? 1 : 0 /* jni env */); + // No register args. + size_t total_args = static_args + param_args + internal_args; return total_args; } diff --git a/compiler/jni/quick/x86/calling_convention_x86.h b/compiler/jni/quick/x86/calling_convention_x86.h index ff92fc990..be83cdaad 100644 --- a/compiler/jni/quick/x86/calling_convention_x86.h +++ b/compiler/jni/quick/x86/calling_convention_x86.h @@ -52,9 +52,13 @@ class X86ManagedRuntimeCallingConvention FINAL : public ManagedRuntimeCallingCon DISALLOW_COPY_AND_ASSIGN(X86ManagedRuntimeCallingConvention); }; +// Implements the x86 cdecl calling convention. class X86JniCallingConvention FINAL : public JniCallingConvention { public: - X86JniCallingConvention(bool is_static, bool is_synchronized, const char* shorty); + X86JniCallingConvention(bool is_static, + bool is_synchronized, + bool is_critical_native, + const char* shorty); ~X86JniCallingConvention() OVERRIDE {} // Calling convention ManagedRegister ReturnRegister() OVERRIDE; diff --git a/compiler/jni/quick/x86_64/calling_convention_x86_64.cc b/compiler/jni/quick/x86_64/calling_convention_x86_64.cc index cbf10bda4..8ca0ffe53 100644 --- a/compiler/jni/quick/x86_64/calling_convention_x86_64.cc +++ b/compiler/jni/quick/x86_64/calling_convention_x86_64.cc @@ -25,8 +25,16 @@ namespace art { namespace x86_64 { constexpr size_t kFramePointerSize = static_cast<size_t>(PointerSize::k64); - static_assert(kX86_64PointerSize == PointerSize::k64, "Unexpected x86_64 pointer size"); +static_assert(kStackAlignment >= 16u, "System V AMD64 ABI requires at least 16 byte stack alignment"); + +// XMM0..XMM7 can be used to pass the first 8 floating args. The rest must go on the stack. +// -- Managed and JNI calling conventions. +constexpr size_t kMaxFloatOrDoubleRegisterArguments = 8u; +// Up to how many integer-like (pointers, objects, longs, int, short, bool, etc) args can be +// enregistered. The rest of the args must go on the stack. +// -- JNI calling convention only (Managed excludes RDI, so it's actually 5). +constexpr size_t kMaxIntLikeRegisterArguments = 6u; static constexpr ManagedRegister kCalleeSaveRegisters[] = { // Core registers. 
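As a worked example of the new NumberOfOutgoingStackArgs() accounting on x86, where every argument is passed on the stack, compare fooSII_Critical(int, int) from the test class with a plain static native equivalent (counts plugged into the formula above; an illustration, not code from the patch):

// @CriticalNative: static native int fooSII_Critical(int x, int y);
//   static_args   = 0  // HasSelfClass() is false: no implicit jclass
//   param_args    = 2  // x and y; no longs/doubles, so nothing counts twice
//   internal_args = 1  // return pc only, since HasJniEnv() is false
//   total         = 3 outgoing stack args
//
// Plain: static native int fooSII(int x, int y);
//   static_args   = 1  // implicit jclass
//   param_args    = 2
//   internal_args = 2  // JNIEnv* plus return pc
//   total         = 5 outgoing stack args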
@@ -130,7 +138,7 @@ ManagedRegister X86_64ManagedRuntimeCallingConvention::CurrentParamRegister() { case 3: res = X86_64ManagedRegister::FromCpuRegister(R8); break; case 4: res = X86_64ManagedRegister::FromCpuRegister(R9); break; } - } else if (itr_float_and_doubles_ < 8) { + } else if (itr_float_and_doubles_ < kMaxFloatOrDoubleRegisterArguments) { // First eight float parameters are passed via XMM0..XMM7 res = X86_64ManagedRegister::FromXmmRegister( static_cast<FloatRegister>(XMM0 + itr_float_and_doubles_)); @@ -165,9 +173,15 @@ const ManagedRegisterEntrySpills& X86_64ManagedRuntimeCallingConvention::EntrySp // JNI calling convention -X86_64JniCallingConvention::X86_64JniCallingConvention(bool is_static, bool is_synchronized, +X86_64JniCallingConvention::X86_64JniCallingConvention(bool is_static, + bool is_synchronized, + bool is_critical_native, const char* shorty) - : JniCallingConvention(is_static, is_synchronized, shorty, kX86_64PointerSize) { + : JniCallingConvention(is_static, + is_synchronized, + is_critical_native, + shorty, + kX86_64PointerSize) { } uint32_t X86_64JniCallingConvention::CoreSpillMask() const { @@ -179,13 +193,30 @@ uint32_t X86_64JniCallingConvention::FpSpillMask() const { } size_t X86_64JniCallingConvention::FrameSize() { - // Method*, return address and callee save area size, local reference segment state - size_t frame_data_size = static_cast<size_t>(kX86_64PointerSize) + - (2 + CalleeSaveRegisters().size()) * kFramePointerSize; + // Method*, PC return address and callee save area size, local reference segment state + const size_t method_ptr_size = static_cast<size_t>(kX86_64PointerSize); + const size_t pc_return_addr_size = kFramePointerSize; + const size_t callee_save_area_size = CalleeSaveRegisters().size() * kFramePointerSize; + size_t frame_data_size = method_ptr_size + pc_return_addr_size + callee_save_area_size; + + if (LIKELY(HasLocalReferenceSegmentState())) { // local ref. segment state + // Local reference segment state is sometimes excluded. + frame_data_size += kFramePointerSize; + } + // References plus link_ (pointer) and number_of_references_ (uint32_t) for HandleScope header - size_t handle_scope_size = HandleScope::SizeOf(kX86_64PointerSize, ReferenceCount()); + const size_t handle_scope_size = HandleScope::SizeOf(kX86_64PointerSize, ReferenceCount()); + + size_t total_size = frame_data_size; + if (LIKELY(HasHandleScope())) { + // HandleScope is sometimes excluded. 
+ total_size += handle_scope_size; // handle scope size + } + + // Plus return value spill area size - return RoundUp(frame_data_size + handle_scope_size + SizeOfReturnValue(), kStackAlignment); + total_size += SizeOfReturnValue(); + + return RoundUp(total_size, kStackAlignment); } size_t X86_64JniCallingConvention::OutArgSize() { @@ -214,8 +245,9 @@ ManagedRegister X86_64JniCallingConvention::CurrentParamRegister() { case 3: res = X86_64ManagedRegister::FromCpuRegister(RCX); break; case 4: res = X86_64ManagedRegister::FromCpuRegister(R8); break; case 5: res = X86_64ManagedRegister::FromCpuRegister(R9); break; + static_assert(5u == kMaxIntLikeRegisterArguments - 1, "Missing case statement(s)"); } - } else if (itr_float_and_doubles_ < 8) { + } else if (itr_float_and_doubles_ < kMaxFloatOrDoubleRegisterArguments) { // First eight float parameters are passed via XMM0..XMM7 res = X86_64ManagedRegister::FromXmmRegister( static_cast<FloatRegister>(XMM0 + itr_float_and_doubles_)); @@ -224,24 +256,35 @@ ManagedRegister X86_64JniCallingConvention::CurrentParamRegister() { } FrameOffset X86_64JniCallingConvention::CurrentParamStackOffset() { - size_t offset = itr_args_ - std::min(8U, itr_float_and_doubles_) // Float arguments passed through Xmm0..Xmm7 - std::min(6U, itr_args_ - itr_float_and_doubles_); // Integer arguments passed through GPR - return FrameOffset(displacement_.Int32Value() - OutArgSize() + (offset * kFramePointerSize)); + CHECK(IsCurrentParamOnStack()); + size_t args_on_stack = itr_args_ + - std::min(kMaxFloatOrDoubleRegisterArguments, + static_cast<size_t>(itr_float_and_doubles_)) + // Float arguments passed through Xmm0..Xmm7 + - std::min(kMaxIntLikeRegisterArguments, + static_cast<size_t>(itr_args_ - itr_float_and_doubles_)); + // Integer arguments passed through GPR + size_t offset = displacement_.Int32Value() - OutArgSize() + (args_on_stack * kFramePointerSize); + CHECK_LT(offset, OutArgSize()); + return FrameOffset(offset); } +// TODO: Calling this "NumberArgs" is misleading. +// It's really more like NumberSlots (like itr_slots_) +// because doubles/longs get counted twice. size_t X86_64JniCallingConvention::NumberOfOutgoingStackArgs() { - size_t static_args = IsStatic() ? 1 : 0; // count jclass + size_t static_args = HasSelfClass() ? 1 : 0; // count jclass // regular argument parameters and this size_t param_args = NumArgs() + NumLongOrDoubleArgs(); // count JNIEnv* and return pc (pushed after Method*) - size_t total_args = static_args + param_args + 2; + size_t internal_args = 1 /* return pc */ + (HasJniEnv() ? 
1 : 0 /* jni env */); + size_t total_args = static_args + param_args + internal_args; // Float arguments passed through Xmm0..Xmm7 // Other (integer) arguments passed through GPR (RDI, RSI, RDX, RCX, R8, R9) size_t total_stack_args = total_args - - std::min(8U, static_cast<size_t>(NumFloatOrDoubleArgs())) - - std::min(6U, static_cast<size_t>(NumArgs() - NumFloatOrDoubleArgs())); + - std::min(kMaxFloatOrDoubleRegisterArguments, static_cast<size_t>(NumFloatOrDoubleArgs())) + - std::min(kMaxIntLikeRegisterArguments, static_cast<size_t>(NumArgs() - NumFloatOrDoubleArgs())); return total_stack_args; } diff --git a/compiler/jni/quick/x86_64/calling_convention_x86_64.h b/compiler/jni/quick/x86_64/calling_convention_x86_64.h index b98f5057e..cdba334d8 100644 --- a/compiler/jni/quick/x86_64/calling_convention_x86_64.h +++ b/compiler/jni/quick/x86_64/calling_convention_x86_64.h @@ -48,7 +48,10 @@ class X86_64ManagedRuntimeCallingConvention FINAL : public ManagedRuntimeCalling class X86_64JniCallingConvention FINAL : public JniCallingConvention { public: - X86_64JniCallingConvention(bool is_static, bool is_synchronized, const char* shorty); + X86_64JniCallingConvention(bool is_static, + bool is_synchronized, + bool is_critical_native, + const char* shorty); ~X86_64JniCallingConvention() OVERRIDE {} // Calling convention ManagedRegister ReturnRegister() OVERRIDE; diff --git a/compiler/utils/arm/jni_macro_assembler_arm.cc b/compiler/utils/arm/jni_macro_assembler_arm.cc index e0bfa12b2..cf7a4d1b7 100644 --- a/compiler/utils/arm/jni_macro_assembler_arm.cc +++ b/compiler/utils/arm/jni_macro_assembler_arm.cc @@ -367,11 +367,21 @@ void ArmJNIMacroAssembler::Move(ManagedRegister m_dst, ManagedRegister m_src, si CHECK(src.IsCoreRegister()) << src; __ mov(dst.AsCoreRegister(), ShifterOperand(src.AsCoreRegister())); } else if (dst.IsDRegister()) { - CHECK(src.IsDRegister()) << src; - __ vmovd(dst.AsDRegister(), src.AsDRegister()); + if (src.IsDRegister()) { + __ vmovd(dst.AsDRegister(), src.AsDRegister()); + } else { + // VMOV Dn, Rlo, Rhi (Dn = {Rlo, Rhi}) + CHECK(src.IsRegisterPair()) << src; + __ vmovdrr(dst.AsDRegister(), src.AsRegisterPairLow(), src.AsRegisterPairHigh()); + } } else if (dst.IsSRegister()) { - CHECK(src.IsSRegister()) << src; - __ vmovs(dst.AsSRegister(), src.AsSRegister()); + if (src.IsSRegister()) { + __ vmovs(dst.AsSRegister(), src.AsSRegister()); + } else { + // VMOV Sn, Rn (Sn = Rn) + CHECK(src.IsCoreRegister()) << src; + __ vmovsr(dst.AsSRegister(), src.AsCoreRegister()); + } } else { CHECK(dst.IsRegisterPair()) << dst; CHECK(src.IsRegisterPair()) << src; diff --git a/compiler/utils/assembler_thumb_test.cc b/compiler/utils/assembler_thumb_test.cc index 367ed9732..3b05173d8 100644 --- a/compiler/utils/assembler_thumb_test.cc +++ b/compiler/utils/assembler_thumb_test.cc @@ -1661,13 +1661,19 @@ void EmitAndCheck(JniAssemblerType* assembler, const char* testname) { TEST_F(ArmVIXLAssemblerTest, VixlJniHelpers) { const bool is_static = true; const bool is_synchronized = false; + const bool is_critical_native = false; const char* shorty = "IIFII"; ArenaPool pool; ArenaAllocator arena(&pool); std::unique_ptr<JniCallingConvention> jni_conv( - JniCallingConvention::Create(&arena, is_static, is_synchronized, shorty, kThumb2)); + JniCallingConvention::Create(&arena, + is_static, + is_synchronized, + is_critical_native, + shorty, + kThumb2)); std::unique_ptr<ManagedRuntimeCallingConvention> mr_conv( ManagedRuntimeCallingConvention::Create(&arena, is_static, is_synchronized, shorty, kThumb2)); const int frame_size(jni_conv->FrameSize()); diff --git a/runtime/art_method.cc 
b/runtime/art_method.cc index f9bc249a4..1392399bd 100644 --- a/runtime/art_method.cc +++ b/runtime/art_method.cc @@ -335,20 +335,30 @@ bool ArtMethod::IsOverridableByDefaultMethod() { } bool ArtMethod::IsAnnotatedWithFastNative() { + return IsAnnotatedWith(WellKnownClasses::dalvik_annotation_optimization_FastNative, + DexFile::kDexVisibilityBuild); +} + +bool ArtMethod::IsAnnotatedWithCriticalNative() { + return IsAnnotatedWith(WellKnownClasses::dalvik_annotation_optimization_CriticalNative, + DexFile::kDexVisibilityBuild); +} + +bool ArtMethod::IsAnnotatedWith(jclass klass, uint32_t visibility) { Thread* self = Thread::Current(); ScopedObjectAccess soa(self); StackHandleScope<1> shs(self); const DexFile& dex_file = GetDeclaringClass()->GetDexFile(); - mirror::Class* fast_native_annotation = - soa.Decode<mirror::Class*>(WellKnownClasses::dalvik_annotation_optimization_FastNative); - Handle<mirror::Class> fast_native_handle(shs.NewHandle(fast_native_annotation)); + mirror::Class* annotation = soa.Decode<mirror::Class*>(klass); + DCHECK(annotation->IsAnnotation()); + Handle<mirror::Class> annotation_handle(shs.NewHandle(annotation)); // Note: Resolves any method annotations' classes as a side-effect. // -- This seems allowed by the spec since it says we can preload any classes // referenced by another class's constant pool table. - return dex_file.IsMethodAnnotationPresent(this, fast_native_handle, DexFile::kDexVisibilityBuild); + return dex_file.IsMethodAnnotationPresent(this, annotation_handle, visibility); } bool ArtMethod::EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params) { diff --git a/runtime/art_method.h b/runtime/art_method.h index b25087cc2..8051a1fc7 100644 --- a/runtime/art_method.h +++ b/runtime/art_method.h @@ -378,6 +378,10 @@ class ArtMethod FINAL { // -- Independent of kAccFastNative access flags. bool IsAnnotatedWithFastNative(); + // Checks to see if the method was annotated with @dalvik.annotation.optimization.CriticalNative + // -- Unrelated to the GC notion of "critical". + bool IsAnnotatedWithCriticalNative(); + // Returns true if this method could be overridden by a default method. bool IsOverridableByDefaultMethod() REQUIRES_SHARED(Locks::mutator_lock_); @@ -776,6 +780,8 @@ class ArtMethod FINAL { } ptr_sized_fields_; private: + bool IsAnnotatedWith(jclass klass, uint32_t visibility); + static constexpr size_t PtrSizedFieldsOffset(PointerSize pointer_size) { // Round up to pointer size for padding field. Tested in art_method.cc. 
return RoundUp(offsetof(ArtMethod, hotness_count_) + sizeof(hotness_count_), diff --git a/runtime/common_runtime_test.cc b/runtime/common_runtime_test.cc index 741b68299..dba0a8112 100644 --- a/runtime/common_runtime_test.cc +++ b/runtime/common_runtime_test.cc @@ -608,6 +608,10 @@ CheckJniAbortCatcher::~CheckJniAbortCatcher() { EXPECT_TRUE(actual_.empty()) << actual_; } +void CheckJniAbortCatcher::Check(const std::string& expected_text) { + Check(expected_text.c_str()); +} + void CheckJniAbortCatcher::Check(const char* expected_text) { EXPECT_TRUE(actual_.find(expected_text) != std::string::npos) << "\n" << "Expected to find: " << expected_text << "\n" diff --git a/runtime/common_runtime_test.h b/runtime/common_runtime_test.h index 00394e9b1..b2090b7e0 100644 --- a/runtime/common_runtime_test.h +++ b/runtime/common_runtime_test.h @@ -184,6 +184,7 @@ class CheckJniAbortCatcher { ~CheckJniAbortCatcher(); + void Check(const std::string& expected_text); void Check(const char* expected_text); private: diff --git a/runtime/well_known_classes.cc b/runtime/well_known_classes.cc index 2c992753f..5f5fbc89f 100644 --- a/runtime/well_known_classes.cc +++ b/runtime/well_known_classes.cc @@ -30,6 +30,7 @@ namespace art { jclass WellKnownClasses::com_android_dex_Dex; +jclass WellKnownClasses::dalvik_annotation_optimization_CriticalNative; jclass WellKnownClasses::dalvik_annotation_optimization_FastNative; jclass WellKnownClasses::dalvik_system_DexFile; jclass WellKnownClasses::dalvik_system_DexPathList; @@ -216,6 +217,8 @@ static jmethodID CachePrimitiveBoxingMethod(JNIEnv* env, char prim_name, const c void WellKnownClasses::Init(JNIEnv* env) { com_android_dex_Dex = CacheClass(env, "com/android/dex/Dex"); + dalvik_annotation_optimization_CriticalNative = + CacheClass(env, "dalvik/annotation/optimization/CriticalNative"); dalvik_annotation_optimization_FastNative = CacheClass(env, "dalvik/annotation/optimization/FastNative"); dalvik_system_DexFile = CacheClass(env, "dalvik/system/DexFile"); dalvik_system_DexPathList = CacheClass(env, "dalvik/system/DexPathList"); diff --git a/runtime/well_known_classes.h b/runtime/well_known_classes.h index 25c942429..ce710ffa2 100644 --- a/runtime/well_known_classes.h +++ b/runtime/well_known_classes.h @@ -41,6 +41,7 @@ struct WellKnownClasses { REQUIRES_SHARED(Locks::mutator_lock_); static jclass com_android_dex_Dex; + static jclass dalvik_annotation_optimization_CriticalNative; static jclass dalvik_annotation_optimization_FastNative; static jclass dalvik_system_DexFile; static jclass dalvik_system_DexPathList; diff --git a/test/004-JniTest/jni_test.cc b/test/004-JniTest/jni_test.cc index bb18a707f..81be531e4 100644 --- a/test/004-JniTest/jni_test.cc +++ b/test/004-JniTest/jni_test.cc @@ -28,9 +28,11 @@ namespace art { static JavaVM* jvm = nullptr; static jint Java_Main_intFastNativeMethod(JNIEnv*, jclass, jint a, jint b, jint c); +static jint Java_Main_intCriticalNativeMethod(jint a, jint b, jint c); static JNINativeMethod sMainMethods[] = { - {"intFastNativeMethod", "(III)I", reinterpret_cast(Java_Main_intFastNativeMethod) } + {"intFastNativeMethod", "(III)I", reinterpret_cast(Java_Main_intFastNativeMethod) }, + {"intCriticalNativeMethod", "(III)I", reinterpret_cast(Java_Main_intCriticalNativeMethod) }, }; extern "C" JNIEXPORT jint JNI_OnLoad(JavaVM *vm, void*) { @@ -766,5 +768,12 @@ static jint Java_Main_intFastNativeMethod(JNIEnv*, jclass, jint a, jint b, jint return a + b + c; } +// Annotated with @CriticalNative in Java code. 
Doesn't use the "!" fast-JNI prefix when registered. +// NOTE: Has to be registered explicitly (rather than left to lazy lookup) to avoid mutator lock check failures. +static jint Java_Main_intCriticalNativeMethod(jint a, jint b, jint c) { + // Note that unlike a @FastNative method, this signature omits the JNIEnv* and jclass parameters. + return a + b + c; +} + } // namespace art diff --git a/test/004-JniTest/src/Main.java b/test/004-JniTest/src/Main.java index 573afdbd1..bb098e44d 100644 --- a/test/004-JniTest/src/Main.java +++ b/test/004-JniTest/src/Main.java @@ -18,6 +18,7 @@ import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; +import dalvik.annotation.optimization.CriticalNative; import dalvik.annotation.optimization.FastNative; public class Main { @@ -49,6 +50,7 @@ public class Main { registerNativesJniTest(); testFastNativeMethods(); + testCriticalNativeMethods(); } private static native boolean registerNativesJniTest(); @@ -288,7 +290,23 @@ public class Main { } } + // Smoke test for @CriticalNative + // TODO: Add far more thorough tests, since this change touched quite a lot of code. + // Return sum of a+b+c. + @CriticalNative + static native int intCriticalNativeMethod(int a, int b, int c); + + private static void testCriticalNativeMethods() { + int returns[] = { 3, 6, 9, 12, 15 }; + for (int i = 0; i < returns.length; i++) { + int result = intCriticalNativeMethod(i, i+1, i+2); + if (returns[i] != result) { + System.out.println("CriticalNative Int Run " + i + " with " + returns[i] + " vs " + result); + throw new AssertionError(); + } + } + } } @FunctionalInterface diff --git a/test/MyClassNatives/MyClassNatives.java b/test/MyClassNatives/MyClassNatives.java index 45cfd0f59..3cb1f23f8 100644 --- a/test/MyClassNatives/MyClassNatives.java +++ b/test/MyClassNatives/MyClassNatives.java @@ -14,40 +14,77 @@ * limitations under the License. */ +import dalvik.annotation.optimization.CriticalNative; import dalvik.annotation.optimization.FastNative; +/* + * AUTOMATICALLY GENERATED FROM art/tools/mako-source-generator/...../MyClassNatives.java.mako + * + * !!! DO NOT EDIT DIRECTLY !!! 
+ * + */ class MyClassNatives { + + // Normal native native void throwException(); + // Normal native native void foo(); + // Normal native native int bar(int count); + // Normal native static native int sbar(int count); + // Normal native native int fooI(int x); + // Normal native native int fooII(int x, int y); + // Normal native native long fooJJ(long x, long y); + // Normal native native Object fooO(Object x); + // Normal native native double fooDD(double x, double y); + // Normal native synchronized native long fooJJ_synchronized(long x, long y); + // Normal native native Object fooIOO(int x, Object y, Object z); + // Normal native static native Object fooSIOO(int x, Object y, Object z); + // Normal native static native int fooSII(int x, int y); + // Normal native static native double fooSDD(double x, double y); + // Normal native static synchronized native Object fooSSIOO(int x, Object y, Object z); + // Normal native static native void arraycopy(Object src, int src_pos, Object dst, int dst_pos, int length); + // Normal native native boolean compareAndSwapInt(Object obj, long offset, int expected, int newval); + // Normal native static native int getText(long val1, Object obj1, long val2, Object obj2); - synchronized native Object []getSinkPropertiesNative(String path); + // Normal native + synchronized native Object[] getSinkPropertiesNative(String path); + // Normal native native Class instanceMethodThatShouldReturnClass(); + // Normal native static native Class staticMethodThatShouldReturnClass(); + // Normal native native void instanceMethodThatShouldTakeClass(int i, Class c); + // Normal native static native void staticMethodThatShouldTakeClass(int i, Class c); + // TODO: These 3 seem like they could work for @CriticalNative as well if they were static. 
+ // Normal native native float checkFloats(float f1, float f2); + // Normal native native void forceStackParameters(int i1, int i2, int i3, int i4, int i5, int i6, int i8, int i9, float f1, float f2, float f3, float f4, float f5, float f6, float f7, float f8, float f9); + // Normal native native void checkParameterAlign(int i1, long l1); + + // Normal native native void maxParamNumber(Object o0, Object o1, Object o2, Object o3, Object o4, Object o5, Object o6, Object o7, Object o8, Object o9, Object o10, Object o11, Object o12, Object o13, Object o14, Object o15, Object o16, Object o17, Object o18, Object o19, Object o20, Object o21, Object o22, Object o23, @@ -81,32 +118,228 @@ class MyClassNatives { Object o240, Object o241, Object o242, Object o243, Object o244, Object o245, Object o246, Object o247, Object o248, Object o249, Object o250, Object o251, Object o252, Object o253); + // Normal native native void withoutImplementation(); + // Normal native native Object withoutImplementationRefReturn(); + // Normal native native static void stackArgsIntsFirst(int i1, int i2, int i3, int i4, int i5, int i6, int i7, int i8, int i9, int i10, float f1, float f2, float f3, float f4, float f5, float f6, float f7, float f8, float f9, float f10); + // Normal native native static void stackArgsFloatsFirst(float f1, float f2, float f3, float f4, float f5, float f6, float f7, float f8, float f9, float f10, int i1, int i2, int i3, int i4, int i5, int i6, int i7, int i8, int i9, int i10); + // Normal native native static void stackArgsMixed(int i1, float f1, int i2, float f2, int i3, float f3, int i4, float f4, int i5, float f5, int i6, float f6, int i7, float f7, int i8, float f8, int i9, float f9, int i10, float f10); + // Normal native native static void stackArgsSignExtendedMips64(int i1, int i2, int i3, int i4, int i5, int i6, int i7, int i8); + // Normal native static native double logD(double d); + // Normal native static native float logF(float f); + // Normal native static native boolean returnTrue(); + // Normal native static native boolean returnFalse(); + // Normal native static native int returnInt(); + // Normal native + static native double returnDouble(); + // Normal native + static native long returnLong(); + + + + @FastNative + native void throwException_Fast(); + @FastNative + native void foo_Fast(); + @FastNative + native int bar_Fast(int count); + @FastNative + static native int sbar_Fast(int count); + @FastNative + native int fooI_Fast(int x); + @FastNative + native int fooII_Fast(int x, int y); + @FastNative + native long fooJJ_Fast(long x, long y); + @FastNative + native Object fooO_Fast(Object x); + @FastNative + native double fooDD_Fast(double x, double y); + @FastNative + synchronized native long fooJJ_synchronized_Fast(long x, long y); + @FastNative + native Object fooIOO_Fast(int x, Object y, Object z); + @FastNative + static native Object fooSIOO_Fast(int x, Object y, Object z); + @FastNative + static native int fooSII_Fast(int x, int y); + @FastNative + static native double fooSDD_Fast(double x, double y); + @FastNative + static synchronized native Object fooSSIOO_Fast(int x, Object y, Object z); + @FastNative + static native void arraycopy_Fast(Object src, int src_pos, Object dst, int dst_pos, int length); + @FastNative + native boolean compareAndSwapInt_Fast(Object obj, long offset, int expected, int newval); + @FastNative + static native int getText_Fast(long val1, Object obj1, long val2, Object obj2); + @FastNative + synchronized native Object[] 
getSinkPropertiesNative_Fast(String path); + + @FastNative + native Class instanceMethodThatShouldReturnClass_Fast(); + @FastNative + static native Class staticMethodThatShouldReturnClass_Fast(); + + @FastNative + native void instanceMethodThatShouldTakeClass_Fast(int i, Class c); + @FastNative + static native void staticMethodThatShouldTakeClass_Fast(int i, Class c); + + // TODO: These 3 seem like they could work for @CriticalNative as well if they were static. + @FastNative + native float checkFloats_Fast(float f1, float f2); + @FastNative + native void forceStackParameters_Fast(int i1, int i2, int i3, int i4, int i5, int i6, int i8, int i9, + float f1, float f2, float f3, float f4, float f5, float f6, + float f7, float f8, float f9); + @FastNative + native void checkParameterAlign_Fast(int i1, long l1); + + @FastNative + native void maxParamNumber_Fast(Object o0, Object o1, Object o2, Object o3, Object o4, Object o5, Object o6, Object o7, + Object o8, Object o9, Object o10, Object o11, Object o12, Object o13, Object o14, Object o15, + Object o16, Object o17, Object o18, Object o19, Object o20, Object o21, Object o22, Object o23, + Object o24, Object o25, Object o26, Object o27, Object o28, Object o29, Object o30, Object o31, + Object o32, Object o33, Object o34, Object o35, Object o36, Object o37, Object o38, Object o39, + Object o40, Object o41, Object o42, Object o43, Object o44, Object o45, Object o46, Object o47, + Object o48, Object o49, Object o50, Object o51, Object o52, Object o53, Object o54, Object o55, + Object o56, Object o57, Object o58, Object o59, Object o60, Object o61, Object o62, Object o63, + Object o64, Object o65, Object o66, Object o67, Object o68, Object o69, Object o70, Object o71, + Object o72, Object o73, Object o74, Object o75, Object o76, Object o77, Object o78, Object o79, + Object o80, Object o81, Object o82, Object o83, Object o84, Object o85, Object o86, Object o87, + Object o88, Object o89, Object o90, Object o91, Object o92, Object o93, Object o94, Object o95, + Object o96, Object o97, Object o98, Object o99, Object o100, Object o101, Object o102, Object o103, + Object o104, Object o105, Object o106, Object o107, Object o108, Object o109, Object o110, Object o111, + Object o112, Object o113, Object o114, Object o115, Object o116, Object o117, Object o118, Object o119, + Object o120, Object o121, Object o122, Object o123, Object o124, Object o125, Object o126, Object o127, + Object o128, Object o129, Object o130, Object o131, Object o132, Object o133, Object o134, Object o135, + Object o136, Object o137, Object o138, Object o139, Object o140, Object o141, Object o142, Object o143, + Object o144, Object o145, Object o146, Object o147, Object o148, Object o149, Object o150, Object o151, + Object o152, Object o153, Object o154, Object o155, Object o156, Object o157, Object o158, Object o159, + Object o160, Object o161, Object o162, Object o163, Object o164, Object o165, Object o166, Object o167, + Object o168, Object o169, Object o170, Object o171, Object o172, Object o173, Object o174, Object o175, + Object o176, Object o177, Object o178, Object o179, Object o180, Object o181, Object o182, Object o183, + Object o184, Object o185, Object o186, Object o187, Object o188, Object o189, Object o190, Object o191, + Object o192, Object o193, Object o194, Object o195, Object o196, Object o197, Object o198, Object o199, + Object o200, Object o201, Object o202, Object o203, Object o204, Object o205, Object o206, Object o207, + Object o208, Object o209, Object o210, 
Object o211, Object o212, Object o213, Object o214, Object o215, + Object o216, Object o217, Object o218, Object o219, Object o220, Object o221, Object o222, Object o223, + Object o224, Object o225, Object o226, Object o227, Object o228, Object o229, Object o230, Object o231, + Object o232, Object o233, Object o234, Object o235, Object o236, Object o237, Object o238, Object o239, + Object o240, Object o241, Object o242, Object o243, Object o244, Object o245, Object o246, Object o247, + Object o248, Object o249, Object o250, Object o251, Object o252, Object o253); + + @FastNative + native void withoutImplementation_Fast(); + @FastNative + native Object withoutImplementationRefReturn_Fast(); + + @FastNative + native static void stackArgsIntsFirst_Fast(int i1, int i2, int i3, int i4, int i5, int i6, int i7, + int i8, int i9, int i10, float f1, float f2, float f3, float f4, float f5, float f6, + float f7, float f8, float f9, float f10); + + @FastNative + native static void stackArgsFloatsFirst_Fast(float f1, float f2, float f3, float f4, float f5, + float f6, float f7, float f8, float f9, float f10, int i1, int i2, int i3, int i4, int i5, + int i6, int i7, int i8, int i9, int i10); + + @FastNative + native static void stackArgsMixed_Fast(int i1, float f1, int i2, float f2, int i3, float f3, int i4, + float f4, int i5, float f5, int i6, float f6, int i7, float f7, int i8, float f8, int i9, + float f9, int i10, float f10); + + @FastNative + native static void stackArgsSignExtendedMips64_Fast(int i1, int i2, int i3, int i4, int i5, int i6, + int i7, int i8); + + @FastNative + static native double logD_Fast(double d); + @FastNative + static native float logF_Fast(float f); + @FastNative + static native boolean returnTrue_Fast(); + @FastNative + static native boolean returnFalse_Fast(); + @FastNative + static native int returnInt_Fast(); + @FastNative + static native double returnDouble_Fast(); + @FastNative + static native long returnLong_Fast(); + + + + @CriticalNative + static native int sbar_Critical(int count); + @CriticalNative + static native int fooSII_Critical(int x, int y); + @CriticalNative + static native double fooSDD_Critical(double x, double y); + + @CriticalNative + native static void stackArgsIntsFirst_Critical(int i1, int i2, int i3, int i4, int i5, int i6, int i7, + int i8, int i9, int i10, float f1, float f2, float f3, float f4, float f5, float f6, + float f7, float f8, float f9, float f10); + + @CriticalNative + native static void stackArgsFloatsFirst_Critical(float f1, float f2, float f3, float f4, float f5, + float f6, float f7, float f8, float f9, float f10, int i1, int i2, int i3, int i4, int i5, + int i6, int i7, int i8, int i9, int i10); + + @CriticalNative + native static void stackArgsMixed_Critical(int i1, float f1, int i2, float f2, int i3, float f3, int i4, + float f4, int i5, float f5, int i6, float f6, int i7, float f7, int i8, float f8, int i9, + float f9, int i10, float f10); + + @CriticalNative + native static void stackArgsSignExtendedMips64_Critical(int i1, int i2, int i3, int i4, int i5, int i6, + int i7, int i8); + + @CriticalNative + static native double logD_Critical(double d); + @CriticalNative + static native float logF_Critical(float f); + @CriticalNative + static native boolean returnTrue_Critical(); + @CriticalNative + static native boolean returnFalse_Critical(); + @CriticalNative + static native int returnInt_Critical(); + @CriticalNative + static native double returnDouble_Critical(); + @CriticalNative + static native long returnLong_Critical(); + 
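For reference, the native counterparts of the _Critical methods above use the stripped-down signature with no JNIEnv* or jclass. A minimal sketch of the native side (the body and registration snippet here are illustrative; the real test implementations live in jni_compiler_test.cc):

#include <jni.h>

// @CriticalNative calling convention: no JNIEnv* and no jclass parameter.
static jint fooSII_Critical(jint x, jint y) {
  return x + y;  // primitive-only work; no JNI calls are possible here
}

// Must be registered explicitly (e.g. during JNI_OnLoad) rather than
// resolved lazily:
static const JNINativeMethod kCriticalMethods[] = {
  {"fooSII_Critical", "(II)I", reinterpret_cast<void*>(fooSII_Critical)},
};
// env->RegisterNatives(myClassNativesClass, kCriticalMethods, 1);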
+ - // Check for @FastNative annotation presence [or lack of presence]. + // Check for @FastNative/@CriticalNative annotation presence [or lack of presence]. public static native void normalNative(); @FastNative public static native void fastNative(); + @CriticalNative + public static native void criticalNative(); } -- 2.11.0