From 98d1cc8033251c93786e2fa8c59a2e555a9493be Mon Sep 17 00:00:00 2001 From: Mingyao Yang Date: Thu, 15 May 2014 17:02:16 -0700 Subject: [PATCH] Improve performance of invokevirtual/invokeinterface with embedded imt/vtable Add an embedded version of imt/vtable into class object. Both tables start at fixed offset within class object so method/entry point can be loaded directly from class object for invokeinterface/invokevirtual. Bug: 8142917 Change-Id: I4240d58cfbe9250107c95c0708c036854c455968 --- {runtime => compiler}/compiled_class.h | 6 +- compiler/compilers.cc | 1 + compiler/dex/quick/gen_invoke.cc | 50 +- compiler/dex/quick/x86/int_x86.cc | 1 + compiler/driver/compiler_driver-inl.h | 1 + compiler/driver/compiler_driver.cc | 1 + compiler/driver/compiler_driver.h | 3 +- compiler/image_writer.cc | 108 +++- compiler/image_writer.h | 8 + compiler/jni/quick/jni_compiler.cc | 1 + compiler/oat_writer.cc | 1 + oatdump/oatdump.cc | 2 +- runtime/arch/arm/entrypoints_init_arm.cc | 1 + runtime/arch/arm64/entrypoints_init_arm64.cc | 1 + runtime/arch/mips/entrypoints_init_mips.cc | 2 + runtime/arch/stub_test.cc | 4 +- runtime/arch/x86/entrypoints_init_x86.cc | 2 + runtime/arch/x86_64/entrypoints_init_x86_64.cc | 6 +- runtime/class_linker.cc | 420 +++++++++---- runtime/class_linker.h | 56 +- runtime/class_linker_test.cc | 63 +- runtime/entrypoints/entrypoint_utils-inl.h | 696 +++++++++++++++++++++ runtime/entrypoints/entrypoint_utils.cc | 5 +- runtime/entrypoints/entrypoint_utils.h | 626 +----------------- runtime/entrypoints/math_entrypoints.cc | 2 +- .../portable/portable_alloc_entrypoints.cc | 2 +- .../portable/portable_cast_entrypoints.cc | 2 +- .../portable/portable_dexcache_entrypoints.cc | 2 +- .../portable/portable_field_entrypoints.cc | 2 +- .../portable/portable_fillarray_entrypoints.cc | 2 +- .../portable/portable_invoke_entrypoints.cc | 2 +- .../portable/portable_jni_entrypoints.cc | 2 +- .../portable/portable_lock_entrypoints.cc | 2 +- .../portable/portable_thread_entrypoints.cc | 2 +- .../portable/portable_throw_entrypoints.cc | 2 +- .../portable/portable_trampoline_entrypoints.cc | 4 +- .../entrypoints/quick/quick_alloc_entrypoints.cc | 2 +- .../quick/quick_dexcache_entrypoints.cc | 2 +- .../entrypoints/quick/quick_field_entrypoints.cc | 2 +- .../quick/quick_instrumentation_entrypoints.cc | 2 +- runtime/entrypoints/quick/quick_jni_entrypoints.cc | 2 +- .../entrypoints/quick/quick_thread_entrypoints.cc | 2 +- .../entrypoints/quick/quick_throw_entrypoints.cc | 2 +- .../quick/quick_trampoline_entrypoints.cc | 2 +- runtime/instrumentation.cc | 62 +- runtime/interpreter/interpreter_common.h | 2 +- runtime/mirror/array-inl.h | 5 + runtime/mirror/array.h | 3 + runtime/mirror/art_field-inl.h | 5 + runtime/mirror/art_field.h | 26 +- runtime/mirror/art_method-inl.h | 23 +- runtime/mirror/art_method.cc | 8 - runtime/mirror/art_method.h | 49 +- runtime/mirror/class-inl.h | 112 +++- runtime/mirror/class.cc | 137 ++-- runtime/mirror/class.h | 163 ++++- runtime/mirror/class_loader.h | 6 + runtime/mirror/dex_cache-inl.h | 5 + runtime/mirror/dex_cache.h | 20 +- runtime/mirror/iftable.h | 2 +- runtime/mirror/object-inl.h | 6 + runtime/mirror/object.h | 45 +- runtime/mirror/object_array.h | 7 +- runtime/mirror/object_test.cc | 2 +- runtime/mirror/proxy.h | 22 +- runtime/mirror/stack_trace_element.h | 2 +- runtime/mirror/string-inl.h | 6 + runtime/mirror/string.h | 22 +- runtime/native/java_lang_Class.cc | 4 +- runtime/native/java_lang_reflect_Method.cc | 4 +- runtime/oat_file.h | 2 +- 
runtime/object_callbacks.h | 8 +- runtime/object_utils.h | 1 - runtime/quick_exception_handler.cc | 3 + runtime/runtime.cc | 8 +- runtime/thread.h | 3 +- runtime/utils.h | 14 + runtime/verifier/method_verifier_test.cc | 2 +- 78 files changed, 1804 insertions(+), 1092 deletions(-) rename {runtime => compiler}/compiled_class.h (88%) create mode 100644 runtime/entrypoints/entrypoint_utils-inl.h diff --git a/runtime/compiled_class.h b/compiler/compiled_class.h similarity index 88% rename from runtime/compiled_class.h rename to compiler/compiled_class.h index c53d50050..b88d613ad 100644 --- a/runtime/compiled_class.h +++ b/compiler/compiled_class.h @@ -14,8 +14,8 @@ * limitations under the License. */ -#ifndef ART_RUNTIME_COMPILED_CLASS_H_ -#define ART_RUNTIME_COMPILED_CLASS_H_ +#ifndef ART_COMPILER_COMPILED_CLASS_H_ +#define ART_COMPILER_COMPILED_CLASS_H_ #include "mirror/class.h" @@ -34,4 +34,4 @@ class CompiledClass { } // namespace art -#endif // ART_RUNTIME_COMPILED_CLASS_H_ +#endif // ART_COMPILER_COMPILED_CLASS_H_ diff --git a/compiler/compilers.cc b/compiler/compilers.cc index f940b54f2..bac1f1282 100644 --- a/compiler/compilers.cc +++ b/compiler/compilers.cc @@ -15,6 +15,7 @@ */ #include "compilers.h" + #include "dex/mir_graph.h" #include "dex/quick/mir_to_lir.h" #include "elf_writer_quick.h" diff --git a/compiler/dex/quick/gen_invoke.cc b/compiler/dex/quick/gen_invoke.cc index 56986b4b6..3a304304e 100755 --- a/compiler/dex/quick/gen_invoke.cc +++ b/compiler/dex/quick/gen_invoke.cc @@ -22,6 +22,7 @@ #include "entrypoints/quick/quick_entrypoints.h" #include "invoke_type.h" #include "mirror/array.h" +#include "mirror/class-inl.h" #include "mirror/object_array-inl.h" #include "mirror/string.h" #include "mir_to_lir-inl.h" @@ -666,25 +667,23 @@ static int NextVCallInsn(CompilationUnit* cu, CallInfo* info, } case 1: // Is "this" null? 
[use kArg1] cg->GenNullCheck(cg->TargetRefReg(kArg1), info->opt_flags); - // get this->klass_ [use kArg1, set kInvokeTgt] + // get this->klass_ [use kArg1, set kArg0] cg->LoadRefDisp(cg->TargetRefReg(kArg1), mirror::Object::ClassOffset().Int32Value(), - cg->TargetPtrReg(kInvokeTgt), + cg->TargetRefReg(kArg0), kNotVolatile); cg->MarkPossibleNullPointerException(info->opt_flags); break; - case 2: // Get this->klass_->vtable [usr kInvokeTgt, set kInvokeTgt] - cg->LoadRefDisp(cg->TargetPtrReg(kInvokeTgt), mirror::Class::VTableOffset().Int32Value(), - cg->TargetPtrReg(kInvokeTgt), - kNotVolatile); - break; - case 3: // Get target method [use kInvokeTgt, set kArg0] - cg->LoadRefDisp(cg->TargetPtrReg(kInvokeTgt), - ObjArray::OffsetOfElement(method_idx).Int32Value(), - cg->TargetRefReg(kArg0), - kNotVolatile); + case 2: { + // Get this->klass_.embedded_vtable[method_idx] [usr kArg0, set kArg0] + int32_t offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() + + method_idx * sizeof(mirror::Class::VTableEntry); + // Load target method from embedded vtable to kArg0 [use kArg0, set kArg0] + cg->LoadRefDisp(cg->TargetRefReg(kArg0), offset, cg->TargetRefReg(kArg0), kNotVolatile); break; - case 4: // Get the compiled code address [uses kArg0, sets kInvokeTgt] + } + case 3: if (cu->instruction_set != kX86 && cu->instruction_set != kX86_64) { + // Get the compiled code address [use kArg0, set kInvokeTgt] cg->LoadWordDisp(cg->TargetRefReg(kArg0), mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value(), cg->TargetPtrReg(kInvokeTgt)); @@ -724,27 +723,24 @@ static int NextInterfaceCallInsn(CompilationUnit* cu, CallInfo* info, int state, } case 2: // Is "this" null? [use kArg1] cg->GenNullCheck(cg->TargetRefReg(kArg1), info->opt_flags); - // Get this->klass_ [use kArg1, set kInvokeTgt] + // Get this->klass_ [use kArg1, set kArg0] cg->LoadRefDisp(cg->TargetRefReg(kArg1), mirror::Object::ClassOffset().Int32Value(), - cg->TargetPtrReg(kInvokeTgt), + cg->TargetRefReg(kArg0), kNotVolatile); cg->MarkPossibleNullPointerException(info->opt_flags); break; - case 3: // Get this->klass_->imtable [use kInvokeTgt, set kInvokeTgt] - // NOTE: native pointer. - cg->LoadRefDisp(cg->TargetPtrReg(kInvokeTgt), mirror::Class::ImTableOffset().Int32Value(), - cg->TargetPtrReg(kInvokeTgt), + case 3: { // Get target method [use kInvokeTgt, set kArg0] + int32_t offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() + + (method_idx % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry); + // Load target method from embedded imtable to kArg0 [use kArg0, set kArg0] + cg->LoadRefDisp(cg->TargetRefReg(kArg0), offset, + cg->TargetRefReg(kArg0), kNotVolatile); break; - case 4: // Get target method [use kInvokeTgt, set kArg0] - // NOTE: native pointer. 
- cg->LoadRefDisp(cg->TargetPtrReg(kInvokeTgt), - ObjArray::OffsetOfElement(method_idx % ClassLinker::kImtSize).Int32Value(), - cg->TargetRefReg(kArg0), - kNotVolatile); - break; - case 5: // Get the compiled code address [use kArg0, set kInvokeTgt] + } + case 4: if (cu->instruction_set != kX86 && cu->instruction_set != kX86_64) { + // Get the compiled code address [use kArg0, set kInvokeTgt] cg->LoadWordDisp(cg->TargetRefReg(kArg0), mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value(), cg->TargetPtrReg(kInvokeTgt)); diff --git a/compiler/dex/quick/x86/int_x86.cc b/compiler/dex/quick/x86/int_x86.cc index 4ecc5d867..1c63da40d 100755 --- a/compiler/dex/quick/x86/int_x86.cc +++ b/compiler/dex/quick/x86/int_x86.cc @@ -19,6 +19,7 @@ #include "codegen_x86.h" #include "dex/quick/mir_to_lir-inl.h" #include "dex/reg_storage_eq.h" +#include "mirror/art_method.h" #include "mirror/array.h" #include "x86_lir.h" diff --git a/compiler/driver/compiler_driver-inl.h b/compiler/driver/compiler_driver-inl.h index 324f7172a..99fcc26aa 100644 --- a/compiler/driver/compiler_driver-inl.h +++ b/compiler/driver/compiler_driver-inl.h @@ -18,6 +18,7 @@ #define ART_COMPILER_DRIVER_COMPILER_DRIVER_INL_H_ #include "compiler_driver.h" + #include "dex/compiler_ir.h" #include "mirror/art_field.h" #include "mirror/art_field-inl.h" diff --git a/compiler/driver/compiler_driver.cc b/compiler/driver/compiler_driver.cc index 9bf51359c..4b4d0d0d2 100644 --- a/compiler/driver/compiler_driver.cc +++ b/compiler/driver/compiler_driver.cc @@ -25,6 +25,7 @@ #include "base/stl_util.h" #include "base/timing_logger.h" #include "class_linker.h" +#include "compiled_class.h" #include "compiler.h" #include "compiler_driver-inl.h" #include "dex_compilation_unit.h" diff --git a/compiler/driver/compiler_driver.h b/compiler/driver/compiler_driver.h index 990342133..ae709f8b9 100644 --- a/compiler/driver/compiler_driver.h +++ b/compiler/driver/compiler_driver.h @@ -24,7 +24,6 @@ #include "base/mutex.h" #include "base/timing_logger.h" #include "class_reference.h" -#include "compiled_class.h" #include "compiled_method.h" #include "compiler.h" #include "dex_file.h" @@ -32,6 +31,7 @@ #include "instruction_set.h" #include "invoke_type.h" #include "method_reference.h" +#include "mirror/class.h" // For mirror::Class::Status. 
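
The code generator changes above collapse the old three-load sequence (class, vtable array, method) into a single load from the class object. The address arithmetic they emit can be written out as a small standalone sketch; kImtSize, EmbeddedVTableOffset and EmbeddedImTableOffset are the names used in this patch, while the 4-byte entry size and the helper names below are assumptions made only for illustration.

#include <cstdint>

namespace {

constexpr uint32_t kImtSize = 64;                        // mirror::Class::kImtSize
constexpr uint32_t kTableEntrySize = sizeof(uint32_t);   // assumed size of one VTableEntry/ImTableEntry

// invokevirtual fast path: load the target method straight from the receiver's
// class object, with no separate vtable array dereference.
constexpr uint32_t EmbeddedVTableEntryOffset(uint32_t embedded_vtable_offset, uint32_t method_idx) {
  return embedded_vtable_offset + method_idx * kTableEntrySize;
}

// invokeinterface fast path: pick an IMT slot by dex method index modulo kImtSize
// and load it from the embedded IMT inside the class object.
constexpr uint32_t EmbeddedImTableEntryOffset(uint32_t embedded_imtable_offset, uint32_t dex_method_idx) {
  return embedded_imtable_offset + (dex_method_idx % kImtSize) * kTableEntrySize;
}

}  // namespace

This is why both NextVCallInsn and NextInterfaceCallInsn can now keep the receiver's class in kArg0 and overwrite it with the resolved ArtMethod in a single LoadRefDisp.
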
#include "os.h" #include "profiler.h" #include "runtime.h" @@ -46,6 +46,7 @@ namespace verifier { class MethodVerifier; } // namespace verifier +class CompiledClass; class CompilerOptions; class DexCompilationUnit; class DexFileToMethodInlinerMap; diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc index acfa607f3..38b4100eb 100644 --- a/compiler/image_writer.cc +++ b/compiler/image_writer.cc @@ -630,11 +630,33 @@ class FixupVisitor { mirror::Reference::ReferentOffset(), image_writer_->GetImageAddress(ref->GetReferent())); } - private: + protected: ImageWriter* const image_writer_; mirror::Object* const copy_; }; +class FixupClassVisitor FINAL : public FixupVisitor { + public: + FixupClassVisitor(ImageWriter* image_writer, Object* copy) : FixupVisitor(image_writer, copy) { + } + + void operator()(Object* obj, MemberOffset offset, bool /*is_static*/) const + EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) { + DCHECK(obj->IsClass()); + FixupVisitor::operator()(obj, offset, false); + + if (offset.Uint32Value() < mirror::Class::EmbeddedVTableOffset().Uint32Value()) { + return; + } + } + + void operator()(mirror::Class* /*klass*/, mirror::Reference* ref) const + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) + EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) { + LOG(FATAL) << "Reference not expected here."; + } +}; + void ImageWriter::FixupObject(Object* orig, Object* copy) { DCHECK(orig != nullptr); DCHECK(copy != nullptr); @@ -646,13 +668,68 @@ void ImageWriter::FixupObject(Object* orig, Object* copy) { DCHECK_EQ(copy->GetReadBarrierPointer(), GetImageAddress(orig)); } } - FixupVisitor visitor(this, copy); - orig->VisitReferences(visitor, visitor); + if (orig->IsClass() && orig->AsClass()->ShouldHaveEmbeddedImtAndVTable()) { + FixupClassVisitor visitor(this, copy); + orig->VisitReferences(visitor, visitor); + } else { + FixupVisitor visitor(this, copy); + orig->VisitReferences(visitor, visitor); + } if (orig->IsArtMethod()) { FixupMethod(orig->AsArtMethod(), down_cast(copy)); } } +const byte* ImageWriter::GetQuickCode(mirror::ArtMethod* method, bool* quick_is_interpreted) { + DCHECK(!method->IsResolutionMethod() && !method->IsImtConflictMethod() && + !method->IsAbstract()) << PrettyMethod(method); + + // Use original code if it exists. Otherwise, set the code pointer to the resolution + // trampoline. + + // Quick entrypoint: + const byte* quick_code = GetOatAddress(method->GetQuickOatCodeOffset()); + *quick_is_interpreted = false; + if (quick_code != nullptr && + (!method->IsStatic() || method->IsConstructor() || method->GetDeclaringClass()->IsInitialized())) { + // We have code for a non-static or initialized method, just use the code. + } else if (quick_code == nullptr && method->IsNative() && + (!method->IsStatic() || method->GetDeclaringClass()->IsInitialized())) { + // Non-static or initialized native method missing compiled code, use generic JNI version. + quick_code = GetOatAddress(quick_generic_jni_trampoline_offset_); + } else if (quick_code == nullptr && !method->IsNative()) { + // We don't have code at all for a non-native method, use the interpreter. + quick_code = GetOatAddress(quick_to_interpreter_bridge_offset_); + *quick_is_interpreted = true; + } else { + CHECK(!method->GetDeclaringClass()->IsInitialized()); + // We have code for a static method, but need to go through the resolution stub for class + // initialization. 
+ quick_code = GetOatAddress(quick_resolution_trampoline_offset_); + } + return quick_code; +} + +const byte* ImageWriter::GetQuickEntryPoint(mirror::ArtMethod* method) { + // Calculate the quick entry point following the same logic as FixupMethod() below. + // The resolution method has a special trampoline to call. + if (UNLIKELY(method == Runtime::Current()->GetResolutionMethod())) { + return GetOatAddress(quick_resolution_trampoline_offset_); + } else if (UNLIKELY(method == Runtime::Current()->GetImtConflictMethod())) { + return GetOatAddress(quick_imt_conflict_trampoline_offset_); + } else { + // We assume all methods have code. If they don't currently then we set them to the use the + // resolution trampoline. Abstract methods never have code and so we need to make sure their + // use results in an AbstractMethodError. We use the interpreter to achieve this. + if (UNLIKELY(method->IsAbstract())) { + return GetOatAddress(quick_to_interpreter_bridge_offset_); + } else { + bool quick_is_interpreted; + return GetQuickCode(method, &quick_is_interpreted); + } + } +} + void ImageWriter::FixupMethod(ArtMethod* orig, ArtMethod* copy) { // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to // oat_begin_ @@ -674,29 +751,8 @@ void ImageWriter::FixupMethod(ArtMethod* orig, ArtMethod* copy) { copy->SetEntryPointFromInterpreter(reinterpret_cast (const_cast(GetOatAddress(interpreter_to_interpreter_bridge_offset_)))); } else { - // Use original code if it exists. Otherwise, set the code pointer to the resolution - // trampoline. - - // Quick entrypoint: - const byte* quick_code = GetOatAddress(orig->GetQuickOatCodeOffset()); - bool quick_is_interpreted = false; - if (quick_code != nullptr && - (!orig->IsStatic() || orig->IsConstructor() || orig->GetDeclaringClass()->IsInitialized())) { - // We have code for a non-static or initialized method, just use the code. - } else if (quick_code == nullptr && orig->IsNative() && - (!orig->IsStatic() || orig->GetDeclaringClass()->IsInitialized())) { - // Non-static or initialized native method missing compiled code, use generic JNI version. - quick_code = GetOatAddress(quick_generic_jni_trampoline_offset_); - } else if (quick_code == nullptr && !orig->IsNative()) { - // We don't have code at all for a non-native method, use the interpreter. - quick_code = GetOatAddress(quick_to_interpreter_bridge_offset_); - quick_is_interpreted = true; - } else { - CHECK(!orig->GetDeclaringClass()->IsInitialized()); - // We have code for a static method, but need to go through the resolution stub for class - // initialization. - quick_code = GetOatAddress(quick_resolution_trampoline_offset_); - } + bool quick_is_interpreted; + const byte* quick_code = GetQuickCode(orig, &quick_is_interpreted); copy->SetEntryPointFromQuickCompiledCode(quick_code); // Portable entrypoint: diff --git a/compiler/image_writer.h b/compiler/image_writer.h index 2bcb41e3f..cf5bc930d 100644 --- a/compiler/image_writer.h +++ b/compiler/image_writer.h @@ -149,6 +149,13 @@ class ImageWriter { void FixupObject(mirror::Object* orig, mirror::Object* copy) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + // Get quick code for non-resolution/imt_conflict/abstract method. + const byte* GetQuickCode(mirror::ArtMethod* method, bool* quick_is_interpreted) + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + + const byte* GetQuickEntryPoint(mirror::ArtMethod* method) + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + // Patches references in OatFile to expect runtime addresses. 
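
GetQuickCode() factors out the entry-point selection that FixupMethod() previously performed inline, so that the new GetQuickEntryPoint() can reuse the same logic. The decision it encodes is summarized by this standalone sketch; the enum and function names are illustrative, while the branch conditions mirror the code above.

enum class QuickEntry {
  kCompiledCode,           // use the method's oat code as-is
  kGenericJniTrampoline,   // native method without a compiled stub
  kInterpreterBridge,      // no code and not native: interpret
  kResolutionTrampoline,   // static method whose class still needs initialization
};

QuickEntry SelectQuickEntry(bool has_oat_code, bool is_static, bool is_constructor,
                            bool class_initialized, bool is_native) {
  if (has_oat_code && (!is_static || is_constructor || class_initialized)) {
    return QuickEntry::kCompiledCode;
  }
  if (!has_oat_code && is_native && (!is_static || class_initialized)) {
    return QuickEntry::kGenericJniTrampoline;
  }
  if (!has_oat_code && !is_native) {
    return QuickEntry::kInterpreterBridge;  // the only case where quick_is_interpreted is set
  }
  return QuickEntry::kResolutionTrampoline;
}
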
void PatchOatCodeAndMethods(File* elf_file) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); @@ -192,6 +199,7 @@ class ImageWriter { uint32_t quick_to_interpreter_bridge_offset_; friend class FixupVisitor; + friend class FixupClassVisitor; DISALLOW_COPY_AND_ASSIGN(ImageWriter); }; diff --git a/compiler/jni/quick/jni_compiler.cc b/compiler/jni/quick/jni_compiler.cc index 3bbb723bc..dec84f1b5 100644 --- a/compiler/jni/quick/jni_compiler.cc +++ b/compiler/jni/quick/jni_compiler.cc @@ -28,6 +28,7 @@ #include "driver/compiler_driver.h" #include "entrypoints/quick/quick_entrypoints.h" #include "jni_internal.h" +#include "mirror/art_method.h" #include "utils/assembler.h" #include "utils/managed_register.h" #include "utils/arm/managed_register_arm.h" diff --git a/compiler/oat_writer.cc b/compiler/oat_writer.cc index 4b6d50172..a21351bae 100644 --- a/compiler/oat_writer.cc +++ b/compiler/oat_writer.cc @@ -22,6 +22,7 @@ #include "base/stl_util.h" #include "base/unix_file/fd_file.h" #include "class_linker.h" +#include "compiled_class.h" #include "dex_file-inl.h" #include "dex/verification_results.h" #include "gc/space/space.h" diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc index 12970fcaa..3f8e92526 100644 --- a/oatdump/oatdump.cc +++ b/oatdump/oatdump.cc @@ -976,7 +976,7 @@ class ImageDumper { const void* GetQuickOatCodeBegin(mirror::ArtMethod* m) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { const void* quick_code = m->GetEntryPointFromQuickCompiledCode(); - if (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker())) { + if (quick_code == Runtime::Current()->GetClassLinker()->GetQuickResolutionTrampoline()) { quick_code = oat_dumper_->GetQuickOatCode(m); } if (oat_dumper_->GetInstructionSet() == kThumb2) { diff --git a/runtime/arch/arm/entrypoints_init_arm.cc b/runtime/arch/arm/entrypoints_init_arm.cc index 3fa09cbed..cf68c65db 100644 --- a/runtime/arch/arm/entrypoints_init_arm.cc +++ b/runtime/arch/arm/entrypoints_init_arm.cc @@ -15,6 +15,7 @@ */ #include "entrypoints/interpreter/interpreter_entrypoints.h" +#include "entrypoints/jni/jni_entrypoints.h" #include "entrypoints/portable/portable_entrypoints.h" #include "entrypoints/quick/quick_alloc_entrypoints.h" #include "entrypoints/quick/quick_entrypoints.h" diff --git a/runtime/arch/arm64/entrypoints_init_arm64.cc b/runtime/arch/arm64/entrypoints_init_arm64.cc index c19b79efe..0c33d9ce8 100644 --- a/runtime/arch/arm64/entrypoints_init_arm64.cc +++ b/runtime/arch/arm64/entrypoints_init_arm64.cc @@ -15,6 +15,7 @@ */ #include "entrypoints/interpreter/interpreter_entrypoints.h" +#include "entrypoints/jni/jni_entrypoints.h" #include "entrypoints/portable/portable_entrypoints.h" #include "entrypoints/quick/quick_entrypoints.h" #include "entrypoints/entrypoint_utils.h" diff --git a/runtime/arch/mips/entrypoints_init_mips.cc b/runtime/arch/mips/entrypoints_init_mips.cc index 70a961944..7a2e961f8 100644 --- a/runtime/arch/mips/entrypoints_init_mips.cc +++ b/runtime/arch/mips/entrypoints_init_mips.cc @@ -14,6 +14,8 @@ * limitations under the License. 
*/ +#include "entrypoints/interpreter/interpreter_entrypoints.h" +#include "entrypoints/jni/jni_entrypoints.h" #include "entrypoints/portable/portable_entrypoints.h" #include "entrypoints/quick/quick_alloc_entrypoints.h" #include "entrypoints/quick/quick_entrypoints.h" diff --git a/runtime/arch/stub_test.cc b/runtime/arch/stub_test.cc index eb490eb50..ce8faea70 100644 --- a/runtime/arch/stub_test.cc +++ b/runtime/arch/stub_test.cc @@ -1739,8 +1739,8 @@ TEST_F(StubTest, IMT) { // Sanity check: check that there is a conflict for List.contains in ArrayList. mirror::Class* arraylist_class = soa.Decode(arraylist_jclass); - mirror::ArtMethod* m = arraylist_class->GetImTable()->Get( - inf_contains->GetDexMethodIndex() % ClassLinker::kImtSize); + mirror::ArtMethod* m = arraylist_class->GetEmbeddedImTableEntry( + inf_contains->GetDexMethodIndex() % mirror::Class::kImtSize); if (!m->IsImtConflictMethod()) { LOG(WARNING) << "Test is meaningless, no IMT conflict in setup: " << diff --git a/runtime/arch/x86/entrypoints_init_x86.cc b/runtime/arch/x86/entrypoints_init_x86.cc index b217cd66b..a07299650 100644 --- a/runtime/arch/x86/entrypoints_init_x86.cc +++ b/runtime/arch/x86/entrypoints_init_x86.cc @@ -14,6 +14,8 @@ * limitations under the License. */ +#include "entrypoints/interpreter/interpreter_entrypoints.h" +#include "entrypoints/jni/jni_entrypoints.h" #include "entrypoints/portable/portable_entrypoints.h" #include "entrypoints/quick/quick_alloc_entrypoints.h" #include "entrypoints/quick/quick_entrypoints.h" diff --git a/runtime/arch/x86_64/entrypoints_init_x86_64.cc b/runtime/arch/x86_64/entrypoints_init_x86_64.cc index 204d52c72..35a0cf4f2 100644 --- a/runtime/arch/x86_64/entrypoints_init_x86_64.cc +++ b/runtime/arch/x86_64/entrypoints_init_x86_64.cc @@ -14,6 +14,8 @@ * limitations under the License. */ +#include "entrypoints/interpreter/interpreter_entrypoints.h" +#include "entrypoints/jni/jni_entrypoints.h" #include "entrypoints/portable/portable_entrypoints.h" #include "entrypoints/quick/quick_alloc_entrypoints.h" #include "entrypoints/quick/quick_entrypoints.h" @@ -27,8 +29,8 @@ extern "C" void artInterpreterToInterpreterBridge(Thread* self, MethodHelper& mh const DexFile::CodeItem* code_item, ShadowFrame* shadow_frame, JValue* result); extern "C" void artInterpreterToCompiledCodeBridge(Thread* self, MethodHelper& mh, - const DexFile::CodeItem* code_item, - ShadowFrame* shadow_frame, JValue* result); + const DexFile::CodeItem* code_item, + ShadowFrame* shadow_frame, JValue* result); // Portable entrypoints. extern "C" void art_portable_resolution_trampoline(mirror::ArtMethod*); diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc index 5180e3431..85ce10a37 100644 --- a/runtime/class_linker.cc +++ b/runtime/class_linker.cc @@ -55,6 +55,7 @@ #include "mirror/object_array-inl.h" #include "mirror/proxy.h" #include "mirror/stack_trace_element.h" +#include "mirror/string-inl.h" #include "object_utils.h" #include "os.h" #include "runtime.h" @@ -207,7 +208,8 @@ void ClassLinker::InitFromCompiler(const std::vector& boot_class heap->IncrementDisableMovingGC(self); StackHandleScope<64> hs(self); // 64 is picked arbitrarily. 
Handle java_lang_Class(hs.NewHandle(down_cast( - heap->AllocNonMovableObject(self, nullptr, sizeof(mirror::ClassClass), + heap->AllocNonMovableObject(self, nullptr, + mirror::Class::ClassClassSize(), VoidFunctor())))); CHECK(java_lang_Class.Get() != NULL); mirror::Class::SetClassClass(java_lang_Class.Get()); @@ -215,43 +217,44 @@ void ClassLinker::InitFromCompiler(const std::vector& boot_class if (kUseBakerOrBrooksReadBarrier) { java_lang_Class->AssertReadBarrierPointer(); } - java_lang_Class->SetClassSize(sizeof(mirror::ClassClass)); + java_lang_Class->SetClassSize(mirror::Class::ClassClassSize()); heap->DecrementDisableMovingGC(self); // AllocClass(mirror::Class*) can now be used // Class[] is used for reflection support. - Handle class_array_class( - hs.NewHandle(AllocClass(self, java_lang_Class.Get(), sizeof(mirror::Class)))); + Handle class_array_class(hs.NewHandle( + AllocClass(self, java_lang_Class.Get(), mirror::ObjectArray::ClassSize()))); class_array_class->SetComponentType(java_lang_Class.Get()); // java_lang_Object comes next so that object_array_class can be created. - Handle java_lang_Object( - hs.NewHandle(AllocClass(self, java_lang_Class.Get(), sizeof(mirror::Class)))); + Handle java_lang_Object(hs.NewHandle( + AllocClass(self, java_lang_Class.Get(), mirror::Object::ClassSize()))); CHECK(java_lang_Object.Get() != NULL); // backfill Object as the super class of Class. java_lang_Class->SetSuperClass(java_lang_Object.Get()); java_lang_Object->SetStatus(mirror::Class::kStatusLoaded, self); // Object[] next to hold class roots. - Handle object_array_class( - hs.NewHandle(AllocClass(self, java_lang_Class.Get(), sizeof(mirror::Class)))); + Handle object_array_class(hs.NewHandle( + AllocClass(self, java_lang_Class.Get(), mirror::ObjectArray::ClassSize()))); object_array_class->SetComponentType(java_lang_Object.Get()); - // Setup the char class to be used for char[]. - Handle char_class(hs.NewHandle(AllocClass(self, java_lang_Class.Get(), - sizeof(mirror::Class)))); + // Setup the char (primitive) class to be used for char[]. + Handle char_class(hs.NewHandle( + AllocClass(self, java_lang_Class.Get(), mirror::Class::PrimitiveClassSize()))); // Setup the char[] class to be used for String. - Handle char_array_class(hs.NewHandle(AllocClass(self, java_lang_Class.Get(), - sizeof(mirror::Class)))); + Handle char_array_class(hs.NewHandle( + AllocClass(self, java_lang_Class.Get(), + mirror::Array::ClassSize()))); char_array_class->SetComponentType(char_class.Get()); mirror::CharArray::SetArrayClass(char_array_class.Get()); // Setup String. - Handle java_lang_String(hs.NewHandle(AllocClass(self, java_lang_Class.Get(), - sizeof(mirror::StringClass)))); + Handle java_lang_String(hs.NewHandle( + AllocClass(self, java_lang_Class.Get(), mirror::String::ClassSize()))); mirror::String::SetClass(java_lang_String.Get()); - java_lang_String->SetObjectSize(sizeof(mirror::String)); + java_lang_String->SetObjectSize(mirror::String::InstanceSize()); java_lang_String->SetStatus(mirror::Class::kStatusResolved, self); // Create storage for root classes, save away our work so far (requires descriptors). @@ -279,8 +282,8 @@ void ClassLinker::InitFromCompiler(const std::vector& boot_class array_iftable_ = AllocIfTable(self, 2); // Create int array type for AllocDexCache (done in AppendToBootClassPath). 
- Handle int_array_class( - hs.NewHandle(AllocClass(self, java_lang_Class.Get(), sizeof(mirror::Class)))); + Handle int_array_class(hs.NewHandle( + AllocClass(self, java_lang_Class.Get(), mirror::Array::ClassSize()))); int_array_class->SetComponentType(GetClassRoot(kPrimitiveInt)); mirror::IntArray::SetArrayClass(int_array_class.Get()); SetClassRoot(kIntArrayClass, int_array_class.Get()); @@ -288,44 +291,47 @@ void ClassLinker::InitFromCompiler(const std::vector& boot_class // now that these are registered, we can use AllocClass() and AllocObjectArray // Set up DexCache. This cannot be done later since AppendToBootClassPath calls AllocDexCache. - Handle java_lang_DexCache( - hs.NewHandle(AllocClass(self, java_lang_Class.Get(), sizeof(mirror::DexCacheClass)))); + Handle java_lang_DexCache(hs.NewHandle( + AllocClass(self, java_lang_Class.Get(), mirror::DexCache::ClassSize()))); SetClassRoot(kJavaLangDexCache, java_lang_DexCache.Get()); - java_lang_DexCache->SetObjectSize(sizeof(mirror::DexCache)); + java_lang_DexCache->SetObjectSize(mirror::DexCache::InstanceSize()); java_lang_DexCache->SetStatus(mirror::Class::kStatusResolved, self); // Constructor, Field, Method, and AbstractMethod are necessary so // that FindClass can link members. - Handle java_lang_reflect_ArtField( - hs.NewHandle(AllocClass(self, java_lang_Class.Get(), sizeof(mirror::ArtFieldClass)))); + Handle java_lang_reflect_ArtField(hs.NewHandle( + AllocClass(self, java_lang_Class.Get(), mirror::ArtField::ClassSize()))); CHECK(java_lang_reflect_ArtField.Get() != NULL); - java_lang_reflect_ArtField->SetObjectSize(sizeof(mirror::ArtField)); + java_lang_reflect_ArtField->SetObjectSize(mirror::ArtField::InstanceSize()); SetClassRoot(kJavaLangReflectArtField, java_lang_reflect_ArtField.Get()); java_lang_reflect_ArtField->SetStatus(mirror::Class::kStatusResolved, self); mirror::ArtField::SetClass(java_lang_reflect_ArtField.Get()); - Handle java_lang_reflect_ArtMethod( - hs.NewHandle(AllocClass(self, java_lang_Class.Get(), sizeof(mirror::ArtMethodClass)))); + Handle java_lang_reflect_ArtMethod(hs.NewHandle( + AllocClass(self, java_lang_Class.Get(), mirror::ArtMethod::ClassSize()))); CHECK(java_lang_reflect_ArtMethod.Get() != NULL); - java_lang_reflect_ArtMethod->SetObjectSize(sizeof(mirror::ArtMethod)); + java_lang_reflect_ArtMethod->SetObjectSize(mirror::ArtMethod::InstanceSize()); SetClassRoot(kJavaLangReflectArtMethod, java_lang_reflect_ArtMethod.Get()); java_lang_reflect_ArtMethod->SetStatus(mirror::Class::kStatusResolved, self); mirror::ArtMethod::SetClass(java_lang_reflect_ArtMethod.Get()); // Set up array classes for string, field, method - Handle object_array_string( - hs.NewHandle(AllocClass(self, java_lang_Class.Get(), sizeof(mirror::Class)))); + Handle object_array_string(hs.NewHandle( + AllocClass(self, java_lang_Class.Get(), + mirror::ObjectArray::ClassSize()))); object_array_string->SetComponentType(java_lang_String.Get()); SetClassRoot(kJavaLangStringArrayClass, object_array_string.Get()); - Handle object_array_art_method( - hs.NewHandle(AllocClass(self, java_lang_Class.Get(), sizeof(mirror::Class)))); + Handle object_array_art_method(hs.NewHandle( + AllocClass(self, java_lang_Class.Get(), + mirror::ObjectArray::ClassSize()))); object_array_art_method->SetComponentType(java_lang_reflect_ArtMethod.Get()); SetClassRoot(kJavaLangReflectArtMethodArrayClass, object_array_art_method.Get()); - Handle object_array_art_field( - hs.NewHandle(AllocClass(self, java_lang_Class.Get(), sizeof(mirror::Class)))); + Handle 
object_array_art_field(hs.NewHandle( + AllocClass(self, java_lang_Class.Get(), + mirror::ObjectArray::ClassSize()))); object_array_art_field->SetComponentType(java_lang_reflect_ArtField.Get()); SetClassRoot(kJavaLangReflectArtFieldArrayClass, object_array_art_field.Get()); @@ -359,16 +365,19 @@ void ClassLinker::InitFromCompiler(const std::vector& boot_class java_lang_Object->SetStatus(mirror::Class::kStatusNotReady, self); mirror::Class* Object_class = FindSystemClass(self, "Ljava/lang/Object;"); CHECK_EQ(java_lang_Object.Get(), Object_class); - CHECK_EQ(java_lang_Object->GetObjectSize(), sizeof(mirror::Object)); + CHECK_EQ(java_lang_Object->GetObjectSize(), mirror::Object::InstanceSize()); java_lang_String->SetStatus(mirror::Class::kStatusNotReady, self); mirror::Class* String_class = FindSystemClass(self, "Ljava/lang/String;"); - CHECK_EQ(java_lang_String.Get(), String_class); - CHECK_EQ(java_lang_String->GetObjectSize(), sizeof(mirror::String)); + std::ostringstream os1, os2; + java_lang_String->DumpClass(os1, mirror::Class::kDumpClassFullDetail); + String_class->DumpClass(os2, mirror::Class::kDumpClassFullDetail); + CHECK_EQ(java_lang_String.Get(), String_class) << os1.str() << "\n\n" << os2.str(); + CHECK_EQ(java_lang_String->GetObjectSize(), mirror::String::InstanceSize()); java_lang_DexCache->SetStatus(mirror::Class::kStatusNotReady, self); mirror::Class* DexCache_class = FindSystemClass(self, "Ljava/lang/DexCache;"); CHECK_EQ(java_lang_String.Get(), String_class); CHECK_EQ(java_lang_DexCache.Get(), DexCache_class); - CHECK_EQ(java_lang_DexCache->GetObjectSize(), sizeof(mirror::DexCache)); + CHECK_EQ(java_lang_DexCache->GetObjectSize(), mirror::DexCache::InstanceSize()); // Setup the primitive array type classes - can't be done until Object has a vtable. SetClassRoot(kBooleanArrayClass, FindSystemClass(self, "[Z")); @@ -476,7 +485,7 @@ void ClassLinker::InitFromCompiler(const std::vector& boot_class // Setup the ClassLoader, verifying the object_size_. mirror::Class* java_lang_ClassLoader = FindSystemClass(self, "Ljava/lang/ClassLoader;"); - CHECK_EQ(java_lang_ClassLoader->GetObjectSize(), sizeof(mirror::ClassLoader)); + CHECK_EQ(java_lang_ClassLoader->GetObjectSize(), mirror::ClassLoader::InstanceSize()); SetClassRoot(kJavaLangClassLoader, java_lang_ClassLoader); // Set up java.lang.Throwable, java.lang.ClassNotFoundException, and @@ -1400,36 +1409,11 @@ mirror::DexCache* ClassLinker::AllocDexCache(Thread* self, const DexFile& dex_fi return dex_cache.Get(); } -// Used to initialize a class in the allocation code path to ensure it is guarded by a StoreStore -// fence. -class InitializeClassVisitor { - public: - explicit InitializeClassVisitor(uint32_t class_size) : class_size_(class_size) { - } - - void operator()(mirror::Object* obj, size_t usable_size) const - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - DCHECK_LE(class_size_, usable_size); - // Avoid AsClass as object is not yet in live bitmap or allocation stack. - mirror::Class* klass = down_cast(obj); - // DCHECK(klass->IsClass()); - klass->SetClassSize(class_size_); - klass->SetPrimitiveType(Primitive::kPrimNot); // Default to not being primitive. - klass->SetDexClassDefIndex(DexFile::kDexNoIndex16); // Default to no valid class def index. - klass->SetDexTypeIndex(DexFile::kDexNoIndex16); // Default to no valid type index. 
- } - - private: - const uint32_t class_size_; - - DISALLOW_COPY_AND_ASSIGN(InitializeClassVisitor); -}; - mirror::Class* ClassLinker::AllocClass(Thread* self, mirror::Class* java_lang_Class, uint32_t class_size) { DCHECK_GE(class_size, sizeof(mirror::Class)); gc::Heap* heap = Runtime::Current()->GetHeap(); - InitializeClassVisitor visitor(class_size); + mirror::Class::InitializeClassVisitor visitor(class_size); mirror::Object* k = kMovingClasses ? heap->AllocObject(self, java_lang_Class, class_size, visitor) : heap->AllocNonMovableObject(self, java_lang_Class, class_size, visitor); @@ -1460,9 +1444,33 @@ mirror::ObjectArray* ClassLinker::AllocStackTraceElem self, GetClassRoot(kJavaLangStackTraceElementArrayClass), length); } -static mirror::Class* EnsureResolved(Thread* self, mirror::Class* klass) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { +mirror::Class* ClassLinker::EnsureResolved(Thread* self, const char* descriptor, + mirror::Class* klass) { DCHECK(klass != NULL); + + // For temporary classes we must wait for them to be retired. + if (init_done_ && klass->IsTemp()) { + CHECK(!klass->IsResolved()); + if (klass->IsErroneous()) { + ThrowEarlierClassFailure(klass); + return nullptr; + } + StackHandleScope<1> hs(self); + Handle h_class(hs.NewHandle(klass)); + ObjectLock lock(self, h_class); + // Loop and wait for the resolving thread to retire this class. + while (!h_class->IsRetired() && !h_class->IsErroneous()) { + lock.WaitIgnoringInterrupts(); + } + if (h_class->IsErroneous()) { + ThrowEarlierClassFailure(h_class.Get()); + return nullptr; + } + CHECK(h_class->IsRetired()); + // Get the updated class from class table. + klass = LookupClass(descriptor, h_class.Get()->GetClassLoader()); + } + // Wait for the class if it has not already been linked. if (!klass->IsResolved() && !klass->IsErroneous()) { StackHandleScope<1> hs(self); @@ -1479,6 +1487,7 @@ static mirror::Class* EnsureResolved(Thread* self, mirror::Class* klass) lock.WaitIgnoringInterrupts(); } } + if (klass->IsErroneous()) { ThrowEarlierClassFailure(klass); return nullptr; @@ -1502,7 +1511,7 @@ mirror::Class* ClassLinker::FindClass(Thread* self, const char* descriptor, // Find the class in the loaded classes table. mirror::Class* klass = LookupClass(descriptor, class_loader.Get()); if (klass != NULL) { - return EnsureResolved(self, klass); + return EnsureResolved(self, descriptor, klass); } // Class is not yet loaded. if (descriptor[0] == '[') { @@ -1576,8 +1585,10 @@ mirror::Class* ClassLinker::DefineClass(const char* descriptor, const DexFile& dex_file, const DexFile::ClassDef& dex_class_def) { Thread* self = Thread::Current(); - StackHandleScope<2> hs(self); + StackHandleScope<3> hs(self); auto klass = hs.NewHandle(nullptr); + bool should_allocate = false; + // Load the class from the dex file. if (UNLIKELY(!init_done_)) { // finish up init of hand crafted class_roots_ @@ -1594,10 +1605,18 @@ mirror::Class* ClassLinker::DefineClass(const char* descriptor, } else if (strcmp(descriptor, "Ljava/lang/reflect/ArtMethod;") == 0) { klass.Assign(GetClassRoot(kJavaLangReflectArtMethod)); } else { - klass.Assign(AllocClass(self, SizeOfClass(dex_file, dex_class_def))); + should_allocate = true; } } else { - klass.Assign(AllocClass(self, SizeOfClass(dex_file, dex_class_def))); + should_allocate = true; + } + + if (should_allocate) { + // Allocate a class with the status of not ready. + // Interface object should get the right size here. 
Regular class will + // figure out the right size later and be replaced with one of the right + // size when the class becomes resolved. + klass.Assign(AllocClass(self, SizeOfClassWithoutEmbeddedTables(dex_file, dex_class_def))); } if (UNLIKELY(klass.Get() == NULL)) { CHECK(self->IsExceptionPending()); // Expect an OOME. @@ -1612,13 +1631,15 @@ mirror::Class* ClassLinker::DefineClass(const char* descriptor, } ObjectLock lock(self, klass); klass->SetClinitThreadId(self->GetTid()); + // Add the newly loaded class to the loaded classes table. mirror::Class* existing = InsertClass(descriptor, klass.Get(), Hash(descriptor)); if (existing != NULL) { // We failed to insert because we raced with another thread. Calling EnsureResolved may cause // this thread to block. - return EnsureResolved(self, existing); + return EnsureResolved(self, descriptor, existing); } + // Finish loading (if necessary) by finding parents CHECK(!klass->IsLoaded()); if (!LoadSuperAndInterfaces(klass, dex_file)) { @@ -1631,12 +1652,17 @@ mirror::Class* ClassLinker::DefineClass(const char* descriptor, CHECK(!klass->IsResolved()); // TODO: Use fast jobjects? auto interfaces = hs.NewHandle>(nullptr); - if (!LinkClass(self, klass, interfaces)) { + + mirror::Class* new_class = nullptr; + if (!LinkClass(self, descriptor, klass, interfaces, &new_class)) { // Linking failed. klass->SetStatus(mirror::Class::kStatusError, self); return NULL; } - CHECK(klass->IsResolved()); + CHECK(new_class != nullptr) << descriptor; + CHECK(new_class->IsResolved()) << descriptor; + + Handle new_class_h(hs.NewHandle(new_class)); /* * We send CLASS_PREPARE events to the debugger from here. The @@ -1649,14 +1675,13 @@ mirror::Class* ClassLinker::DefineClass(const char* descriptor, * The class has been prepared and resolved but possibly not yet verified * at this point. 
*/ - Dbg::PostClassPrepare(klass.Get()); + Dbg::PostClassPrepare(new_class_h.Get()); - return klass.Get(); + return new_class_h.Get(); } -// Precomputes size that will be needed for Class, matching LinkStaticFields -uint32_t ClassLinker::SizeOfClass(const DexFile& dex_file, - const DexFile::ClassDef& dex_class_def) { +uint32_t ClassLinker::SizeOfClassWithoutEmbeddedTables(const DexFile& dex_file, + const DexFile::ClassDef& dex_class_def) { const byte* class_data = dex_file.GetClassData(dex_class_def); size_t num_ref = 0; size_t num_32 = 0; @@ -1675,24 +1700,7 @@ uint32_t ClassLinker::SizeOfClass(const DexFile& dex_file, } } } - // start with generic class data - uint32_t size = sizeof(mirror::Class); - // follow with reference fields which must be contiguous at start - size += (num_ref * sizeof(uint32_t)); - // if there are 64-bit fields to add, make sure they are aligned - if (num_64 != 0 && size != RoundUp(size, 8)) { // for 64-bit alignment - if (num_32 != 0) { - // use an available 32-bit field for padding - num_32--; - } - size += sizeof(uint32_t); // either way, we are adding a word - DCHECK_EQ(size, RoundUp(size, 8)); - } - // tack on any 64-bit fields now that alignment is assured - size += (num_64 * sizeof(uint64_t)); - // tack on any remaining 32-bit fields - size += (num_32 * sizeof(uint32_t)); - return size; + return mirror::Class::ComputeClassSize(false, 0, num_32, num_64, num_ref); } OatFile::OatClass ClassLinker::GetOatClass(const DexFile& dex_file, uint16_t class_def_idx) { @@ -2306,7 +2314,7 @@ void ClassLinker::FixupDexCaches(mirror::ArtMethod* resolution_method) { } mirror::Class* ClassLinker::CreatePrimitiveClass(Thread* self, Primitive::Type type) { - mirror::Class* klass = AllocClass(self, sizeof(mirror::Class)); + mirror::Class* klass = AllocClass(self, mirror::Class::PrimitiveClassSize()); if (UNLIKELY(klass == NULL)) { return NULL; } @@ -2411,7 +2419,7 @@ mirror::Class* ClassLinker::CreateArrayClass(Thread* self, const char* descripto } } if (new_class.Get() == nullptr) { - new_class.Assign(AllocClass(self, sizeof(mirror::Class))); + new_class.Assign(AllocClass(self, mirror::Array::ClassSize())); if (new_class.Get() == nullptr) { return nullptr; } @@ -2424,6 +2432,8 @@ mirror::Class* ClassLinker::CreateArrayClass(Thread* self, const char* descripto new_class->SetVTable(java_lang_Object->GetVTable()); new_class->SetPrimitiveType(Primitive::kPrimNot); new_class->SetClassLoader(component_type->GetClassLoader()); + new_class->SetStatus(mirror::Class::kStatusLoaded, self); + new_class->PopulateEmbeddedImtAndVTable(); new_class->SetStatus(mirror::Class::kStatusInitialized, self); // don't need to set new_class->SetObjectSize(..) // because Object::SizeOf delegates to Array::SizeOf @@ -2517,7 +2527,8 @@ mirror::Class* ClassLinker::InsertClass(const char* descriptor, mirror::Class* k if (existing != NULL) { return existing; } - if (kIsDebugBuild && klass->GetClassLoader() == NULL && dex_cache_image_class_lookup_required_) { + if (kIsDebugBuild && !klass->IsTemp() && klass->GetClassLoader() == NULL && + dex_cache_image_class_lookup_required_) { // Check a class loaded with the system class loader matches one in the image if the class // is in the image. 
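
Since a temporary class can now be replaced during linking, a thread that finds such a class in the class table has to wait for it to be retired and then re-read the table, which is what the EnsureResolved() changes earlier in class_linker.cc implement with an ObjectLock on the class. A minimal standalone analogue of that waiting side, using std primitives instead of ART's monitors (all names and types here are illustrative only):

#include <condition_variable>
#include <mutex>

struct ClassSlot {
  bool retired = false;
  bool erroneous = false;
  ClassSlot* replacement = nullptr;  // published by the thread that performed the linking
  std::mutex mu;
  std::condition_variable cv;
};

// Block until the defining thread retires the temporary class (or marks it
// erroneous), then return the class that replaced it in the class table.
ClassSlot* WaitForRetirement(ClassSlot* temp) {
  std::unique_lock<std::mutex> lock(temp->mu);
  temp->cv.wait(lock, [temp] { return temp->retired || temp->erroneous; });
  return temp->erroneous ? nullptr : temp->replacement;
}

In the real code the notification happens in mirror::Class::SetStatus(), and the replacement is fetched with LookupClass() rather than stored on the temporary class.
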
existing = LookupClassFromImage(descriptor); @@ -2533,6 +2544,50 @@ mirror::Class* ClassLinker::InsertClass(const char* descriptor, mirror::Class* k return NULL; } +mirror::Class* ClassLinker::UpdateClass(const char* descriptor, mirror::Class* klass, + size_t hash) { + WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_); + mirror::Class* existing = + LookupClassFromTableLocked(descriptor, klass->GetClassLoader(), hash); + + if (existing == nullptr) { + CHECK(klass->IsProxyClass()); + return nullptr; + } + + CHECK_NE(existing, klass) << descriptor; + CHECK(!existing->IsResolved()) << descriptor; + CHECK_EQ(klass->GetStatus(), mirror::Class::kStatusResolving) << descriptor; + + for (auto it = class_table_.lower_bound(hash), end = class_table_.end(); it != end && it->first == hash; + ++it) { + mirror::Class* entry = it->second; + if (entry == existing) { + class_table_.erase(it); + break; + } + } + + CHECK(!klass->IsTemp()) << descriptor; + if (kIsDebugBuild && klass->GetClassLoader() == nullptr && + dex_cache_image_class_lookup_required_) { + // Check a class loaded with the system class loader matches one in the image if the class + // is in the image. + existing = LookupClassFromImage(descriptor); + if (existing != nullptr) { + CHECK(klass == existing) << descriptor; + } + } + VerifyObject(klass); + + class_table_.insert(std::make_pair(hash, klass)); + if (log_new_class_table_roots_) { + new_class_roots_.push_back(std::make_pair(hash, klass)); + } + + return existing; +} + bool ClassLinker::RemoveClass(const char* descriptor, const mirror::ClassLoader* class_loader) { size_t hash = Hash(descriptor); WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_); @@ -2952,8 +3007,8 @@ mirror::Class* ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& jobjectArray methods, jobjectArray throws) { Thread* self = soa.Self(); StackHandleScope<8> hs(self); - Handle klass(hs.NewHandle(AllocClass(self, GetClassRoot(kJavaLangClass), - sizeof(mirror::SynthesizedProxyClass)))); + Handle klass(hs.NewHandle( + AllocClass(self, GetClassRoot(kJavaLangClass), sizeof(mirror::Class)))); if (klass.Get() == NULL) { CHECK(self->IsExceptionPending()); // OOME. return NULL; @@ -3044,20 +3099,31 @@ mirror::Class* ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& klass->SetStatus(mirror::Class::kStatusLoaded, self); // Now effectively in the loaded state. self->AssertNoPendingException(); + std::string descriptor(GetDescriptorForProxy(klass.Get())); + mirror::Class* new_class = nullptr; { - ObjectLock lock(self, klass); // Must hold lock on object when resolved. + ObjectLock resolution_lock(self, klass); // Must hold lock on object when resolved. 
// Link the fields and virtual methods, creating vtable and iftables - Handle> h_interfaces( + Handle > h_interfaces( hs.NewHandle(soa.Decode*>(interfaces))); - if (!LinkClass(self, klass, h_interfaces)) { + if (!LinkClass(self, descriptor.c_str(), klass, h_interfaces, &new_class)) { klass->SetStatus(mirror::Class::kStatusError, self); return nullptr; } + } + + CHECK(klass->IsRetired()); + CHECK_NE(klass.Get(), new_class); + klass.Assign(new_class); + + CHECK_EQ(interfaces_sfield->GetDeclaringClass(), new_class); + interfaces_sfield->SetObject(klass.Get(), soa.Decode*>(interfaces)); + CHECK_EQ(throws_sfield->GetDeclaringClass(), new_class); + throws_sfield->SetObject(klass.Get(), soa.Decode >*>(throws)); - interfaces_sfield->SetObject( - klass.Get(), soa.Decode*>(interfaces)); - throws_sfield->SetObject( - klass.Get(), soa.Decode>*>(throws)); + { + // Lock on klass is released. Lock new class object. + ObjectLock initialization_lock(self, klass); klass->SetStatus(mirror::Class::kStatusInitialized, self); } @@ -3083,14 +3149,11 @@ mirror::Class* ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& decoded_name->ToModifiedUtf8().c_str())); CHECK_EQ(PrettyField(klass->GetStaticField(1)), throws_field_name); - mirror::SynthesizedProxyClass* synth_proxy_class = - down_cast(klass.Get()); - CHECK_EQ(synth_proxy_class->GetInterfaces(), + CHECK_EQ(klass.Get()->GetInterfaces(), soa.Decode*>(interfaces)); - CHECK_EQ(synth_proxy_class->GetThrows(), + CHECK_EQ(klass.Get()->GetThrows(), soa.Decode>*>(throws)); } - std::string descriptor(GetDescriptorForProxy(klass.Get())); mirror::Class* existing = InsertClass(descriptor.c_str(), klass.Get(), Hash(descriptor.c_str())); CHECK(existing == nullptr); return klass.Get(); @@ -3524,9 +3587,49 @@ void ClassLinker::ConstructFieldMap(const DexFile& dex_file, const DexFile::Clas } } -bool ClassLinker::LinkClass(Thread* self, Handle klass, - Handle> interfaces) { +void ClassLinker::FixupTemporaryDeclaringClass(mirror::Class* temp_class, mirror::Class* new_class) { + mirror::ObjectArray* fields = new_class->GetIFields(); + if (fields != nullptr) { + for (int index = 0; index < fields->GetLength(); index ++) { + if (fields->Get(index)->GetDeclaringClass() == temp_class) { + fields->Get(index)->SetDeclaringClass(new_class); + } + } + } + + fields = new_class->GetSFields(); + if (fields != nullptr) { + for (int index = 0; index < fields->GetLength(); index ++) { + if (fields->Get(index)->GetDeclaringClass() == temp_class) { + fields->Get(index)->SetDeclaringClass(new_class); + } + } + } + + mirror::ObjectArray* methods = new_class->GetDirectMethods(); + if (methods != nullptr) { + for (int index = 0; index < methods->GetLength(); index ++) { + if (methods->Get(index)->GetDeclaringClass() == temp_class) { + methods->Get(index)->SetDeclaringClass(new_class); + } + } + } + + methods = new_class->GetVirtualMethods(); + if (methods != nullptr) { + for (int index = 0; index < methods->GetLength(); index ++) { + if (methods->Get(index)->GetDeclaringClass() == temp_class) { + methods->Get(index)->SetDeclaringClass(new_class); + } + } + } +} + +bool ClassLinker::LinkClass(Thread* self, const char* descriptor, Handle klass, + Handle> interfaces, + mirror::Class** new_class) { CHECK_EQ(mirror::Class::kStatusLoaded, klass->GetStatus()); + if (!LinkSuperClass(klass)) { return false; } @@ -3536,13 +3639,60 @@ bool ClassLinker::LinkClass(Thread* self, Handle klass, if (!LinkInstanceFields(klass)) { return false; } - if (!LinkStaticFields(klass)) { + size_t class_size; + 
if (!LinkStaticFields(klass, &class_size)) { return false; } CreateReferenceInstanceOffsets(klass); CreateReferenceStaticOffsets(klass); CHECK_EQ(mirror::Class::kStatusLoaded, klass->GetStatus()); - klass->SetStatus(mirror::Class::kStatusResolved, self); + + if (!klass->IsTemp() || (!init_done_ && klass->GetClassSize() == class_size)) { + // We don't need to retire this class as it has no embedded tables or it was created the + // correct size during class linker initialization. + CHECK_EQ(klass->GetClassSize(), class_size) << PrettyDescriptor(klass.Get()); + + if (klass->ShouldHaveEmbeddedImtAndVTable()) { + klass->PopulateEmbeddedImtAndVTable(); + } + + // This will notify waiters on klass that saw the not yet resolved + // class in the class_table_ during EnsureResolved. + klass->SetStatus(mirror::Class::kStatusResolved, self); + *new_class = klass.Get(); + } else { + CHECK(!klass->IsResolved()); + // Retire the temporary class and create the correctly sized resolved class. + *new_class = klass->CopyOf(self, class_size); + if (UNLIKELY(*new_class == NULL)) { + CHECK(self->IsExceptionPending()); // Expect an OOME. + klass->SetStatus(mirror::Class::kStatusError, self); + return false; + } + + CHECK_EQ((*new_class)->GetClassSize(), class_size); + StackHandleScope<1> hs(self); + auto new_class_h = hs.NewHandleWrapper(new_class); + ObjectLock lock(self, new_class_h); + + FixupTemporaryDeclaringClass(klass.Get(), new_class_h.Get()); + + mirror::Class* existing = UpdateClass(descriptor, new_class_h.Get(), Hash(descriptor)); + CHECK(existing == NULL || existing == klass.Get()); + + // This will notify waiters on temp class that saw the not yet resolved class in the + // class_table_ during EnsureResolved. + klass->SetStatus(mirror::Class::kStatusRetired, self); + + CHECK_EQ(new_class_h->GetStatus(), mirror::Class::kStatusResolving); + // This will notify waiters on new_class that saw the not yet resolved + // class in the class_table_ during EnsureResolved. + new_class_h->SetStatus(mirror::Class::kStatusResolved, self); + + // Only embedded imt should be used from this point. + new_class_h->SetImTable(NULL); + // TODO: remove vtable and only use embedded vtable. + } return true; } @@ -3563,6 +3713,7 @@ bool ClassLinker::LoadSuperAndInterfaces(Handle klass, const DexF PrettyDescriptor(klass.Get()).c_str()); return false; } + CHECK(super_class->IsResolved()); klass->SetSuperClass(super_class); } const DexFile::TypeList* interfaces = dex_file.GetInterfacesList(class_def); @@ -3876,7 +4027,7 @@ bool ClassLinker::LinkInterfaceMethods(Handle klass, // Allocate imtable bool imtable_changed = false; Handle> imtable( - hs.NewHandle(AllocArtMethodArray(self, kImtSize))); + hs.NewHandle(AllocArtMethodArray(self, mirror::Class::kImtSize))); if (UNLIKELY(imtable.Get() == NULL)) { CHECK(self->IsExceptionPending()); // OOME. return false; @@ -3923,7 +4074,7 @@ bool ClassLinker::LinkInterfaceMethods(Handle klass, } method_array->Set(j, vtable_method); // Place method in imt if entry is empty, place conflict otherwise. - uint32_t imt_index = interface_method->GetDexMethodIndex() % kImtSize; + uint32_t imt_index = interface_method->GetDexMethodIndex() % mirror::Class::kImtSize; if (imtable->Get(imt_index) == NULL) { imtable->Set(imt_index, vtable_method); imtable_changed = true; @@ -3961,7 +4112,7 @@ bool ClassLinker::LinkInterfaceMethods(Handle klass, if (imtable_changed) { // Fill in empty entries in interface method table with conflict. 
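
PopulateEmbeddedImtAndVTable() itself is not shown in this hunk, but its job follows from the call sites above (array classes in CreateArrayClass() and regular classes at the end of LinkClass()): copy the method pointers from the separately allocated imt/vtable arrays into the slots embedded in the class object. A toy model of that copy, with made-up types standing in for ArtMethod and the inline storage:

#include <array>
#include <cstdint>
#include <vector>

struct MethodStub {};  // stand-in for mirror::ArtMethod

struct ClassWithEmbeddedTables {
  static constexpr uint32_t kImtSize = 64;             // mirror::Class::kImtSize
  std::array<MethodStub*, kImtSize> embedded_imt{};    // lives inside the class object
  std::vector<MethodStub*> embedded_vtable;            // inline storage in the real layout

  void PopulateEmbeddedImtAndVTable(const std::array<MethodStub*, kImtSize>& imt,
                                    const std::vector<MethodStub*>& vtable) {
    embedded_imt = imt;        // one slot per IMT index, conflicts already resolved by the linker
    embedded_vtable = vtable;  // one slot per virtual method index
  }
};

After this copy LinkClass() clears the external imtable (SetImTable(NULL)) since only the embedded copy is used from then on; the TODO notes that the external vtable is intended to go the same way.
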
mirror::ArtMethod* imt_conflict_method = runtime->GetImtConflictMethod(); - for (size_t i = 0; i < kImtSize; i++) { + for (size_t i = 0; i < mirror::Class::kImtSize; i++) { if (imtable->Get(i) == NULL) { imtable->Set(i, imt_conflict_method); } @@ -4018,15 +4169,12 @@ bool ClassLinker::LinkInterfaceMethods(Handle klass, bool ClassLinker::LinkInstanceFields(Handle klass) { CHECK(klass.Get() != NULL); - return LinkFields(klass, false); + return LinkFields(klass, false, nullptr); } -bool ClassLinker::LinkStaticFields(Handle klass) { +bool ClassLinker::LinkStaticFields(Handle klass, size_t* class_size) { CHECK(klass.Get() != NULL); - size_t allocated_class_size = klass->GetClassSize(); - bool success = LinkFields(klass, true); - CHECK_EQ(allocated_class_size, klass->GetClassSize()); - return success; + return LinkFields(klass, true, class_size); } struct LinkFieldsComparator { @@ -4056,19 +4204,23 @@ struct LinkFieldsComparator { } }; -bool ClassLinker::LinkFields(Handle klass, bool is_static) { +bool ClassLinker::LinkFields(Handle klass, bool is_static, size_t* class_size) { size_t num_fields = is_static ? klass->NumStaticFields() : klass->NumInstanceFields(); mirror::ObjectArray* fields = is_static ? klass->GetSFields() : klass->GetIFields(); - // Initialize size and field_offset - size_t size; + // Initialize field_offset MemberOffset field_offset(0); if (is_static) { - size = klass->GetClassSize(); - field_offset = mirror::Class::FieldsOffset(); + uint32_t base = sizeof(mirror::Class); // Static fields come after the class. + if (klass->ShouldHaveEmbeddedImtAndVTable()) { + // Static fields come after the embedded tables. + base = mirror::Class::ComputeClassSize(true, klass->GetVTableDuringLinking()->GetLength(), + 0, 0, 0); + } + field_offset = MemberOffset(base); } else { mirror::Class* super_class = klass->GetSuperClass(); if (super_class != NULL) { @@ -4076,7 +4228,6 @@ bool ClassLinker::LinkFields(Handle klass, bool is_static) { << PrettyClass(klass.Get()) << " " << PrettyClass(super_class); field_offset = MemberOffset(super_class->GetObjectSize()); } - size = field_offset.Uint32Value(); } CHECK_EQ(num_fields == 0, fields == NULL) << PrettyClass(klass.Get()); @@ -4189,11 +4340,12 @@ bool ClassLinker::LinkFields(Handle klass, bool is_static) { DCHECK_EQ(num_fields, num_reference_fields) << PrettyClass(klass.Get()); } } - size = field_offset.Uint32Value(); + + size_t size = field_offset.Uint32Value(); // Update klass if (is_static) { klass->SetNumReferenceStaticFields(num_reference_fields); - klass->SetClassSize(size); + *class_size = size; } else { klass->SetNumReferenceInstanceFields(num_reference_fields); if (!klass->IsVariableSize()) { diff --git a/runtime/class_linker.h b/runtime/class_linker.h index 60dad7b93..d9b3d2541 100644 --- a/runtime/class_linker.h +++ b/runtime/class_linker.h @@ -31,6 +31,7 @@ #include "read_barrier.h" namespace art { + namespace gc { namespace space { class ImageSpace; @@ -56,11 +57,6 @@ enum VisitRootFlags : uint8_t; class ClassLinker { public: - // Interface method table size. Increasing this value reduces the chance of two interface methods - // colliding in the interface method table but increases the size of classes that implement - // (non-marker) interfaces. - static constexpr size_t kImtSize = 64; - explicit ClassLinker(InternTable* intern_table); ~ClassLinker(); @@ -385,6 +381,14 @@ class ClassLinker { // Special code to allocate an art method, use this instead of class->AllocObject. 
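
The LinkFields() change above is the other half of the layout story: static fields no longer start at the previously recorded class size but at a base computed from the class shape, right after sizeof(mirror::Class) for ordinary classes or after the embedded imt/vtable for classes that carry them, and the final size is returned through class_size instead of being written back. A standalone sketch of the computation that mirror::Class::ComputeClassSize() is being asked to perform here (entry and base sizes are placeholders, and details such as a stored vtable length are omitted):

#include <cstddef>
#include <cstdint>

constexpr size_t kBaseClassSize = 512;                 // stand-in for sizeof(mirror::Class)
constexpr size_t kImtSlots = 64;                       // mirror::Class::kImtSize
constexpr size_t kTableEntryBytes = sizeof(uint32_t);  // assumed VTableEntry/ImTableEntry size

size_t ComputeClassSizeSketch(bool embedded_imt_and_vtable, size_t vtable_entries,
                              size_t num_32bit_statics, size_t num_64bit_statics,
                              size_t num_ref_statics) {
  size_t size = kBaseClassSize;
  if (embedded_imt_and_vtable) {
    size += kImtSlots * kTableEntryBytes;        // embedded interface method table
    size += vtable_entries * kTableEntryBytes;   // embedded vtable
  }
  // Reference statics are packed first, as in the inline computation this replaces.
  size += num_ref_statics * sizeof(uint32_t);
  // Keep 64-bit statics 8-byte aligned, reusing a 32-bit slot as padding when possible.
  if (num_64bit_statics != 0u && (size % 8u) != 0u) {
    if (num_32bit_statics != 0u) {
      --num_32bit_statics;
    }
    size += sizeof(uint32_t);
  }
  size += num_64bit_statics * sizeof(uint64_t);
  size += num_32bit_statics * sizeof(uint32_t);
  return size;
}

SizeOfClassWithoutEmbeddedTables() corresponds to the embedded_imt_and_vtable == false case, while the static-field base in LinkFields() is the embedded case with zero statics.
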
mirror::ArtMethod* AllocArtMethod(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + mirror::ObjectArray* GetClassRoots() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + mirror::ObjectArray* class_roots = + ReadBarrier::BarrierForRoot, kWithReadBarrier>( + &class_roots_); + DCHECK(class_roots != NULL); + return class_roots; + } + private: const OatFile::OatMethod GetOatMethodFor(mirror::ArtMethod* method) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); @@ -427,8 +431,10 @@ class ClassLinker { mirror::Class* c, SafeMap& field_map) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); - uint32_t SizeOfClass(const DexFile& dex_file, - const DexFile::ClassDef& dex_class_def); + // Precomputes size needed for Class, in the case of a non-temporary class this size must be + // sufficient to hold all static fields. + uint32_t SizeOfClassWithoutEmbeddedTables(const DexFile& dex_file, + const DexFile::ClassDef& dex_class_def); void LoadClass(const DexFile& dex_file, const DexFile::ClassDef& dex_class_def, @@ -481,8 +487,9 @@ class ClassLinker { mirror::Class* klass2) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); - bool LinkClass(Thread* self, Handle klass, - Handle> interfaces) + bool LinkClass(Thread* self, const char* descriptor, Handle klass, + Handle> interfaces, + mirror::Class** new_class) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); bool LinkSuperClass(Handle klass) @@ -502,17 +509,16 @@ class ClassLinker { Handle> interfaces) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); - bool LinkStaticFields(Handle klass) + bool LinkStaticFields(Handle klass, size_t* class_size) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); bool LinkInstanceFields(Handle klass) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); - bool LinkFields(Handle klass, bool is_static) + bool LinkFields(Handle klass, bool is_static, size_t* class_size) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); void LinkCode(Handle method, const OatFile::OatClass* oat_class, const DexFile& dex_file, uint32_t dex_method_index, uint32_t method_index) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); - void CreateReferenceInstanceOffsets(Handle klass) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); void CreateReferenceStaticOffsets(Handle klass) @@ -612,11 +618,27 @@ class ClassLinker { size_t hash) SHARED_LOCKS_REQUIRED(Locks::classlinker_classes_lock_, Locks::mutator_lock_); + mirror::Class* UpdateClass(const char* descriptor, mirror::Class* klass, size_t hash) + LOCKS_EXCLUDED(Locks::classlinker_classes_lock_) + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + void MoveImageClassesToClassTable() LOCKS_EXCLUDED(Locks::classlinker_classes_lock_) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); mirror::Class* LookupClassFromImage(const char* descriptor) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + // EnsureResolved is called to make sure that a class in the class_table_ has been resolved + // before returning it to the caller. Its the responsibility of the thread that placed the class + // in the table to make it resolved. The thread doing resolution must notify on the class' lock + // when resolution has occurred. This happens in mirror::Class::SetStatus. As resolution may + // retire a class, the version of the class in the table is returned and this may differ from + // the class passed in. 
+ mirror::Class* EnsureResolved(Thread* self, const char* descriptor, mirror::Class* klass) + __attribute__((warn_unused_result)) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + + void FixupTemporaryDeclaringClass(mirror::Class* temp_class, mirror::Class* new_class) + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + // indexes into class_roots_. // needs to be kept in sync with class_roots_descriptors_. enum ClassRoot { @@ -664,14 +686,6 @@ class ClassLinker { void SetClassRoot(ClassRoot class_root, mirror::Class* klass) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); - mirror::ObjectArray* GetClassRoots() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - mirror::ObjectArray* class_roots = - ReadBarrier::BarrierForRoot, kWithReadBarrier>( - &class_roots_); - DCHECK(class_roots != NULL); - return class_roots; - } - static const char* class_roots_descriptors_[]; const char* GetClassRootDescriptor(ClassRoot class_root) { @@ -695,6 +709,8 @@ class ClassLinker { InternTable* intern_table_; + // Trampolines within the image the bounce to runtime entrypoints. Done so that there is a single + // patch point within the image. TODO: make these proper relocations. const void* portable_resolution_trampoline_; const void* quick_resolution_trampoline_; const void* portable_imt_conflict_trampoline_; diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc index 04f6946aa..7b5a5026d 100644 --- a/runtime/class_linker_test.cc +++ b/runtime/class_linker_test.cc @@ -22,7 +22,7 @@ #include "class_linker-inl.h" #include "common_runtime_test.h" #include "dex_file.h" -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "gc/heap.h" #include "mirror/art_field-inl.h" #include "mirror/art_method.h" @@ -34,6 +34,7 @@ #include "mirror/proxy.h" #include "mirror/reference.h" #include "mirror/stack_trace_element.h" +#include "mirror/string-inl.h" #include "handle_scope-inl.h" namespace art { @@ -572,37 +573,6 @@ struct ProxyOffsets : public CheckOffsets { }; }; -struct ClassClassOffsets : public CheckOffsets { - ClassClassOffsets() : CheckOffsets(true, "Ljava/lang/Class;") { - // alphabetical 64-bit - offsets.push_back(CheckOffset(OFFSETOF_MEMBER(mirror::ClassClass, serialVersionUID_), "serialVersionUID")); - }; -}; - -struct StringClassOffsets : public CheckOffsets { - StringClassOffsets() : CheckOffsets(true, "Ljava/lang/String;") { - // alphabetical references - offsets.push_back(CheckOffset(OFFSETOF_MEMBER(mirror::StringClass, ASCII_), "ASCII")); - offsets.push_back(CheckOffset(OFFSETOF_MEMBER(mirror::StringClass, CASE_INSENSITIVE_ORDER_), "CASE_INSENSITIVE_ORDER")); - - // alphabetical 32-bit - offsets.push_back(CheckOffset(OFFSETOF_MEMBER(mirror::StringClass, REPLACEMENT_CHAR_), "REPLACEMENT_CHAR")); - - // alphabetical 64-bit - offsets.push_back(CheckOffset(OFFSETOF_MEMBER(mirror::StringClass, serialVersionUID_), "serialVersionUID")); - }; -}; - -struct ArtFieldClassOffsets : public CheckOffsets { - ArtFieldClassOffsets() : CheckOffsets(true, "Ljava/lang/reflect/ArtField;") { - }; -}; - -struct ArtMethodClassOffsets : public CheckOffsets { - ArtMethodClassOffsets() : CheckOffsets(true, "Ljava/lang/reflect/ArtMethod;") { - }; -}; - struct DexCacheOffsets : public CheckOffsets { DexCacheOffsets() : CheckOffsets(false, "Ljava/lang/DexCache;") { // alphabetical references @@ -652,11 +622,6 @@ TEST_F(ClassLinkerTest, ValidateFieldOrderOfJavaCppUnionClasses) { EXPECT_TRUE(DexCacheOffsets().Check()); EXPECT_TRUE(ReferenceOffsets().Check()); 
EXPECT_TRUE(FinalizerReferenceOffsets().Check()); - - EXPECT_TRUE(ClassClassOffsets().Check()); - EXPECT_TRUE(StringClassOffsets().Check()); - EXPECT_TRUE(ArtFieldClassOffsets().Check()); - EXPECT_TRUE(ArtMethodClassOffsets().Check()); } TEST_F(ClassLinkerTest, FindClassNonexistent) { @@ -1091,4 +1056,28 @@ TEST_F(ClassLinkerTest, ClassRootDescriptors) { } } +TEST_F(ClassLinkerTest, ValidatePredefinedClassSizes) { + ScopedObjectAccess soa(Thread::Current()); + NullHandle class_loader; + mirror::Class* c; + + c = class_linker_->FindClass(soa.Self(), "Ljava/lang/Class;", class_loader); + EXPECT_EQ(c->GetClassSize(), mirror::Class::ClassClassSize()); + + c = class_linker_->FindClass(soa.Self(), "Ljava/lang/Object;", class_loader); + EXPECT_EQ(c->GetClassSize(), mirror::Object::ClassSize()); + + c = class_linker_->FindClass(soa.Self(), "Ljava/lang/String;", class_loader); + EXPECT_EQ(c->GetClassSize(), mirror::String::ClassSize()); + + c = class_linker_->FindClass(soa.Self(), "Ljava/lang/DexCache;", class_loader); + EXPECT_EQ(c->GetClassSize(), mirror::DexCache::ClassSize()); + + c = class_linker_->FindClass(soa.Self(), "Ljava/lang/reflect/ArtField;", class_loader); + EXPECT_EQ(c->GetClassSize(), mirror::ArtField::ClassSize()); + + c = class_linker_->FindClass(soa.Self(), "Ljava/lang/reflect/ArtMethod;", class_loader); + EXPECT_EQ(c->GetClassSize(), mirror::ArtMethod::ClassSize()); +} + } // namespace art diff --git a/runtime/entrypoints/entrypoint_utils-inl.h b/runtime/entrypoints/entrypoint_utils-inl.h new file mode 100644 index 000000000..482ad47d5 --- /dev/null +++ b/runtime/entrypoints/entrypoint_utils-inl.h @@ -0,0 +1,696 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_ +#define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_ + +#include "entrypoint_utils.h" + +#include "class_linker-inl.h" +#include "common_throws.h" +#include "dex_file.h" +#include "indirect_reference_table.h" +#include "invoke_type.h" +#include "jni_internal.h" +#include "mirror/art_method.h" +#include "mirror/array.h" +#include "mirror/class-inl.h" +#include "mirror/object-inl.h" +#include "mirror/throwable.h" +#include "object_utils.h" +#include "handle_scope-inl.h" +#include "thread.h" + +namespace art { + +// TODO: Fix no thread safety analysis when GCC can handle template specialization. 
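The new ValidatePredefinedClassSizes test pins the other half of the change: the sizes of the core classes are now also computed statically via the ClassSize() helpers and must agree with what the class linker actually produced. The same comparison, folded into a hypothetical helper (helper name and CHECK style are assumptions; the calls mirror the test above):

// Sketch only: verify a linked class matches a statically computed size expectation,
// e.g. expected_size = mirror::String::ClassSize() for "Ljava/lang/String;".
static void ExpectClassSize(ClassLinker* class_linker, Thread* self,
                            const char* descriptor, uint32_t expected_size)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  NullHandle<mirror::ClassLoader> class_loader;
  mirror::Class* c = class_linker->FindClass(self, descriptor, class_loader);
  CHECK(c != nullptr) << descriptor;
  CHECK_EQ(c->GetClassSize(), expected_size) << descriptor;
}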
+template +ALWAYS_INLINE static inline mirror::Class* CheckObjectAlloc(uint32_t type_idx, + mirror::ArtMethod* method, + Thread* self, bool* slow_path) + NO_THREAD_SAFETY_ANALYSIS { + mirror::Class* klass = method->GetDexCacheResolvedTypes()->GetWithoutChecks(type_idx); + if (UNLIKELY(klass == NULL)) { + klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method); + *slow_path = true; + if (klass == NULL) { + DCHECK(self->IsExceptionPending()); + return nullptr; // Failure + } + } + if (kAccessCheck) { + if (UNLIKELY(!klass->IsInstantiable())) { + ThrowLocation throw_location = self->GetCurrentLocationForThrow(); + self->ThrowNewException(throw_location, "Ljava/lang/InstantiationError;", + PrettyDescriptor(klass).c_str()); + *slow_path = true; + return nullptr; // Failure + } + mirror::Class* referrer = method->GetDeclaringClass(); + if (UNLIKELY(!referrer->CanAccess(klass))) { + ThrowIllegalAccessErrorClass(referrer, klass); + *slow_path = true; + return nullptr; // Failure + } + } + if (UNLIKELY(!klass->IsInitialized())) { + StackHandleScope<1> hs(self); + Handle h_klass(hs.NewHandle(klass)); + // EnsureInitialized (the class initializer) might cause a GC. + // may cause us to suspend meaning that another thread may try to + // change the allocator while we are stuck in the entrypoints of + // an old allocator. Also, the class initialization may fail. To + // handle these cases we mark the slow path boolean as true so + // that the caller knows to check the allocator type to see if it + // has changed and to null-check the return value in case the + // initialization fails. + *slow_path = true; + if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(h_klass, true, true)) { + DCHECK(self->IsExceptionPending()); + return nullptr; // Failure + } + return h_klass.Get(); + } + return klass; +} + +// TODO: Fix no thread safety analysis when annotalysis is smarter. +ALWAYS_INLINE static inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass, + Thread* self, bool* slow_path) + NO_THREAD_SAFETY_ANALYSIS { + if (UNLIKELY(!klass->IsInitialized())) { + StackHandleScope<1> hs(self); + Handle h_class(hs.NewHandle(klass)); + // EnsureInitialized (the class initializer) might cause a GC. + // may cause us to suspend meaning that another thread may try to + // change the allocator while we are stuck in the entrypoints of + // an old allocator. Also, the class initialization may fail. To + // handle these cases we mark the slow path boolean as true so + // that the caller knows to check the allocator type to see if it + // has changed and to null-check the return value in case the + // initialization fails. + *slow_path = true; + if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(h_class, true, true)) { + DCHECK(self->IsExceptionPending()); + return nullptr; // Failure + } + return h_class.Get(); + } + return klass; +} + +// Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it +// cannot be resolved, throw an error. If it can, use it to create an instance. +// When verification/compiler hasn't been able to verify access, optionally perform an access +// check. +// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter. 
+template +ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCode(uint32_t type_idx, + mirror::ArtMethod* method, + Thread* self, + gc::AllocatorType allocator_type) + NO_THREAD_SAFETY_ANALYSIS { + bool slow_path = false; + mirror::Class* klass = CheckObjectAlloc(type_idx, method, self, &slow_path); + if (UNLIKELY(slow_path)) { + if (klass == nullptr) { + return nullptr; + } + return klass->Alloc(self, Runtime::Current()->GetHeap()->GetCurrentAllocator()); + } + DCHECK(klass != nullptr); + return klass->Alloc(self, allocator_type); +} + +// Given the context of a calling Method and a resolved class, create an instance. +// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter. +template +ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass, + mirror::ArtMethod* method, + Thread* self, + gc::AllocatorType allocator_type) + NO_THREAD_SAFETY_ANALYSIS { + DCHECK(klass != nullptr); + bool slow_path = false; + klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path); + if (UNLIKELY(slow_path)) { + if (klass == nullptr) { + return nullptr; + } + gc::Heap* heap = Runtime::Current()->GetHeap(); + // Pass in false since the object can not be finalizable. + return klass->Alloc(self, heap->GetCurrentAllocator()); + } + // Pass in false since the object can not be finalizable. + return klass->Alloc(self, allocator_type); +} + +// Given the context of a calling Method and an initialized class, create an instance. +// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter. +template +ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass, + mirror::ArtMethod* method, + Thread* self, + gc::AllocatorType allocator_type) + NO_THREAD_SAFETY_ANALYSIS { + DCHECK(klass != nullptr); + // Pass in false since the object can not be finalizable. + return klass->Alloc(self, allocator_type); +} + + +// TODO: Fix no thread safety analysis when GCC can handle template specialization. +template +ALWAYS_INLINE static inline mirror::Class* CheckArrayAlloc(uint32_t type_idx, + mirror::ArtMethod* method, + int32_t component_count, + bool* slow_path) + NO_THREAD_SAFETY_ANALYSIS { + if (UNLIKELY(component_count < 0)) { + ThrowNegativeArraySizeException(component_count); + *slow_path = true; + return nullptr; // Failure + } + mirror::Class* klass = method->GetDexCacheResolvedTypes()->GetWithoutChecks(type_idx); + if (UNLIKELY(klass == nullptr)) { // Not in dex cache so try to resolve + klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method); + *slow_path = true; + if (klass == nullptr) { // Error + DCHECK(Thread::Current()->IsExceptionPending()); + return nullptr; // Failure + } + CHECK(klass->IsArrayClass()) << PrettyClass(klass); + } + if (kAccessCheck) { + mirror::Class* referrer = method->GetDeclaringClass(); + if (UNLIKELY(!referrer->CanAccess(klass))) { + ThrowIllegalAccessErrorClass(referrer, klass); + *slow_path = true; + return nullptr; // Failure + } + } + return klass; +} + +// Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If +// it cannot be resolved, throw an error. If it can, use it to create an array. +// When verification/compiler hasn't been able to verify access, optionally perform an access +// check. +// TODO: Fix no thread safety analysis when GCC can handle template specialization. 
+template +ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCode(uint32_t type_idx, + mirror::ArtMethod* method, + int32_t component_count, + Thread* self, + gc::AllocatorType allocator_type) + NO_THREAD_SAFETY_ANALYSIS { + bool slow_path = false; + mirror::Class* klass = CheckArrayAlloc(type_idx, method, component_count, + &slow_path); + if (UNLIKELY(slow_path)) { + if (klass == nullptr) { + return nullptr; + } + gc::Heap* heap = Runtime::Current()->GetHeap(); + return mirror::Array::Alloc(self, klass, component_count, + klass->GetComponentSize(), + heap->GetCurrentAllocator()); + } + return mirror::Array::Alloc(self, klass, component_count, + klass->GetComponentSize(), allocator_type); +} + +template +ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass, + mirror::ArtMethod* method, + int32_t component_count, + Thread* self, + gc::AllocatorType allocator_type) + NO_THREAD_SAFETY_ANALYSIS { + DCHECK(klass != nullptr); + if (UNLIKELY(component_count < 0)) { + ThrowNegativeArraySizeException(component_count); + return nullptr; // Failure + } + if (kAccessCheck) { + mirror::Class* referrer = method->GetDeclaringClass(); + if (UNLIKELY(!referrer->CanAccess(klass))) { + ThrowIllegalAccessErrorClass(referrer, klass); + return nullptr; // Failure + } + } + // No need to retry a slow-path allocation as the above code won't cause a GC or thread + // suspension. + return mirror::Array::Alloc(self, klass, component_count, + klass->GetComponentSize(), allocator_type); +} + +template +static inline mirror::ArtField* FindFieldFromCode(uint32_t field_idx, mirror::ArtMethod* referrer, + Thread* self, size_t expected_size) { + bool is_primitive; + bool is_set; + bool is_static; + switch (type) { + case InstanceObjectRead: is_primitive = false; is_set = false; is_static = false; break; + case InstanceObjectWrite: is_primitive = false; is_set = true; is_static = false; break; + case InstancePrimitiveRead: is_primitive = true; is_set = false; is_static = false; break; + case InstancePrimitiveWrite: is_primitive = true; is_set = true; is_static = false; break; + case StaticObjectRead: is_primitive = false; is_set = false; is_static = true; break; + case StaticObjectWrite: is_primitive = false; is_set = true; is_static = true; break; + case StaticPrimitiveRead: is_primitive = true; is_set = false; is_static = true; break; + case StaticPrimitiveWrite: // Keep GCC happy by having a default handler, fall-through. + default: is_primitive = true; is_set = true; is_static = true; break; + } + ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); + mirror::ArtField* resolved_field = class_linker->ResolveField(field_idx, referrer, is_static); + if (UNLIKELY(resolved_field == nullptr)) { + DCHECK(self->IsExceptionPending()); // Throw exception and unwind. + return nullptr; // Failure. + } + mirror::Class* fields_class = resolved_field->GetDeclaringClass(); + if (access_check) { + if (UNLIKELY(resolved_field->IsStatic() != is_static)) { + ThrowIncompatibleClassChangeErrorField(resolved_field, is_static, referrer); + return nullptr; + } + mirror::Class* referring_class = referrer->GetDeclaringClass(); + if (UNLIKELY(!referring_class->CheckResolvedFieldAccess(fields_class, resolved_field, + field_idx))) { + DCHECK(self->IsExceptionPending()); // Throw exception and unwind. + return nullptr; // Failure. 
+ } + if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class))) { + ThrowIllegalAccessErrorFinalField(referrer, resolved_field); + return nullptr; // Failure. + } else { + if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive || + resolved_field->FieldSize() != expected_size)) { + ThrowLocation throw_location = self->GetCurrentLocationForThrow(); + DCHECK(throw_location.GetMethod() == referrer); + self->ThrowNewExceptionF(throw_location, "Ljava/lang/NoSuchFieldError;", + "Attempted read of %zd-bit %s on field '%s'", + expected_size * (32 / sizeof(int32_t)), + is_primitive ? "primitive" : "non-primitive", + PrettyField(resolved_field, true).c_str()); + return nullptr; // Failure. + } + } + } + if (!is_static) { + // instance fields must be being accessed on an initialized class + return resolved_field; + } else { + // If the class is initialized we're done. + if (LIKELY(fields_class->IsInitialized())) { + return resolved_field; + } else { + StackHandleScope<1> hs(self); + Handle h_class(hs.NewHandle(fields_class)); + if (LIKELY(class_linker->EnsureInitialized(h_class, true, true))) { + // Otherwise let's ensure the class is initialized before resolving the field. + return resolved_field; + } + DCHECK(self->IsExceptionPending()); // Throw exception and unwind + return nullptr; // Failure. + } + } +} + +// Explicit template declarations of FindFieldFromCode for all field access types. +#define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \ +template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \ +mirror::ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \ + mirror::ArtMethod* referrer, \ + Thread* self, size_t expected_size) \ + +#define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \ + EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \ + EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, true) + +EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead); +EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectWrite); +EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveRead); +EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveWrite); +EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectRead); +EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectWrite); +EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveRead); +EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite); + +#undef EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL +#undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL + +template +static inline mirror::ArtMethod* FindMethodFromCode(uint32_t method_idx, + mirror::Object** this_object, + mirror::ArtMethod** referrer, Thread* self) { + ClassLinker* const class_linker = Runtime::Current()->GetClassLinker(); + mirror::ArtMethod* resolved_method = class_linker->GetResolvedMethod(method_idx, *referrer, type); + if (resolved_method == nullptr) { + StackHandleScope<1> hs(self); + mirror::Object* null_this = nullptr; + HandleWrapper h_this( + hs.NewHandleWrapper(type == kStatic ? &null_this : this_object)); + resolved_method = class_linker->ResolveMethod(self, method_idx, referrer, type); + } + if (UNLIKELY(resolved_method == nullptr)) { + DCHECK(self->IsExceptionPending()); // Throw exception and unwind. + return nullptr; // Failure. 
+ } else if (UNLIKELY(*this_object == nullptr && type != kStatic)) { + // Maintain interpreter-like semantics where NullPointerException is thrown + // after potential NoSuchMethodError from class linker. + ThrowLocation throw_location = self->GetCurrentLocationForThrow(); + DCHECK_EQ(*referrer, throw_location.GetMethod()); + ThrowNullPointerExceptionForMethodAccess(throw_location, method_idx, type); + return nullptr; // Failure. + } else if (access_check) { + // Incompatible class change should have been handled in resolve method. + if (UNLIKELY(resolved_method->CheckIncompatibleClassChange(type))) { + ThrowIncompatibleClassChangeError(type, resolved_method->GetInvokeType(), resolved_method, + *referrer); + return nullptr; // Failure. + } + mirror::Class* methods_class = resolved_method->GetDeclaringClass(); + mirror::Class* referring_class = (*referrer)->GetDeclaringClass(); + bool can_access_resolved_method = + referring_class->CheckResolvedMethodAccess(methods_class, resolved_method, + method_idx); + if (UNLIKELY(!can_access_resolved_method)) { + DCHECK(self->IsExceptionPending()); // Throw exception and unwind. + return nullptr; // Failure. + } + } + switch (type) { + case kStatic: + case kDirect: + return resolved_method; + case kVirtual: { + mirror::ObjectArray* vtable = (*this_object)->GetClass()->GetVTable(); + uint16_t vtable_index = resolved_method->GetMethodIndex(); + if (access_check && + (vtable == nullptr || vtable_index >= static_cast(vtable->GetLength()))) { + // Behavior to agree with that of the verifier. + ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(), + resolved_method->GetName(), resolved_method->GetSignature()); + return nullptr; // Failure. + } + DCHECK(vtable != nullptr); + return vtable->GetWithoutChecks(vtable_index); + } + case kSuper: { + mirror::Class* super_class = (*referrer)->GetDeclaringClass()->GetSuperClass(); + uint16_t vtable_index = resolved_method->GetMethodIndex(); + mirror::ObjectArray* vtable; + if (access_check) { + // Check existence of super class. + vtable = (super_class != nullptr) ? super_class->GetVTable() : nullptr; + if (vtable == nullptr || vtable_index >= static_cast(vtable->GetLength())) { + // Behavior to agree with that of the verifier. + ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(), + resolved_method->GetName(), resolved_method->GetSignature()); + return nullptr; // Failure. + } + } else { + // Super class must exist. + DCHECK(super_class != nullptr); + vtable = super_class->GetVTable(); + } + DCHECK(vtable != nullptr); + return vtable->GetWithoutChecks(vtable_index); + } + case kInterface: { + uint32_t imt_index = resolved_method->GetDexMethodIndex() % mirror::Class::kImtSize; + mirror::ArtMethod* imt_method = (*this_object)->GetClass()->GetEmbeddedImTableEntry(imt_index); + if (!imt_method->IsImtConflictMethod()) { + return imt_method; + } else { + mirror::ArtMethod* interface_method = + (*this_object)->GetClass()->FindVirtualMethodForInterface(resolved_method); + if (UNLIKELY(interface_method == nullptr)) { + ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method, + *this_object, *referrer); + return nullptr; // Failure. + } + return interface_method; + } + } + default: + LOG(FATAL) << "Unknown invoke type " << type; + return nullptr; // Failure. + } +} + +// Explicit template declarations of FindMethodFromCode for all invoke types. 
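The kInterface arm above is the heart of the patch: interface dispatch now indexes an imt embedded at a fixed offset inside the receiver's Class object, so the common case is a modulo on the dex method index plus one load, and the shared conflict stub is only consulted when two interface methods hash to the same slot. Distilled into a sketch (illustration only; error handling elided):

// Embedded-imt interface dispatch, condensed from FindMethodFromCode<kInterface> above.
static mirror::ArtMethod* DispatchInterface(mirror::Object* receiver,
                                            mirror::ArtMethod* resolved_method)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  uint32_t imt_index = resolved_method->GetDexMethodIndex() % mirror::Class::kImtSize;
  mirror::ArtMethod* imt_method = receiver->GetClass()->GetEmbeddedImTableEntry(imt_index);
  if (!imt_method->IsImtConflictMethod()) {
    return imt_method;  // Unique slot: dispatch straight through the embedded table.
  }
  // Collision: fall back to searching the interface tables of the receiver's class.
  // A null result here becomes an IncompatibleClassChangeError in the code above.
  return receiver->GetClass()->FindVirtualMethodForInterface(resolved_method);
}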
+#define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \ + template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \ + mirror::ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx, \ + mirror::Object** this_object, \ + mirror::ArtMethod** referrer, \ + Thread* self) +#define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \ + EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false); \ + EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true) + +EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kStatic); +EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kDirect); +EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kVirtual); +EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kSuper); +EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface); + +#undef EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL +#undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL + +// Fast path field resolution that can't initialize classes or throw exceptions. +static inline mirror::ArtField* FindFieldFast(uint32_t field_idx, + mirror::ArtMethod* referrer, + FindFieldType type, size_t expected_size) + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + mirror::ArtField* resolved_field = + referrer->GetDeclaringClass()->GetDexCache()->GetResolvedField(field_idx); + if (UNLIKELY(resolved_field == nullptr)) { + return nullptr; + } + // Check for incompatible class change. + bool is_primitive; + bool is_set; + bool is_static; + switch (type) { + case InstanceObjectRead: is_primitive = false; is_set = false; is_static = false; break; + case InstanceObjectWrite: is_primitive = false; is_set = true; is_static = false; break; + case InstancePrimitiveRead: is_primitive = true; is_set = false; is_static = false; break; + case InstancePrimitiveWrite: is_primitive = true; is_set = true; is_static = false; break; + case StaticObjectRead: is_primitive = false; is_set = false; is_static = true; break; + case StaticObjectWrite: is_primitive = false; is_set = true; is_static = true; break; + case StaticPrimitiveRead: is_primitive = true; is_set = false; is_static = true; break; + case StaticPrimitiveWrite: is_primitive = true; is_set = true; is_static = true; break; + default: + LOG(FATAL) << "UNREACHABLE"; // Assignment below to avoid GCC warnings. + is_primitive = true; + is_set = true; + is_static = true; + break; + } + if (UNLIKELY(resolved_field->IsStatic() != is_static)) { + // Incompatible class change. + return nullptr; + } + mirror::Class* fields_class = resolved_field->GetDeclaringClass(); + if (is_static) { + // Check class is initialized else fail so that we can contend to initialize the class with + // other threads that may be racing to do this. + if (UNLIKELY(!fields_class->IsInitialized())) { + return nullptr; + } + } + mirror::Class* referring_class = referrer->GetDeclaringClass(); + if (UNLIKELY(!referring_class->CanAccess(fields_class) || + !referring_class->CanAccessMember(fields_class, + resolved_field->GetAccessFlags()) || + (is_set && resolved_field->IsFinal() && (fields_class != referring_class)))) { + // Illegal access. + return nullptr; + } + if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive || + resolved_field->FieldSize() != expected_size)) { + return nullptr; + } + return resolved_field; +} + +// Fast path method resolution that can't throw exceptions. 
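FindFieldFast above deliberately never resolves, initializes, or throws, so it can sit on the hot path; anything it cannot prove safe returns null and is retried through the throwing FindFieldFromCode. A sketch of the intended pairing (the entrypoint shape and the Get32 accessor are assumptions; only the two helpers come from this file):

// Sketch: how a static 32-bit field getter is expected to combine the fast and slow paths.
static int32_t GetStatic32(uint32_t field_idx, mirror::ArtMethod* referrer, Thread* self)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  mirror::ArtField* field =
      FindFieldFast(field_idx, referrer, StaticPrimitiveRead, sizeof(int32_t));
  if (UNLIKELY(field == nullptr)) {
    // Slow path: may resolve the field, initialize its class, or leave an exception pending.
    field = FindFieldFromCode<StaticPrimitiveRead, true>(field_idx, referrer, self,
                                                         sizeof(int32_t));
    if (UNLIKELY(field == nullptr)) {
      return 0;  // Exception pending; the caller delivers it.
    }
  }
  return field->Get32(field->GetDeclaringClass());
}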
+static inline mirror::ArtMethod* FindMethodFast(uint32_t method_idx, + mirror::Object* this_object, + mirror::ArtMethod* referrer, + bool access_check, InvokeType type) + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + bool is_direct = type == kStatic || type == kDirect; + if (UNLIKELY(this_object == NULL && !is_direct)) { + return NULL; + } + mirror::ArtMethod* resolved_method = + referrer->GetDeclaringClass()->GetDexCache()->GetResolvedMethod(method_idx); + if (UNLIKELY(resolved_method == NULL)) { + return NULL; + } + if (access_check) { + // Check for incompatible class change errors and access. + bool icce = resolved_method->CheckIncompatibleClassChange(type); + if (UNLIKELY(icce)) { + return NULL; + } + mirror::Class* methods_class = resolved_method->GetDeclaringClass(); + mirror::Class* referring_class = referrer->GetDeclaringClass(); + if (UNLIKELY(!referring_class->CanAccess(methods_class) || + !referring_class->CanAccessMember(methods_class, + resolved_method->GetAccessFlags()))) { + // Potential illegal access, may need to refine the method's class. + return NULL; + } + } + if (type == kInterface) { // Most common form of slow path dispatch. + return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method); + } else if (is_direct) { + return resolved_method; + } else if (type == kSuper) { + return referrer->GetDeclaringClass()->GetSuperClass()->GetVTable()-> + Get(resolved_method->GetMethodIndex()); + } else { + DCHECK(type == kVirtual); + return this_object->GetClass()->GetVTable()->Get(resolved_method->GetMethodIndex()); + } +} + +static inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx, + mirror::ArtMethod* referrer, + Thread* self, bool can_run_clinit, + bool verify_access) + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); + mirror::Class* klass = class_linker->ResolveType(type_idx, referrer); + if (UNLIKELY(klass == nullptr)) { + CHECK(self->IsExceptionPending()); + return nullptr; // Failure - Indicate to caller to deliver exception + } + // Perform access check if necessary. + mirror::Class* referring_class = referrer->GetDeclaringClass(); + if (verify_access && UNLIKELY(!referring_class->CanAccess(klass))) { + ThrowIllegalAccessErrorClass(referring_class, klass); + return nullptr; // Failure - Indicate to caller to deliver exception + } + // If we're just implementing const-class, we shouldn't call . + if (!can_run_clinit) { + return klass; + } + // If we are the of this class, just return our storage. + // + // Do not set the DexCache InitializedStaticStorage, since that implies has finished + // running. + if (klass == referring_class && referrer->IsConstructor() && referrer->IsStatic()) { + return klass; + } + StackHandleScope<1> hs(self); + Handle h_class(hs.NewHandle(klass)); + if (!class_linker->EnsureInitialized(h_class, true, true)) { + CHECK(self->IsExceptionPending()); + return nullptr; // Failure - Indicate to caller to deliver exception + } + return h_class.Get(); +} + +static inline mirror::String* ResolveStringFromCode(mirror::ArtMethod* referrer, + uint32_t string_idx) + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); + return class_linker->ResolveString(string_idx, referrer); +} + +static inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self) + NO_THREAD_SAFETY_ANALYSIS /* SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) */ { + // Save any pending exception over monitor exit call. 
+ mirror::Throwable* saved_exception = NULL; + ThrowLocation saved_throw_location; + bool is_exception_reported = self->IsExceptionReportedToInstrumentation(); + if (UNLIKELY(self->IsExceptionPending())) { + saved_exception = self->GetException(&saved_throw_location); + self->ClearException(); + } + // Decode locked object and unlock, before popping local references. + self->DecodeJObject(locked)->MonitorExit(self); + if (UNLIKELY(self->IsExceptionPending())) { + LOG(FATAL) << "Synchronized JNI code returning with an exception:\n" + << saved_exception->Dump() + << "\nEncountered second exception during implicit MonitorExit:\n" + << self->GetException(NULL)->Dump(); + } + // Restore pending exception. + if (saved_exception != NULL) { + self->SetException(saved_throw_location, saved_exception); + self->SetExceptionReportedToInstrumentation(is_exception_reported); + } +} + +static inline void CheckReferenceResult(mirror::Object* o, Thread* self) + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + if (o == NULL) { + return; + } + mirror::ArtMethod* m = self->GetCurrentMethod(NULL); + if (o == kInvalidIndirectRefObject) { + JniAbortF(NULL, "invalid reference returned from %s", PrettyMethod(m).c_str()); + } + // Make sure that the result is an instance of the type this method was expected to return. + StackHandleScope<1> hs(self); + Handle h_m(hs.NewHandle(m)); + mirror::Class* return_type = MethodHelper(h_m).GetReturnType(); + + if (!o->InstanceOf(return_type)) { + JniAbortF(NULL, "attempt to return an instance of %s from %s", PrettyTypeOf(o).c_str(), + PrettyMethod(h_m.Get()).c_str()); + } +} + +static inline void CheckSuspend(Thread* thread) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + for (;;) { + if (thread->ReadFlag(kCheckpointRequest)) { + thread->RunCheckpointFunction(); + } else if (thread->ReadFlag(kSuspendRequest)) { + thread->FullSuspendCheck(); + } else { + break; + } + } +} + +template +static inline INT_TYPE art_float_to_integral(FLOAT_TYPE f) { + const INT_TYPE kMaxInt = static_cast(std::numeric_limits::max()); + const INT_TYPE kMinInt = static_cast(std::numeric_limits::min()); + const FLOAT_TYPE kMaxIntAsFloat = static_cast(kMaxInt); + const FLOAT_TYPE kMinIntAsFloat = static_cast(kMinInt); + if (LIKELY(f > kMinIntAsFloat)) { + if (LIKELY(f < kMaxIntAsFloat)) { + return static_cast(f); + } else { + return kMaxInt; + } + } else { + return (f != f) ? 
0 : kMinInt; // f != f implies NaN + } +} + +} // namespace art + +#endif // ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_ diff --git a/runtime/entrypoints/entrypoint_utils.cc b/runtime/entrypoints/entrypoint_utils.cc index a0e32f520..d029df2c7 100644 --- a/runtime/entrypoints/entrypoint_utils.cc +++ b/runtime/entrypoints/entrypoint_utils.cc @@ -16,6 +16,7 @@ #include "entrypoints/entrypoint_utils.h" +#include "base/mutex.h" #include "class_linker-inl.h" #include "dex_file-inl.h" #include "gc/accounting/card_table-inl.h" @@ -25,7 +26,6 @@ #include "mirror/object-inl.h" #include "object_utils.h" #include "mirror/object_array-inl.h" -#include "mirror/proxy.h" #include "reflection.h" #include "scoped_thread_state_change.h" #include "ScopedLocalRef.h" @@ -219,8 +219,7 @@ JValue InvokeProxyInvocationHandler(ScopedObjectAccessAlreadyRunnable& soa, cons mirror::Throwable* exception = soa.Self()->GetException(NULL); if (exception->IsCheckedException()) { mirror::Object* rcvr = soa.Decode(rcvr_jobj); - mirror::SynthesizedProxyClass* proxy_class = - down_cast(rcvr->GetClass()); + mirror::Class* proxy_class = rcvr->GetClass(); mirror::ArtMethod* interface_method = soa.Decode(interface_method_jobj); mirror::ArtMethod* proxy_method = diff --git a/runtime/entrypoints/entrypoint_utils.h b/runtime/entrypoints/entrypoint_utils.h index ff836a474..11a67ac5d 100644 --- a/runtime/entrypoints/entrypoint_utils.h +++ b/runtime/entrypoints/entrypoint_utils.h @@ -17,105 +17,40 @@ #ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_H_ #define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_H_ +#include +#include + #include "base/macros.h" -#include "class_linker-inl.h" -#include "common_throws.h" -#include "dex_file.h" -#include "indirect_reference_table.h" +#include "base/mutex.h" +#include "gc/allocator_type.h" #include "invoke_type.h" -#include "jni_internal.h" -#include "mirror/art_method.h" -#include "mirror/array.h" -#include "mirror/class-inl.h" -#include "mirror/object-inl.h" -#include "mirror/throwable.h" -#include "object_utils.h" -#include "handle_scope-inl.h" -#include "thread.h" +#include "jvalue.h" namespace art { namespace mirror { class Class; + class Array; class ArtField; + class ArtMethod; class Object; + class String; } // namespace mirror +class ScopedObjectAccessAlreadyRunnable; +class Thread; + // TODO: Fix no thread safety analysis when GCC can handle template specialization. 
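The art_float_to_integral template that closes the new -inl.h above encodes the Java narrowing rules for floating point to integer conversion: in-range values truncate toward zero, out-of-range values saturate to the integer limits, and NaN (detected via f != f) maps to zero. A sanity sketch for the int32_t/float instantiation (CHECK_EQ and the usual <cmath>/<cstdint> headers assumed):

static void CheckFloatToIntRules() {
  CHECK_EQ(art_float_to_integral<int32_t, float>(1.9f), 1);            // truncates toward zero
  CHECK_EQ(art_float_to_integral<int32_t, float>(1e20f), INT32_MAX);   // saturates on overflow
  CHECK_EQ(art_float_to_integral<int32_t, float>(-1e20f), INT32_MIN);  // saturates on underflow
  CHECK_EQ(art_float_to_integral<int32_t, float>(std::nanf("")), 0);   // NaN maps to zero
}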
template ALWAYS_INLINE static inline mirror::Class* CheckObjectAlloc(uint32_t type_idx, mirror::ArtMethod* method, Thread* self, bool* slow_path) - NO_THREAD_SAFETY_ANALYSIS { - mirror::Class* klass = method->GetDexCacheResolvedTypes()->GetWithoutChecks(type_idx); - if (UNLIKELY(klass == NULL)) { - klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method); - *slow_path = true; - if (klass == NULL) { - DCHECK(self->IsExceptionPending()); - return nullptr; // Failure - } - } - if (kAccessCheck) { - if (UNLIKELY(!klass->IsInstantiable())) { - ThrowLocation throw_location = self->GetCurrentLocationForThrow(); - self->ThrowNewException(throw_location, "Ljava/lang/InstantiationError;", - PrettyDescriptor(klass).c_str()); - *slow_path = true; - return nullptr; // Failure - } - mirror::Class* referrer = method->GetDeclaringClass(); - if (UNLIKELY(!referrer->CanAccess(klass))) { - ThrowIllegalAccessErrorClass(referrer, klass); - *slow_path = true; - return nullptr; // Failure - } - } - if (UNLIKELY(!klass->IsInitialized())) { - StackHandleScope<1> hs(self); - Handle h_klass(hs.NewHandle(klass)); - // EnsureInitialized (the class initializer) might cause a GC. - // may cause us to suspend meaning that another thread may try to - // change the allocator while we are stuck in the entrypoints of - // an old allocator. Also, the class initialization may fail. To - // handle these cases we mark the slow path boolean as true so - // that the caller knows to check the allocator type to see if it - // has changed and to null-check the return value in case the - // initialization fails. - *slow_path = true; - if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(h_klass, true, true)) { - DCHECK(self->IsExceptionPending()); - return nullptr; // Failure - } - return h_klass.Get(); - } - return klass; -} + NO_THREAD_SAFETY_ANALYSIS; // TODO: Fix no thread safety analysis when annotalysis is smarter. ALWAYS_INLINE static inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass, Thread* self, bool* slow_path) - NO_THREAD_SAFETY_ANALYSIS { - if (UNLIKELY(!klass->IsInitialized())) { - StackHandleScope<1> hs(self); - Handle h_class(hs.NewHandle(klass)); - // EnsureInitialized (the class initializer) might cause a GC. - // may cause us to suspend meaning that another thread may try to - // change the allocator while we are stuck in the entrypoints of - // an old allocator. Also, the class initialization may fail. To - // handle these cases we mark the slow path boolean as true so - // that the caller knows to check the allocator type to see if it - // has changed and to null-check the return value in case the - // initialization fails. - *slow_path = true; - if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(h_class, true, true)) { - DCHECK(self->IsExceptionPending()); - return nullptr; // Failure - } - return h_class.Get(); - } - return klass; -} + NO_THREAD_SAFETY_ANALYSIS; // Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it // cannot be resolved, throw an error. If it can, use it to create an instance. 
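The rest of this header follows the same mechanical pattern as the hunk above: entrypoint_utils.h keeps only the declaration with its lock or thread-safety annotation, while the body moves to entrypoint_utils-inl.h, so translation units that never expand these templates stop paying for class_linker-inl.h, the mirror -inl headers, and friends. In the abstract (file and function names below are placeholders, not part of the patch):

// some_utils.h — declaration only; cheap to include everywhere.
template <bool kAccessCheck>
static inline mirror::Class* CheckSomething(uint32_t idx, Thread* self)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

// some_utils-inl.h — pulls in the heavy headers and supplies the definition.
#include "some_utils.h"
#include "mirror/class-inl.h"

template <bool kAccessCheck>
static inline mirror::Class* CheckSomething(uint32_t idx, Thread* self) {
  // ... body that needs the -inl headers goes here ...
  return nullptr;
}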
@@ -126,19 +61,7 @@ template ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCode(uint32_t type_idx, mirror::ArtMethod* method, Thread* self, - gc::AllocatorType allocator_type) - NO_THREAD_SAFETY_ANALYSIS { - bool slow_path = false; - mirror::Class* klass = CheckObjectAlloc(type_idx, method, self, &slow_path); - if (UNLIKELY(slow_path)) { - if (klass == nullptr) { - return nullptr; - } - return klass->Alloc(self, Runtime::Current()->GetHeap()->GetCurrentAllocator()); - } - DCHECK(klass != nullptr); - return klass->Alloc(self, allocator_type); -} + gc::AllocatorType allocator_type); // Given the context of a calling Method and a resolved class, create an instance. // TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter. @@ -147,21 +70,7 @@ ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCodeResolved(mirror:: mirror::ArtMethod* method, Thread* self, gc::AllocatorType allocator_type) - NO_THREAD_SAFETY_ANALYSIS { - DCHECK(klass != nullptr); - bool slow_path = false; - klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path); - if (UNLIKELY(slow_path)) { - if (klass == nullptr) { - return nullptr; - } - gc::Heap* heap = Runtime::Current()->GetHeap(); - // Pass in false since the object can not be finalizable. - return klass->Alloc(self, heap->GetCurrentAllocator()); - } - // Pass in false since the object can not be finalizable. - return klass->Alloc(self, allocator_type); -} + NO_THREAD_SAFETY_ANALYSIS; // Given the context of a calling Method and an initialized class, create an instance. // TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter. @@ -169,12 +78,7 @@ template ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass, mirror::ArtMethod* method, Thread* self, - gc::AllocatorType allocator_type) - NO_THREAD_SAFETY_ANALYSIS { - DCHECK(klass != nullptr); - // Pass in false since the object can not be finalizable. - return klass->Alloc(self, allocator_type); -} + gc::AllocatorType allocator_type); // TODO: Fix no thread safety analysis when GCC can handle template specialization. @@ -183,32 +87,7 @@ ALWAYS_INLINE static inline mirror::Class* CheckArrayAlloc(uint32_t type_idx, mirror::ArtMethod* method, int32_t component_count, bool* slow_path) - NO_THREAD_SAFETY_ANALYSIS { - if (UNLIKELY(component_count < 0)) { - ThrowNegativeArraySizeException(component_count); - *slow_path = true; - return nullptr; // Failure - } - mirror::Class* klass = method->GetDexCacheResolvedTypes()->GetWithoutChecks(type_idx); - if (UNLIKELY(klass == nullptr)) { // Not in dex cache so try to resolve - klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method); - *slow_path = true; - if (klass == nullptr) { // Error - DCHECK(Thread::Current()->IsExceptionPending()); - return nullptr; // Failure - } - CHECK(klass->IsArrayClass()) << PrettyClass(klass); - } - if (kAccessCheck) { - mirror::Class* referrer = method->GetDeclaringClass(); - if (UNLIKELY(!referrer->CanAccess(klass))) { - ThrowIllegalAccessErrorClass(referrer, klass); - *slow_path = true; - return nullptr; // Failure - } - } - return klass; -} + NO_THREAD_SAFETY_ANALYSIS; // Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If // it cannot be resolved, throw an error. If it can, use it to create an array. 
@@ -221,22 +100,7 @@ ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCode(uint32_t type_idx, int32_t component_count, Thread* self, gc::AllocatorType allocator_type) - NO_THREAD_SAFETY_ANALYSIS { - bool slow_path = false; - mirror::Class* klass = CheckArrayAlloc(type_idx, method, component_count, - &slow_path); - if (UNLIKELY(slow_path)) { - if (klass == nullptr) { - return nullptr; - } - gc::Heap* heap = Runtime::Current()->GetHeap(); - return mirror::Array::Alloc(self, klass, component_count, - klass->GetComponentSize(), - heap->GetCurrentAllocator()); - } - return mirror::Array::Alloc(self, klass, component_count, - klass->GetComponentSize(), allocator_type); -} + NO_THREAD_SAFETY_ANALYSIS; template ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass, @@ -244,24 +108,7 @@ ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCodeResolved(mirror::Cl int32_t component_count, Thread* self, gc::AllocatorType allocator_type) - NO_THREAD_SAFETY_ANALYSIS { - DCHECK(klass != nullptr); - if (UNLIKELY(component_count < 0)) { - ThrowNegativeArraySizeException(component_count); - return nullptr; // Failure - } - if (kAccessCheck) { - mirror::Class* referrer = method->GetDeclaringClass(); - if (UNLIKELY(!referrer->CanAccess(klass))) { - ThrowIllegalAccessErrorClass(referrer, klass); - return nullptr; // Failure - } - } - // No need to retry a slow-path allocation as the above code won't cause a GC or thread - // suspension. - return mirror::Array::Alloc(self, klass, component_count, - klass->GetComponentSize(), allocator_type); -} + NO_THREAD_SAFETY_ANALYSIS; extern mirror::Array* CheckAndAllocArrayFromCode(uint32_t type_idx, mirror::ArtMethod* method, int32_t component_count, Thread* self, @@ -290,422 +137,47 @@ enum FindFieldType { template static inline mirror::ArtField* FindFieldFromCode(uint32_t field_idx, mirror::ArtMethod* referrer, - Thread* self, size_t expected_size) { - bool is_primitive; - bool is_set; - bool is_static; - switch (type) { - case InstanceObjectRead: is_primitive = false; is_set = false; is_static = false; break; - case InstanceObjectWrite: is_primitive = false; is_set = true; is_static = false; break; - case InstancePrimitiveRead: is_primitive = true; is_set = false; is_static = false; break; - case InstancePrimitiveWrite: is_primitive = true; is_set = true; is_static = false; break; - case StaticObjectRead: is_primitive = false; is_set = false; is_static = true; break; - case StaticObjectWrite: is_primitive = false; is_set = true; is_static = true; break; - case StaticPrimitiveRead: is_primitive = true; is_set = false; is_static = true; break; - case StaticPrimitiveWrite: // Keep GCC happy by having a default handler, fall-through. - default: is_primitive = true; is_set = true; is_static = true; break; - } - ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); - mirror::ArtField* resolved_field = class_linker->ResolveField(field_idx, referrer, is_static); - if (UNLIKELY(resolved_field == nullptr)) { - DCHECK(self->IsExceptionPending()); // Throw exception and unwind. - return nullptr; // Failure. 
- } - mirror::Class* fields_class = resolved_field->GetDeclaringClass(); - if (access_check) { - if (UNLIKELY(resolved_field->IsStatic() != is_static)) { - ThrowIncompatibleClassChangeErrorField(resolved_field, is_static, referrer); - return nullptr; - } - mirror::Class* referring_class = referrer->GetDeclaringClass(); - if (UNLIKELY(!referring_class->CheckResolvedFieldAccess(fields_class, resolved_field, - field_idx))) { - DCHECK(self->IsExceptionPending()); // Throw exception and unwind. - return nullptr; // Failure. - } - if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class))) { - ThrowIllegalAccessErrorFinalField(referrer, resolved_field); - return nullptr; // Failure. - } else { - if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive || - resolved_field->FieldSize() != expected_size)) { - ThrowLocation throw_location = self->GetCurrentLocationForThrow(); - DCHECK(throw_location.GetMethod() == referrer); - self->ThrowNewExceptionF(throw_location, "Ljava/lang/NoSuchFieldError;", - "Attempted read of %zd-bit %s on field '%s'", - expected_size * (32 / sizeof(int32_t)), - is_primitive ? "primitive" : "non-primitive", - PrettyField(resolved_field, true).c_str()); - return nullptr; // Failure. - } - } - } - if (!is_static) { - // instance fields must be being accessed on an initialized class - return resolved_field; - } else { - // If the class is initialized we're done. - if (LIKELY(fields_class->IsInitialized())) { - return resolved_field; - } else { - StackHandleScope<1> hs(self); - Handle h_class(hs.NewHandle(fields_class)); - if (LIKELY(class_linker->EnsureInitialized(h_class, true, true))) { - // Otherwise let's ensure the class is initialized before resolving the field. - return resolved_field; - } - DCHECK(self->IsExceptionPending()); // Throw exception and unwind - return nullptr; // Failure. - } - } -} - -// Explicit template declarations of FindFieldFromCode for all field access types. 
-#define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \ -template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \ -mirror::ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \ - mirror::ArtMethod* referrer, \ - Thread* self, size_t expected_size) \ - -#define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \ - EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \ - EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, true) - -EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead); -EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectWrite); -EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveRead); -EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveWrite); -EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectRead); -EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectWrite); -EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveRead); -EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite); - -#undef EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL -#undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL + Thread* self, size_t expected_size) + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); template static inline mirror::ArtMethod* FindMethodFromCode(uint32_t method_idx, mirror::Object** this_object, - mirror::ArtMethod** referrer, Thread* self) { - ClassLinker* const class_linker = Runtime::Current()->GetClassLinker(); - mirror::ArtMethod* resolved_method = class_linker->GetResolvedMethod(method_idx, *referrer, type); - if (resolved_method == nullptr) { - StackHandleScope<1> hs(self); - mirror::Object* null_this = nullptr; - HandleWrapper h_this( - hs.NewHandleWrapper(type == kStatic ? &null_this : this_object)); - resolved_method = class_linker->ResolveMethod(self, method_idx, referrer, type); - } - if (UNLIKELY(resolved_method == nullptr)) { - DCHECK(self->IsExceptionPending()); // Throw exception and unwind. - return nullptr; // Failure. - } else if (UNLIKELY(*this_object == nullptr && type != kStatic)) { - // Maintain interpreter-like semantics where NullPointerException is thrown - // after potential NoSuchMethodError from class linker. - ThrowLocation throw_location = self->GetCurrentLocationForThrow(); - DCHECK_EQ(*referrer, throw_location.GetMethod()); - ThrowNullPointerExceptionForMethodAccess(throw_location, method_idx, type); - return nullptr; // Failure. - } else if (access_check) { - // Incompatible class change should have been handled in resolve method. - if (UNLIKELY(resolved_method->CheckIncompatibleClassChange(type))) { - ThrowIncompatibleClassChangeError(type, resolved_method->GetInvokeType(), resolved_method, - *referrer); - return nullptr; // Failure. - } - mirror::Class* methods_class = resolved_method->GetDeclaringClass(); - mirror::Class* referring_class = (*referrer)->GetDeclaringClass(); - bool can_access_resolved_method = - referring_class->CheckResolvedMethodAccess(methods_class, resolved_method, - method_idx); - if (UNLIKELY(!can_access_resolved_method)) { - DCHECK(self->IsExceptionPending()); // Throw exception and unwind. - return nullptr; // Failure. 
- } - } - switch (type) { - case kStatic: - case kDirect: - return resolved_method; - case kVirtual: { - mirror::ObjectArray* vtable = (*this_object)->GetClass()->GetVTable(); - uint16_t vtable_index = resolved_method->GetMethodIndex(); - if (access_check && - (vtable == nullptr || vtable_index >= static_cast(vtable->GetLength()))) { - // Behavior to agree with that of the verifier. - ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(), - resolved_method->GetName(), resolved_method->GetSignature()); - return nullptr; // Failure. - } - DCHECK(vtable != nullptr); - return vtable->GetWithoutChecks(vtable_index); - } - case kSuper: { - mirror::Class* super_class = (*referrer)->GetDeclaringClass()->GetSuperClass(); - uint16_t vtable_index = resolved_method->GetMethodIndex(); - mirror::ObjectArray* vtable; - if (access_check) { - // Check existence of super class. - vtable = (super_class != nullptr) ? super_class->GetVTable() : nullptr; - if (vtable == nullptr || vtable_index >= static_cast(vtable->GetLength())) { - // Behavior to agree with that of the verifier. - ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(), - resolved_method->GetName(), resolved_method->GetSignature()); - return nullptr; // Failure. - } - } else { - // Super class must exist. - DCHECK(super_class != nullptr); - vtable = super_class->GetVTable(); - } - DCHECK(vtable != nullptr); - return vtable->GetWithoutChecks(vtable_index); - } - case kInterface: { - uint32_t imt_index = resolved_method->GetDexMethodIndex() % ClassLinker::kImtSize; - mirror::ObjectArray* imt_table = (*this_object)->GetClass()->GetImTable(); - mirror::ArtMethod* imt_method = imt_table->Get(imt_index); - if (!imt_method->IsImtConflictMethod()) { - return imt_method; - } else { - mirror::ArtMethod* interface_method = - (*this_object)->GetClass()->FindVirtualMethodForInterface(resolved_method); - if (UNLIKELY(interface_method == nullptr)) { - ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method, - *this_object, *referrer); - return nullptr; // Failure. - } - return interface_method; - } - } - default: - LOG(FATAL) << "Unknown invoke type " << type; - return nullptr; // Failure. - } -} - -// Explicit template declarations of FindMethodFromCode for all invoke types. -#define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \ - template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \ - mirror::ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx, \ - mirror::Object** this_object, \ - mirror::ArtMethod** referrer, \ - Thread* self) -#define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \ - EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false); \ - EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true) - -EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kStatic); -EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kDirect); -EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kVirtual); -EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kSuper); -EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface); - -#undef EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL -#undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL + mirror::ArtMethod** referrer, Thread* self) + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); // Fast path field resolution that can't initialize classes or throw exceptions. 
static inline mirror::ArtField* FindFieldFast(uint32_t field_idx, mirror::ArtMethod* referrer, FindFieldType type, size_t expected_size) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - mirror::ArtField* resolved_field = - referrer->GetDeclaringClass()->GetDexCache()->GetResolvedField(field_idx); - if (UNLIKELY(resolved_field == nullptr)) { - return nullptr; - } - // Check for incompatible class change. - bool is_primitive; - bool is_set; - bool is_static; - switch (type) { - case InstanceObjectRead: is_primitive = false; is_set = false; is_static = false; break; - case InstanceObjectWrite: is_primitive = false; is_set = true; is_static = false; break; - case InstancePrimitiveRead: is_primitive = true; is_set = false; is_static = false; break; - case InstancePrimitiveWrite: is_primitive = true; is_set = true; is_static = false; break; - case StaticObjectRead: is_primitive = false; is_set = false; is_static = true; break; - case StaticObjectWrite: is_primitive = false; is_set = true; is_static = true; break; - case StaticPrimitiveRead: is_primitive = true; is_set = false; is_static = true; break; - case StaticPrimitiveWrite: is_primitive = true; is_set = true; is_static = true; break; - default: - LOG(FATAL) << "UNREACHABLE"; // Assignment below to avoid GCC warnings. - is_primitive = true; - is_set = true; - is_static = true; - break; - } - if (UNLIKELY(resolved_field->IsStatic() != is_static)) { - // Incompatible class change. - return nullptr; - } - mirror::Class* fields_class = resolved_field->GetDeclaringClass(); - if (is_static) { - // Check class is initialized else fail so that we can contend to initialize the class with - // other threads that may be racing to do this. - if (UNLIKELY(!fields_class->IsInitialized())) { - return nullptr; - } - } - mirror::Class* referring_class = referrer->GetDeclaringClass(); - if (UNLIKELY(!referring_class->CanAccess(fields_class) || - !referring_class->CanAccessMember(fields_class, - resolved_field->GetAccessFlags()) || - (is_set && resolved_field->IsFinal() && (fields_class != referring_class)))) { - // Illegal access. - return nullptr; - } - if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive || - resolved_field->FieldSize() != expected_size)) { - return nullptr; - } - return resolved_field; -} + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); // Fast path method resolution that can't throw exceptions. static inline mirror::ArtMethod* FindMethodFast(uint32_t method_idx, mirror::Object* this_object, mirror::ArtMethod* referrer, bool access_check, InvokeType type) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - bool is_direct = type == kStatic || type == kDirect; - if (UNLIKELY(this_object == NULL && !is_direct)) { - return NULL; - } - mirror::ArtMethod* resolved_method = - referrer->GetDeclaringClass()->GetDexCache()->GetResolvedMethod(method_idx); - if (UNLIKELY(resolved_method == NULL)) { - return NULL; - } - if (access_check) { - // Check for incompatible class change errors and access. - bool icce = resolved_method->CheckIncompatibleClassChange(type); - if (UNLIKELY(icce)) { - return NULL; - } - mirror::Class* methods_class = resolved_method->GetDeclaringClass(); - mirror::Class* referring_class = referrer->GetDeclaringClass(); - if (UNLIKELY(!referring_class->CanAccess(methods_class) || - !referring_class->CanAccessMember(methods_class, - resolved_method->GetAccessFlags()))) { - // Potential illegal access, may need to refine the method's class. 
- return NULL; - } - } - if (type == kInterface) { // Most common form of slow path dispatch. - return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method); - } else if (is_direct) { - return resolved_method; - } else if (type == kSuper) { - return referrer->GetDeclaringClass()->GetSuperClass()->GetVTable()-> - Get(resolved_method->GetMethodIndex()); - } else { - DCHECK(type == kVirtual); - return this_object->GetClass()->GetVTable()->Get(resolved_method->GetMethodIndex()); - } -} + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); static inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx, mirror::ArtMethod* referrer, Thread* self, bool can_run_clinit, bool verify_access) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); - mirror::Class* klass = class_linker->ResolveType(type_idx, referrer); - if (UNLIKELY(klass == nullptr)) { - CHECK(self->IsExceptionPending()); - return nullptr; // Failure - Indicate to caller to deliver exception - } - // Perform access check if necessary. - mirror::Class* referring_class = referrer->GetDeclaringClass(); - if (verify_access && UNLIKELY(!referring_class->CanAccess(klass))) { - ThrowIllegalAccessErrorClass(referring_class, klass); - return nullptr; // Failure - Indicate to caller to deliver exception - } - // If we're just implementing const-class, we shouldn't call . - if (!can_run_clinit) { - return klass; - } - // If we are the of this class, just return our storage. - // - // Do not set the DexCache InitializedStaticStorage, since that implies has finished - // running. - if (klass == referring_class && referrer->IsConstructor() && referrer->IsStatic()) { - return klass; - } - StackHandleScope<1> hs(self); - Handle h_class(hs.NewHandle(klass)); - if (!class_linker->EnsureInitialized(h_class, true, true)) { - CHECK(self->IsExceptionPending()); - return nullptr; // Failure - Indicate to caller to deliver exception - } - return h_class.Get(); -} + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); extern void ThrowStackOverflowError(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); static inline mirror::String* ResolveStringFromCode(mirror::ArtMethod* referrer, uint32_t string_idx) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); - return class_linker->ResolveString(string_idx, referrer); -} + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); static inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self) - NO_THREAD_SAFETY_ANALYSIS /* SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) */ { - // Save any pending exception over monitor exit call. - mirror::Throwable* saved_exception = NULL; - ThrowLocation saved_throw_location; - bool is_exception_reported = self->IsExceptionReportedToInstrumentation(); - if (UNLIKELY(self->IsExceptionPending())) { - saved_exception = self->GetException(&saved_throw_location); - self->ClearException(); - } - // Decode locked object and unlock, before popping local references. - self->DecodeJObject(locked)->MonitorExit(self); - if (UNLIKELY(self->IsExceptionPending())) { - LOG(FATAL) << "Synchronized JNI code returning with an exception:\n" - << saved_exception->Dump() - << "\nEncountered second exception during implicit MonitorExit:\n" - << self->GetException(NULL)->Dump(); - } - // Restore pending exception. 
- if (saved_exception != NULL) { - self->SetException(saved_throw_location, saved_exception); - self->SetExceptionReportedToInstrumentation(is_exception_reported); - } -} + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); static inline void CheckReferenceResult(mirror::Object* o, Thread* self) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - if (o == NULL) { - return; - } - mirror::ArtMethod* m = self->GetCurrentMethod(NULL); - if (o == kInvalidIndirectRefObject) { - JniAbortF(NULL, "invalid reference returned from %s", PrettyMethod(m).c_str()); - } - // Make sure that the result is an instance of the type this method was expected to return. - StackHandleScope<1> hs(self); - Handle h_m(hs.NewHandle(m)); - mirror::Class* return_type = MethodHelper(h_m).GetReturnType(); - - if (!o->InstanceOf(return_type)) { - JniAbortF(NULL, "attempt to return an instance of %s from %s", PrettyTypeOf(o).c_str(), - PrettyMethod(h_m.Get()).c_str()); - } -} + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); -static inline void CheckSuspend(Thread* thread) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - for (;;) { - if (thread->ReadFlag(kCheckpointRequest)) { - thread->RunCheckpointFunction(); - } else if (thread->ReadFlag(kSuspendRequest)) { - thread->FullSuspendCheck(); - } else { - break; - } - } -} +static inline void CheckSuspend(Thread* thread) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); JValue InvokeProxyInvocationHandler(ScopedObjectAccessAlreadyRunnable& soa, const char* shorty, jobject rcvr_jobj, jobject interface_art_method_jobj, @@ -750,26 +222,6 @@ static inline const void* GetQuickToPortableBridge() { return GetQuickToInterpreterBridge(); } -static inline const void* GetPortableResolutionTrampoline(ClassLinker* class_linker) { - return class_linker->GetPortableResolutionTrampoline(); -} - -static inline const void* GetQuickResolutionTrampoline(ClassLinker* class_linker) { - return class_linker->GetQuickResolutionTrampoline(); -} - -static inline const void* GetPortableImtConflictTrampoline(ClassLinker* class_linker) { - return class_linker->GetPortableImtConflictTrampoline(); -} - -static inline const void* GetQuickImtConflictTrampoline(ClassLinker* class_linker) { - return class_linker->GetQuickImtConflictTrampoline(); -} - -static inline const void* GetQuickToInterpreterBridgeTrampoline(ClassLinker* class_linker) { - return class_linker->GetQuickToInterpreterBridgeTrampoline(); -} - extern "C" void art_portable_proxy_invoke_handler(); static inline const void* GetPortableProxyInvokeHandler() { return reinterpret_cast(art_portable_proxy_invoke_handler); @@ -786,21 +238,7 @@ static inline void* GetJniDlsymLookupStub() { } template -static inline INT_TYPE art_float_to_integral(FLOAT_TYPE f) { - const INT_TYPE kMaxInt = static_cast(std::numeric_limits::max()); - const INT_TYPE kMinInt = static_cast(std::numeric_limits::min()); - const FLOAT_TYPE kMaxIntAsFloat = static_cast(kMaxInt); - const FLOAT_TYPE kMinIntAsFloat = static_cast(kMinInt); - if (LIKELY(f > kMinIntAsFloat)) { - if (LIKELY(f < kMaxIntAsFloat)) { - return static_cast(f); - } else { - return kMaxInt; - } - } else { - return (f != f) ? 
0 : kMinInt; // f != f implies NaN - } -} +static inline INT_TYPE art_float_to_integral(FLOAT_TYPE f); } // namespace art diff --git a/runtime/entrypoints/math_entrypoints.cc b/runtime/entrypoints/math_entrypoints.cc index b839b6317..b0eaf1ed0 100644 --- a/runtime/entrypoints/math_entrypoints.cc +++ b/runtime/entrypoints/math_entrypoints.cc @@ -16,7 +16,7 @@ #include "math_entrypoints.h" -#include "entrypoint_utils.h" +#include "entrypoint_utils-inl.h" namespace art { diff --git a/runtime/entrypoints/portable/portable_alloc_entrypoints.cc b/runtime/entrypoints/portable/portable_alloc_entrypoints.cc index 4c05e755a..de95f7dfb 100644 --- a/runtime/entrypoints/portable/portable_alloc_entrypoints.cc +++ b/runtime/entrypoints/portable/portable_alloc_entrypoints.cc @@ -14,7 +14,7 @@ * limitations under the License. */ -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "mirror/art_method-inl.h" #include "mirror/object-inl.h" diff --git a/runtime/entrypoints/portable/portable_cast_entrypoints.cc b/runtime/entrypoints/portable/portable_cast_entrypoints.cc index a553a22df..151b1785c 100644 --- a/runtime/entrypoints/portable/portable_cast_entrypoints.cc +++ b/runtime/entrypoints/portable/portable_cast_entrypoints.cc @@ -15,7 +15,7 @@ */ #include "common_throws.h" -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "mirror/object-inl.h" namespace art { diff --git a/runtime/entrypoints/portable/portable_dexcache_entrypoints.cc b/runtime/entrypoints/portable/portable_dexcache_entrypoints.cc index b37ebcf78..9364c46ab 100644 --- a/runtime/entrypoints/portable/portable_dexcache_entrypoints.cc +++ b/runtime/entrypoints/portable/portable_dexcache_entrypoints.cc @@ -14,7 +14,7 @@ * limitations under the License. */ -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "gc/accounting/card_table-inl.h" #include "mirror/art_method-inl.h" #include "mirror/object-inl.h" diff --git a/runtime/entrypoints/portable/portable_field_entrypoints.cc b/runtime/entrypoints/portable/portable_field_entrypoints.cc index f48f1a901..371aca4b2 100644 --- a/runtime/entrypoints/portable/portable_field_entrypoints.cc +++ b/runtime/entrypoints/portable/portable_field_entrypoints.cc @@ -14,7 +14,7 @@ * limitations under the License. */ -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "mirror/art_field-inl.h" #include "mirror/art_method-inl.h" #include "mirror/object-inl.h" diff --git a/runtime/entrypoints/portable/portable_fillarray_entrypoints.cc b/runtime/entrypoints/portable/portable_fillarray_entrypoints.cc index 335a61770..686954bc3 100644 --- a/runtime/entrypoints/portable/portable_fillarray_entrypoints.cc +++ b/runtime/entrypoints/portable/portable_fillarray_entrypoints.cc @@ -15,7 +15,7 @@ */ #include "dex_instruction.h" -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "mirror/art_method-inl.h" #include "mirror/object-inl.h" diff --git a/runtime/entrypoints/portable/portable_invoke_entrypoints.cc b/runtime/entrypoints/portable/portable_invoke_entrypoints.cc index eb50ec327..6f9c083c8 100644 --- a/runtime/entrypoints/portable/portable_invoke_entrypoints.cc +++ b/runtime/entrypoints/portable/portable_invoke_entrypoints.cc @@ -14,7 +14,7 @@ * limitations under the License. 
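art_float_to_integral, reduced to a declaration above, is the saturating float-to-integer conversion used by the f2i/f2l/d2i/d2l math entrypoints: NaN maps to 0 and out-of-range inputs clamp to the target type's minimum or maximum, matching Java semantics. A few illustrative instantiations, assuming the template parameters are ordered <INT_TYPE, FLOAT_TYPE>:

// Illustrative results of the saturating conversion (needs <cstdint>, <limits>, <cmath>):
int32_t a = art_float_to_integral<int32_t, float>(1.9f);           // 1, truncates toward zero
int32_t b = art_float_to_integral<int32_t, float>(1e20f);          // INT32_MAX, saturates
int32_t c = art_float_to_integral<int32_t, float>(-1e20f);         // INT32_MIN, saturates
int32_t d = art_float_to_integral<int32_t, float>(std::nanf(""));  // 0, NaN maps to zero
int64_t e = art_float_to_integral<int64_t, double>(-0.75);         // 0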
*/ -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "mirror/art_method-inl.h" #include "mirror/dex_cache-inl.h" #include "mirror/object-inl.h" diff --git a/runtime/entrypoints/portable/portable_jni_entrypoints.cc b/runtime/entrypoints/portable/portable_jni_entrypoints.cc index 3e7b30a57..0d0f21b79 100644 --- a/runtime/entrypoints/portable/portable_jni_entrypoints.cc +++ b/runtime/entrypoints/portable/portable_jni_entrypoints.cc @@ -14,7 +14,7 @@ * limitations under the License. */ -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "mirror/art_method-inl.h" #include "mirror/object-inl.h" #include "thread-inl.h" diff --git a/runtime/entrypoints/portable/portable_lock_entrypoints.cc b/runtime/entrypoints/portable/portable_lock_entrypoints.cc index 358ac233d..fcd3e9d49 100644 --- a/runtime/entrypoints/portable/portable_lock_entrypoints.cc +++ b/runtime/entrypoints/portable/portable_lock_entrypoints.cc @@ -14,7 +14,7 @@ * limitations under the License. */ -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "mirror/object-inl.h" namespace art { diff --git a/runtime/entrypoints/portable/portable_thread_entrypoints.cc b/runtime/entrypoints/portable/portable_thread_entrypoints.cc index 9e62e0e9f..23e1c3640 100644 --- a/runtime/entrypoints/portable/portable_thread_entrypoints.cc +++ b/runtime/entrypoints/portable/portable_thread_entrypoints.cc @@ -14,7 +14,7 @@ * limitations under the License. */ -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "mirror/art_method.h" #include "mirror/object-inl.h" #include "verifier/dex_gc_map.h" diff --git a/runtime/entrypoints/portable/portable_throw_entrypoints.cc b/runtime/entrypoints/portable/portable_throw_entrypoints.cc index 189e6b590..9e36a0584 100644 --- a/runtime/entrypoints/portable/portable_throw_entrypoints.cc +++ b/runtime/entrypoints/portable/portable_throw_entrypoints.cc @@ -15,7 +15,7 @@ */ #include "dex_instruction.h" -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "mirror/art_method-inl.h" #include "mirror/object-inl.h" diff --git a/runtime/entrypoints/portable/portable_trampoline_entrypoints.cc b/runtime/entrypoints/portable/portable_trampoline_entrypoints.cc index 2da016f79..7ee869b67 100644 --- a/runtime/entrypoints/portable/portable_trampoline_entrypoints.cc +++ b/runtime/entrypoints/portable/portable_trampoline_entrypoints.cc @@ -18,7 +18,7 @@ #define ART_RUNTIME_ENTRYPOINTS_PORTABLE_PORTABLE_ARGUMENT_VISITOR_H_ #include "dex_instruction-inl.h" -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "interpreter/interpreter.h" #include "mirror/art_method-inl.h" #include "mirror/object-inl.h" @@ -431,7 +431,7 @@ extern "C" const void* artPortableResolutionTrampoline(mirror::ArtMethod* called // Expect class to at least be initializing. DCHECK(called->GetDeclaringClass()->IsInitializing()); // Don't want infinite recursion. 
- DCHECK(code != GetPortableResolutionTrampoline(linker)); + DCHECK(code != linker->GetPortableResolutionTrampoline()); // Set up entry into main method *called_addr = called; } diff --git a/runtime/entrypoints/quick/quick_alloc_entrypoints.cc b/runtime/entrypoints/quick/quick_alloc_entrypoints.cc index dde74de87..1f2713a4f 100644 --- a/runtime/entrypoints/quick/quick_alloc_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_alloc_entrypoints.cc @@ -17,7 +17,7 @@ #include "entrypoints/quick/quick_alloc_entrypoints.h" #include "callee_save_frame.h" -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "mirror/art_method-inl.h" #include "mirror/class-inl.h" #include "mirror/object_array-inl.h" diff --git a/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc b/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc index 53c9b97c2..704db0503 100644 --- a/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc @@ -15,7 +15,7 @@ */ #include "callee_save_frame.h" -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "class_linker-inl.h" #include "dex_file-inl.h" #include "gc/accounting/card_table-inl.h" diff --git a/runtime/entrypoints/quick/quick_field_entrypoints.cc b/runtime/entrypoints/quick/quick_field_entrypoints.cc index 5cb0f3662..cd1e2470c 100644 --- a/runtime/entrypoints/quick/quick_field_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_field_entrypoints.cc @@ -16,7 +16,7 @@ #include "callee_save_frame.h" #include "dex_file-inl.h" -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "mirror/art_field-inl.h" #include "mirror/art_method-inl.h" #include "mirror/class-inl.h" diff --git a/runtime/entrypoints/quick/quick_instrumentation_entrypoints.cc b/runtime/entrypoints/quick/quick_instrumentation_entrypoints.cc index 2edcb78be..9a22c154f 100644 --- a/runtime/entrypoints/quick/quick_instrumentation_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_instrumentation_entrypoints.cc @@ -38,7 +38,7 @@ extern "C" const void* artInstrumentationMethodEntryFromCode(mirror::ArtMethod* } else { result = instrumentation->GetQuickCodeFor(method); } - DCHECK(result != GetQuickToInterpreterBridgeTrampoline(Runtime::Current()->GetClassLinker())); + DCHECK(result != Runtime::Current()->GetClassLinker()->GetQuickToInterpreterBridgeTrampoline()); bool interpreter_entry = (result == GetQuickToInterpreterBridge()); instrumentation->PushInstrumentationStackFrame(self, method->IsStatic() ? 
nullptr : this_object, method, lr, interpreter_entry); diff --git a/runtime/entrypoints/quick/quick_jni_entrypoints.cc b/runtime/entrypoints/quick/quick_jni_entrypoints.cc index 140b0754b..30e86097a 100644 --- a/runtime/entrypoints/quick/quick_jni_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_jni_entrypoints.cc @@ -15,7 +15,7 @@ */ #include "dex_file-inl.h" -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "mirror/art_method-inl.h" #include "mirror/class-inl.h" #include "mirror/object.h" diff --git a/runtime/entrypoints/quick/quick_thread_entrypoints.cc b/runtime/entrypoints/quick/quick_thread_entrypoints.cc index 5c48fc74c..118cd7fca 100644 --- a/runtime/entrypoints/quick/quick_thread_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_thread_entrypoints.cc @@ -15,7 +15,7 @@ */ #include "callee_save_frame.h" -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "thread.h" #include "thread_list.h" diff --git a/runtime/entrypoints/quick/quick_throw_entrypoints.cc b/runtime/entrypoints/quick/quick_throw_entrypoints.cc index e6f294ace..4dcb1c8dc 100644 --- a/runtime/entrypoints/quick/quick_throw_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_throw_entrypoints.cc @@ -16,7 +16,7 @@ #include "callee_save_frame.h" #include "common_throws.h" -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "mirror/object-inl.h" #include "object_utils.h" #include "thread.h" diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc index 2a66f2fe3..f7cb1263f 100644 --- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc @@ -18,7 +18,7 @@ #include "common_throws.h" #include "dex_file-inl.h" #include "dex_instruction-inl.h" -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "gc/accounting/card_table-inl.h" #include "instruction_set.h" #include "interpreter/interpreter.h" diff --git a/runtime/instrumentation.cc b/runtime/instrumentation.cc index 0e05b62dd..05320ced6 100644 --- a/runtime/instrumentation.cc +++ b/runtime/instrumentation.cc @@ -93,16 +93,17 @@ static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code, method->ClearIsPortableCompiled(); } if (!method->IsResolutionMethod()) { + ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); if (quick_code == GetQuickToInterpreterBridge() || - quick_code == GetQuickToInterpreterBridgeTrampoline(Runtime::Current()->GetClassLinker()) || - (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker()) && + quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() || + (quick_code == class_linker->GetQuickResolutionTrampoline() && Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly() && !method->IsNative() && !method->IsProxyMethod())) { if (kIsDebugBuild) { if (quick_code == GetQuickToInterpreterBridge()) { DCHECK(portable_code == GetPortableToInterpreterBridge()); - } else if (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker())) { - DCHECK(portable_code == GetPortableResolutionTrampoline(Runtime::Current()->GetClassLinker())); + } else if (quick_code == class_linker->GetQuickResolutionTrampoline()) { + DCHECK(portable_code == class_linker->GetPortableResolutionTrampoline()); } } DCHECK(!method->IsNative()) << 
PrettyMethod(method); @@ -133,8 +134,8 @@ void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) { new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code); new_quick_code = class_linker->GetQuickOatCodeFor(method); } else { - new_portable_code = GetPortableResolutionTrampoline(class_linker); - new_quick_code = GetQuickResolutionTrampoline(class_linker); + new_portable_code = class_linker->GetPortableResolutionTrampoline(); + new_quick_code = class_linker->GetQuickResolutionTrampoline(); } } else { // !uninstall if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) && @@ -152,11 +153,11 @@ void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) { } else { new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code); new_quick_code = class_linker->GetQuickOatCodeFor(method); - DCHECK(new_quick_code != GetQuickToInterpreterBridgeTrampoline(class_linker)); + DCHECK(new_quick_code != class_linker->GetQuickToInterpreterBridgeTrampoline()); } } else { - new_portable_code = GetPortableResolutionTrampoline(class_linker); - new_quick_code = GetQuickResolutionTrampoline(class_linker); + new_portable_code = class_linker->GetPortableResolutionTrampoline(); + new_quick_code = class_linker->GetQuickResolutionTrampoline(); } } } @@ -592,22 +593,25 @@ void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* q new_portable_code = GetPortableToInterpreterBridge(); new_quick_code = GetQuickToInterpreterBridge(); new_have_portable_code = false; - } else if (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker()) || - quick_code == GetQuickToInterpreterBridgeTrampoline(Runtime::Current()->GetClassLinker()) || - quick_code == GetQuickToInterpreterBridge()) { - DCHECK((portable_code == GetPortableResolutionTrampoline(Runtime::Current()->GetClassLinker())) || - (portable_code == GetPortableToInterpreterBridge())); - new_portable_code = portable_code; - new_quick_code = quick_code; - new_have_portable_code = have_portable_code; - } else if (entry_exit_stubs_installed_) { - new_quick_code = GetQuickInstrumentationEntryPoint(); - new_portable_code = GetPortableToInterpreterBridge(); - new_have_portable_code = false; } else { - new_portable_code = portable_code; - new_quick_code = quick_code; - new_have_portable_code = have_portable_code; + ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); + if (quick_code == class_linker->GetQuickResolutionTrampoline() || + quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() || + quick_code == GetQuickToInterpreterBridge()) { + DCHECK((portable_code == class_linker->GetPortableResolutionTrampoline()) || + (portable_code == GetPortableToInterpreterBridge())); + new_portable_code = portable_code; + new_quick_code = quick_code; + new_have_portable_code = have_portable_code; + } else if (entry_exit_stubs_installed_) { + new_quick_code = GetQuickInstrumentationEntryPoint(); + new_portable_code = GetPortableToInterpreterBridge(); + new_have_portable_code = false; + } else { + new_portable_code = portable_code; + new_quick_code = quick_code; + new_have_portable_code = have_portable_code; + } } } UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code); @@ -661,8 +665,9 @@ void Instrumentation::Undeoptimize(mirror::ArtMethod* method) { ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); if (method->IsStatic() && !method->IsConstructor() && 
!method->GetDeclaringClass()->IsInitialized()) { - UpdateEntrypoints(method, GetQuickResolutionTrampoline(class_linker), - GetPortableResolutionTrampoline(class_linker), false); + // TODO: we're updating to entrypoints in the image here, we can avoid the trampoline. + UpdateEntrypoints(method, class_linker->GetQuickResolutionTrampoline(), + class_linker->GetPortableResolutionTrampoline(), false); } else { bool have_portable_code = false; const void* quick_code = class_linker->GetQuickOatCodeFor(method); @@ -742,8 +747,9 @@ const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method) const { if (LIKELY(!instrumentation_stubs_installed_)) { const void* code = method->GetEntryPointFromQuickCompiledCode(); DCHECK(code != nullptr); - if (LIKELY(code != GetQuickResolutionTrampoline(runtime->GetClassLinker())) && - LIKELY(code != GetQuickToInterpreterBridgeTrampoline(runtime->GetClassLinker())) && + ClassLinker* class_linker = runtime->GetClassLinker(); + if (LIKELY(code != class_linker->GetQuickResolutionTrampoline()) && + LIKELY(code != class_linker->GetQuickToInterpreterBridgeTrampoline()) && LIKELY(code != GetQuickToInterpreterBridge())) { return code; } diff --git a/runtime/interpreter/interpreter_common.h b/runtime/interpreter/interpreter_common.h index 527733040..db42eb06f 100644 --- a/runtime/interpreter/interpreter_common.h +++ b/runtime/interpreter/interpreter_common.h @@ -27,7 +27,7 @@ #include "dex_file-inl.h" #include "dex_instruction-inl.h" #include "dex_instruction.h" -#include "entrypoints/entrypoint_utils.h" +#include "entrypoints/entrypoint_utils-inl.h" #include "gc/accounting/card_table-inl.h" #include "handle_scope-inl.h" #include "nth_caller_visitor.h" diff --git a/runtime/mirror/array-inl.h b/runtime/mirror/array-inl.h index 43bdf49c7..f3c8250db 100644 --- a/runtime/mirror/array-inl.h +++ b/runtime/mirror/array-inl.h @@ -27,6 +27,11 @@ namespace art { namespace mirror { +inline uint32_t Array::ClassSize() { + uint32_t vtable_entries = Object::kVTableLength; + return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0); +} + template inline size_t Array::SizeOf() { // This is safe from overflow because the array was already allocated, so we know it's sane. diff --git a/runtime/mirror/array.h b/runtime/mirror/array.h index 25a4535f1..6588b57b8 100644 --- a/runtime/mirror/array.h +++ b/runtime/mirror/array.h @@ -30,6 +30,9 @@ namespace mirror { class MANAGED Array : public Object { public: + // The size of a java.lang.Class representing an array. + static uint32_t ClassSize(); + // Allocates an array with the given properties, if fill_usable is true the array will be of at // least component_count size, however, if there's usable space at the end of the allocation the // array will fill it. 
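Array::ClassSize() above is the first of a family of helpers added in this patch (ArtField, ArtMethod, DexCache and Object get equivalents below, and Class itself gets ClassClassSize()): each reports how large the java.lang.Class object describing that type must be now that the IMT and vtable are embedded in it. It is distinct from the InstanceSize() helpers, which give the size of an instance of the type itself. A worked example of the arithmetic, assuming 4-byte heap references and kImtSize == 64 as defined later in this patch:

// Array::ClassSize() == Class::ComputeClassSize(true, Object::kVTableLength, 0, 0, 0)
//                    == sizeof(Class)
//                       + 64 * 4   // embedded IMT: kImtSize entries of HeapReference<ArtMethod>
//                       + 11 * 4   // embedded vtable: one entry per java.lang.Object virtual
//                    == sizeof(Class) + 300 bytes
// ArtField::InstanceSize() == sizeof(ArtField), the footprint of one field object itself.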
diff --git a/runtime/mirror/art_field-inl.h b/runtime/mirror/art_field-inl.h index 686fded40..90247edc7 100644 --- a/runtime/mirror/art_field-inl.h +++ b/runtime/mirror/art_field-inl.h @@ -29,6 +29,11 @@ namespace art { namespace mirror { +inline uint32_t ArtField::ClassSize() { + uint32_t vtable_entries = Object::kVTableLength + 6; + return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0); +} + inline Class* ArtField::GetDeclaringClass() { Class* result = GetFieldObject(OFFSET_OF_OBJECT_MEMBER(ArtField, declaring_class_)); DCHECK(result != NULL); diff --git a/runtime/mirror/art_field.h b/runtime/mirror/art_field.h index 502cec735..741c6eb8a 100644 --- a/runtime/mirror/art_field.h +++ b/runtime/mirror/art_field.h @@ -19,22 +19,33 @@ #include -#include "class.h" #include "modifiers.h" #include "object.h" #include "object_callbacks.h" +#include "primitive.h" #include "read_barrier.h" namespace art { struct ArtFieldOffsets; +class DexFile; class ScopedObjectAccessAlreadyRunnable; namespace mirror { +class DexCache; + // C++ mirror of java.lang.reflect.ArtField -class MANAGED ArtField : public Object { +class MANAGED ArtField FINAL : public Object { public: + // Size of java.lang.reflect.ArtField.class. + static uint32_t ClassSize(); + + // Size of an instance of java.lang.reflect.ArtField not including its value array. + static constexpr uint32_t InstanceSize() { + return sizeof(ArtField); + } + static ArtField* FromReflectedField(const ScopedObjectAccessAlreadyRunnable& soa, jobject jlr_field) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); @@ -143,11 +154,17 @@ class MANAGED ArtField : public Object { SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); const char* GetName() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + const char* GetTypeDescriptor() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + Primitive::Type GetTypeAsPrimitiveType() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + bool IsPrimitiveType() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + size_t FieldSize() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + mirror::DexCache* GetDexCache() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + const DexFile* GetDexFile() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); private: @@ -169,11 +186,6 @@ class MANAGED ArtField : public Object { DISALLOW_IMPLICIT_CONSTRUCTORS(ArtField); }; -class MANAGED ArtFieldClass : public Class { - private: - DISALLOW_IMPLICIT_CONSTRUCTORS(ArtFieldClass); -}; - } // namespace mirror } // namespace art diff --git a/runtime/mirror/art_method-inl.h b/runtime/mirror/art_method-inl.h index 8fcacc2a1..a5b5df613 100644 --- a/runtime/mirror/art_method-inl.h +++ b/runtime/mirror/art_method-inl.h @@ -21,14 +21,29 @@ #include "dex_file.h" #include "entrypoints/entrypoint_utils.h" +#include "object-inl.h" #include "object_array.h" +#include "object_utils.h" #include "oat.h" #include "quick/quick_method_frame_info.h" +#include "read_barrier-inl.h" #include "runtime-inl.h" namespace art { namespace mirror { +inline uint32_t ArtMethod::ClassSize() { + uint32_t vtable_entries = Object::kVTableLength + 8; + return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0); +} + +template +inline Class* ArtMethod::GetJavaLangReflectArtMethod() { + DCHECK(java_lang_reflect_ArtMethod_ != nullptr); + return ReadBarrier::BarrierForRoot( + &java_lang_reflect_ArtMethod_); +} + inline Class* ArtMethod::GetDeclaringClass() { Class* result = GetFieldObject(OFFSET_OF_OBJECT_MEMBER(ArtMethod, declaring_class_)); DCHECK(result != NULL) << this; @@ -122,8 +137,8 @@ inline void 
ArtMethod::AssertPcIsWithinQuickCode(uintptr_t pc) { return; } ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); - if (code == GetQuickResolutionTrampoline(class_linker) || - code == GetQuickToInterpreterBridgeTrampoline(class_linker)) { + if (code == class_linker->GetQuickResolutionTrampoline() || + code == class_linker->GetQuickToInterpreterBridgeTrampoline()) { return; } DCHECK(IsWithinQuickCode(pc)) @@ -162,7 +177,7 @@ inline const void* ArtMethod::GetQuickOatEntryPoint() { // On failure, instead of nullptr we get the quick-generic-jni-trampoline for native method // indicating the generic JNI, or the quick-to-interpreter-bridge (but not the trampoline) // for non-native methods. - DCHECK(entry_point != GetQuickToInterpreterBridgeTrampoline(runtime->GetClassLinker())); + DCHECK(entry_point != runtime->GetClassLinker()->GetQuickToInterpreterBridgeTrampoline()); if (UNLIKELY(entry_point == GetQuickToInterpreterBridge()) || UNLIKELY(entry_point == runtime->GetClassLinker()->GetQuickGenericJniTrampoline())) { return nullptr; @@ -289,7 +304,7 @@ inline QuickMethodFrameInfo ArtMethod::GetQuickFrameInfo() { // On failure, instead of nullptr we get the quick-generic-jni-trampoline for native method // indicating the generic JNI, or the quick-to-interpreter-bridge (but not the trampoline) // for non-native methods. And we really shouldn't see a failure for non-native methods here. - DCHECK(entry_point != GetQuickToInterpreterBridgeTrampoline(runtime->GetClassLinker())); + DCHECK(entry_point != runtime->GetClassLinker()->GetQuickToInterpreterBridgeTrampoline()); CHECK(entry_point != GetQuickToInterpreterBridge()); if (UNLIKELY(entry_point == runtime->GetClassLinker()->GetQuickGenericJniTrampoline())) { diff --git a/runtime/mirror/art_method.cc b/runtime/mirror/art_method.cc index 4821e294f..1fa680d14 100644 --- a/runtime/mirror/art_method.cc +++ b/runtime/mirror/art_method.cc @@ -355,14 +355,6 @@ void ArtMethod::Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* self->PopManagedStackFragment(fragment); } -bool ArtMethod::IsRegistered() { - void* native_method = - GetFieldPtr(OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_jni_)); - CHECK(native_method != nullptr); - void* jni_stub = GetJniDlsymLookupStub(); - return native_method != jni_stub; -} - void ArtMethod::RegisterNative(Thread* self, const void* native_method, bool is_fast) { DCHECK(Thread::Current() == self); CHECK(IsNative()) << PrettyMethod(this); diff --git a/runtime/mirror/art_method.h b/runtime/mirror/art_method.h index a55c48b87..081bee1d8 100644 --- a/runtime/mirror/art_method.h +++ b/runtime/mirror/art_method.h @@ -17,21 +17,19 @@ #ifndef ART_RUNTIME_MIRROR_ART_METHOD_H_ #define ART_RUNTIME_MIRROR_ART_METHOD_H_ -#include "class.h" #include "dex_file.h" #include "invoke_type.h" #include "modifiers.h" #include "object.h" #include "object_callbacks.h" #include "quick/quick_method_frame_info.h" -#include "read_barrier.h" +#include "read_barrier_option.h" namespace art { struct ArtMethodOffsets; struct ConstructorMethodOffsets; union JValue; -struct MethodClassOffsets; class MethodHelper; class ScopedObjectAccessAlreadyRunnable; class StringPiece; @@ -39,14 +37,20 @@ class ShadowFrame; namespace mirror { -class StaticStorageBase; - typedef void (EntryPointFromInterpreter)(Thread* self, MethodHelper& mh, const DexFile::CodeItem* code_item, ShadowFrame* shadow_frame, JValue* result); -// C++ mirror of java.lang.reflect.Method and java.lang.reflect.Constructor -class MANAGED ArtMethod : public Object { 
+// C++ mirror of java.lang.reflect.ArtMethod. +class MANAGED ArtMethod FINAL : public Object { public: + // Size of java.lang.reflect.ArtMethod.class. + static uint32_t ClassSize(); + + // Size of an instance of java.lang.reflect.ArtMethod not including its value array. + static constexpr uint32_t InstanceSize() { + return sizeof(ArtMethod); + } + static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa, jobject jlr_method) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); @@ -357,8 +361,6 @@ class MANAGED ArtMethod : public Object { return kPointerSize; } - bool IsRegistered() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); - void RegisterNative(Thread* self, const void* native_method, bool is_fast) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); @@ -411,11 +413,7 @@ class MANAGED ArtMethod : public Object { static void SetClass(Class* java_lang_reflect_ArtMethod); template - static Class* GetJavaLangReflectArtMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - DCHECK(java_lang_reflect_ArtMethod_ != nullptr); - return ReadBarrier::BarrierForRoot( - &java_lang_reflect_ArtMethod_); - } + static Class* GetJavaLangReflectArtMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); static void ResetClass(); @@ -423,27 +421,45 @@ class MANAGED ArtMethod : public Object { SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); const DexFile* GetDexFile() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + const char* GetDeclaringClassDescriptor() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + const char* GetShorty() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { uint32_t unused_length; return GetShorty(&unused_length); } + const char* GetShorty(uint32_t* out_length) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + const Signature GetSignature() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + const char* GetName() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + const DexFile::CodeItem* GetCodeItem() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + bool IsResolvedTypeIdx(uint16_t type_idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + int32_t GetLineNumFromDexPC(uint32_t dex_pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + const DexFile::ProtoId& GetPrototype() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + const DexFile::TypeList* GetParameterTypeList() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + const char* GetDeclaringClassSourceFile() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + uint16_t GetClassDefIndex() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + const DexFile::ClassDef& GetClassDef() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + const char* GetReturnTypeDescriptor() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + const char* GetTypeDescriptorFromTypeIdx(uint16_t type_idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + mirror::ClassLoader* GetClassLoader() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + mirror::DexCache* GetDexCache() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + ArtMethod* GetInterfaceMethodIfProxy() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); protected: @@ -505,11 +521,6 @@ class MANAGED ArtMethod : public Object { DISALLOW_IMPLICIT_CONSTRUCTORS(ArtMethod); }; -class MANAGED ArtMethodClass : public Class { - private: - DISALLOW_IMPLICIT_CONSTRUCTORS(ArtMethodClass); -}; - } // namespace mirror } // namespace art diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h index 2daa6e49a..349d4a34d 100644 --- a/runtime/mirror/class-inl.h +++ b/runtime/mirror/class-inl.h @@ -19,8 +19,8 @@ #include "class.h" -#include "art_field.h" -#include "art_method.h" 
+#include "art_field-inl.h" +#include "art_method-inl.h" #include "class_linker-inl.h" #include "class_loader.h" #include "common_throws.h" @@ -29,6 +29,7 @@ #include "gc/heap-inl.h" #include "iftable.h" #include "object_array-inl.h" +#include "read_barrier-inl.h" #include "runtime.h" #include "string.h" @@ -148,6 +149,23 @@ inline void Class::SetImTable(ObjectArray* new_imtable) { SetFieldObject(OFFSET_OF_OBJECT_MEMBER(Class, imtable_), new_imtable); } +inline ArtMethod* Class::GetEmbeddedImTableEntry(uint32_t i) { + uint32_t offset = EmbeddedImTableOffset().Uint32Value() + i * sizeof(ImTableEntry); + return GetFieldObject(MemberOffset(offset)); +} + +inline void Class::SetEmbeddedImTableEntry(uint32_t i, ArtMethod* method) { + uint32_t offset = EmbeddedImTableOffset().Uint32Value() + i * sizeof(ImTableEntry); + SetFieldObject(MemberOffset(offset), method); + CHECK(method == GetImTable()->Get(i)); +} + +inline void Class::SetEmbeddedVTableEntry(uint32_t i, ArtMethod* method) { + uint32_t offset = EmbeddedVTableOffset().Uint32Value() + i * sizeof(VTableEntry); + SetFieldObject(MemberOffset(offset), method); + CHECK(method == GetVTableDuringLinking()->Get(i)); +} + inline bool Class::Implements(Class* klass) { DCHECK(klass != NULL); DCHECK(klass->IsInterface()) << PrettyClass(this); @@ -373,7 +391,8 @@ inline ObjectArray* Class::GetSFields() { inline void Class::SetSFields(ObjectArray* new_sfields) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - DCHECK(NULL == GetFieldObject>(OFFSET_OF_OBJECT_MEMBER(Class, sfields_))); + DCHECK((IsRetired() && new_sfields == nullptr) || + (NULL == GetFieldObject>(OFFSET_OF_OBJECT_MEMBER(Class, sfields_)))); SetFieldObject(OFFSET_OF_OBJECT_MEMBER(Class, sfields_), new_sfields); } @@ -435,9 +454,9 @@ inline void Class::SetVerifyErrorClass(Class* klass) { template inline uint32_t Class::GetAccessFlags() { - // Check class is loaded or this is java.lang.String that has a + // Check class is loaded/retired or this is java.lang.String that has a // circularity issue during loading the names of its members - DCHECK(IsLoaded() || + DCHECK(IsIdxLoaded() || IsRetired() || IsErroneous(kVerifyFlags & ~kVerifyThis)>() || this == String::GetJavaLangString() || this == ArtField::GetJavaLangReflectArtField() || @@ -503,12 +522,63 @@ inline Object* Class::AllocNonMovableObject(Thread* self) { return Alloc(self, Runtime::Current()->GetHeap()->GetCurrentNonMovingAllocator()); } +inline uint32_t Class::ComputeClassSize(bool has_embedded_tables, + uint32_t num_vtable_entries, + uint32_t num_32bit_static_fields, + uint32_t num_64bit_static_fields, + uint32_t num_ref_static_fields) { + // Space used by java.lang.Class and its instance fields. + uint32_t size = sizeof(Class); + // Space used by embedded tables. + if (has_embedded_tables) { + uint32_t embedded_imt_size = kImtSize * sizeof(ImTableEntry); + uint32_t embedded_vtable_size = num_vtable_entries * sizeof(VTableEntry); + size += embedded_imt_size + embedded_vtable_size; + } + // Space used by reference statics. + size += num_ref_static_fields * sizeof(HeapReference); + // Possible pad for alignment. + if (((size & 7) != 0) && (num_64bit_static_fields > 0) && (num_32bit_static_fields == 0)) { + size += sizeof(uint32_t); + } + // Space used for primitive static fields. 
+ size += (num_32bit_static_fields * sizeof(uint32_t)) + + (num_64bit_static_fields * sizeof(uint64_t)); + return size; +} + template inline void Class::VisitReferences(mirror::Class* klass, const Visitor& visitor) { // Visit the static fields first so that we don't overwrite the SFields / IFields instance // fields. - VisitStaticFieldsReferences(this, visitor); VisitInstanceFieldsReferences(klass, visitor); + if (!IsTemp()) { + // Temp classes don't ever populate imt/vtable or static fields and they are not even + // allocated with the right size for those. + VisitStaticFieldsReferences(this, visitor); + if (ShouldHaveEmbeddedImtAndVTable()) { + VisitEmbeddedImtAndVTable(visitor); + } + } +} + +template +inline void Class::VisitEmbeddedImtAndVTable(const Visitor& visitor) { + uint32_t pos = sizeof(mirror::Class); + + size_t count = kImtSize; + for (size_t i = 0; i < count; ++i) { + MemberOffset offset = MemberOffset(pos); + visitor(this, offset, true); + pos += sizeof(ImTableEntry); + } + + count = ((GetVTable() != NULL) ? GetVTable()->GetLength() : 0); + for (size_t i = 0; i < count; ++i) { + MemberOffset offset = MemberOffset(pos); + visitor(this, offset, true); + pos += sizeof(VTableEntry); + } } template @@ -554,6 +624,36 @@ inline void Class::AssertInitializedOrInitializingInThread(Thread* self) { } } +inline ObjectArray* Class::GetInterfaces() { + CHECK(IsProxyClass()); + // First static field. + DCHECK(GetSFields()->Get(0)->IsArtField()); + DCHECK_STREQ(GetSFields()->Get(0)->GetName(), "interfaces"); + MemberOffset field_offset = GetSFields()->Get(0)->GetOffset(); + return GetFieldObject>(field_offset); +} + +inline ObjectArray>* Class::GetThrows() { + CHECK(IsProxyClass()); + // Second static field. + DCHECK(GetSFields()->Get(1)->IsArtField()); + DCHECK_STREQ(GetSFields()->Get(1)->GetName(), "throws"); + MemberOffset field_offset = GetSFields()->Get(1)->GetOffset(); + return GetFieldObject>>(field_offset); +} + +inline void Class::InitializeClassVisitor::operator()( + mirror::Object* obj, size_t usable_size) const { + DCHECK_LE(class_size_, usable_size); + // Avoid AsClass as object is not yet in live bitmap or allocation stack. + mirror::Class* klass = down_cast(obj); + // DCHECK(klass->IsClass()); + klass->SetClassSize(class_size_); + klass->SetPrimitiveType(Primitive::kPrimNot); // Default to not being primitive. + klass->SetDexClassDefIndex(DexFile::kDexNoIndex16); // Default to no valid class def index. + klass->SetDexTypeIndex(DexFile::kDexNoIndex16); // Default to no valid type index. 
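The embedded-table accessors added to class-inl.h above (GetEmbeddedImTableEntry and the two setters, which cross-check against the old imtable_/vtable_ arrays), together with VisitEmbeddedImtAndVTable for the GC, are the core of this change. A hedged sketch of the invokeinterface lookup they enable; the slot-assignment policy (interface method's dex method index modulo kImtSize) and the conflict handling are assumptions, since neither appears in this hunk:

// Sketch only: conceptual invokeinterface lookup against the embedded IMT.
static inline mirror::ArtMethod* LookupInterfaceTarget(mirror::Object* receiver,
                                                       mirror::ArtMethod* interface_method)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  mirror::Class* klass = receiver->GetClass();
  uint32_t slot = interface_method->GetDexMethodIndex() % mirror::Class::kImtSize;
  // One load at a fixed offset from the class object; no imtable_ indirection.
  mirror::ArtMethod* target = klass->GetEmbeddedImTableEntry(slot);
  // If two interface methods map to the same slot, the entry holds the IMT conflict
  // trampoline and the runtime falls back to
  //   klass->FindVirtualMethodForInterface(interface_method);
  return target;
}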
+} + } // namespace mirror } // namespace art diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc index c6472c66d..371e984b0 100644 --- a/runtime/mirror/class.cc +++ b/runtime/mirror/class.cc @@ -63,7 +63,8 @@ void Class::SetStatus(Status new_status, Thread* self) { ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); bool class_linker_initialized = class_linker != nullptr && class_linker->IsInitialized(); if (LIKELY(class_linker_initialized)) { - if (UNLIKELY(new_status <= old_status && new_status != kStatusError)) { + if (UNLIKELY(new_status <= old_status && new_status != kStatusError && + new_status != kStatusRetired)) { LOG(FATAL) << "Unexpected change back of class status for " << PrettyClass(this) << " " << old_status << " -> " << new_status; } @@ -113,11 +114,27 @@ void Class::SetStatus(Status new_status, Thread* self) { } else { SetField32Volatile(OFFSET_OF_OBJECT_MEMBER(Class, status_), new_status); } - // Classes that are being resolved or initialized need to notify waiters that the class status - // changed. See ClassLinker::EnsureResolved and ClassLinker::WaitForInitializeClass. - if ((old_status >= kStatusResolved || new_status >= kStatusResolved) && - class_linker_initialized) { - NotifyAll(self); + + if (!class_linker_initialized) { + // When the class linker is being initialized its single threaded and by definition there can be + // no waiters. During initialization classes may appear temporary but won't be retired as their + // size was statically computed. + } else { + // Classes that are being resolved or initialized need to notify waiters that the class status + // changed. See ClassLinker::EnsureResolved and ClassLinker::WaitForInitializeClass. + if (IsTemp()) { + // Class is a temporary one, ensure that waiters for resolution get notified of retirement + // so that they can grab the new version of the class from the class linker's table. + CHECK_LT(new_status, kStatusResolved) << PrettyDescriptor(this); + if (new_status == kStatusRetired || new_status == kStatusError) { + NotifyAll(self); + } + } else { + CHECK_NE(new_status, kStatusRetired); + if (old_status >= kStatusResolved || new_status >= kStatusResolved) { + NotifyAll(self); + } + } } } @@ -217,35 +234,39 @@ void Class::DumpClass(std::ostream& os, int flags) { os << StringPrintf(" %2zd: %s (cl=%p)\n", i, PrettyClass(interface).c_str(), cl); } } - // After this point, this may have moved due to GetDirectInterface. - os << " vtable (" << h_this->NumVirtualMethods() << " entries, " - << (h_super.Get() != nullptr ? h_super->NumVirtualMethods() : 0) << " in super):\n"; - for (size_t i = 0; i < NumVirtualMethods(); ++i) { - os << StringPrintf(" %2zd: %s\n", i, - PrettyMethod(h_this->GetVirtualMethodDuringLinking(i)).c_str()); - } - os << " direct methods (" << h_this->NumDirectMethods() << " entries):\n"; - for (size_t i = 0; i < h_this->NumDirectMethods(); ++i) { - os << StringPrintf(" %2zd: %s\n", i, PrettyMethod(h_this->GetDirectMethod(i)).c_str()); - } - if (h_this->NumStaticFields() > 0) { - os << " static fields (" << h_this->NumStaticFields() << " entries):\n"; - if (h_this->IsResolved() || h_this->IsErroneous()) { - for (size_t i = 0; i < h_this->NumStaticFields(); ++i) { - os << StringPrintf(" %2zd: %s\n", i, PrettyField(h_this->GetStaticField(i)).c_str()); + if (!IsLoaded()) { + os << " class not yet loaded"; + } else { + // After this point, this may have moved due to GetDirectInterface. 
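The reworked SetStatus above separates temp classes, which may only ever move to kStatusRetired or kStatusError and must wake waiters when they do, from ordinary classes, which keep notifying on resolution transitions. The reason is the waiter protocol: a thread blocked on a placeholder class has to re-query the class linker's table once the placeholder is retired, because the real class is the copy produced by CopyOf() further down. Conceptually (a sketch, not code from the patch):

//   while (!klass->IsResolved() && !klass->IsErroneous()) {
//     <wait on klass's monitor>;                        // woken by SetStatus()'s NotifyAll()
//     if (klass->IsRetired()) {
//       klass = <re-lookup the descriptor in the class table>;  // picks up the CopyOf() result
//     }
//   }
// which is why a temp class must notify exactly when it becomes retired or erroneous.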
+ os << " vtable (" << h_this->NumVirtualMethods() << " entries, " + << (h_super.Get() != nullptr ? h_super->NumVirtualMethods() : 0) << " in super):\n"; + for (size_t i = 0; i < NumVirtualMethods(); ++i) { + os << StringPrintf(" %2zd: %s\n", i, + PrettyMethod(h_this->GetVirtualMethodDuringLinking(i)).c_str()); + } + os << " direct methods (" << h_this->NumDirectMethods() << " entries):\n"; + for (size_t i = 0; i < h_this->NumDirectMethods(); ++i) { + os << StringPrintf(" %2zd: %s\n", i, PrettyMethod(h_this->GetDirectMethod(i)).c_str()); + } + if (h_this->NumStaticFields() > 0) { + os << " static fields (" << h_this->NumStaticFields() << " entries):\n"; + if (h_this->IsResolved() || h_this->IsErroneous()) { + for (size_t i = 0; i < h_this->NumStaticFields(); ++i) { + os << StringPrintf(" %2zd: %s\n", i, PrettyField(h_this->GetStaticField(i)).c_str()); + } + } else { + os << " "; } - } else { - os << " "; } - } - if (h_this->NumInstanceFields() > 0) { - os << " instance fields (" << h_this->NumInstanceFields() << " entries):\n"; - if (h_this->IsResolved() || h_this->IsErroneous()) { - for (size_t i = 0; i < h_this->NumInstanceFields(); ++i) { - os << StringPrintf(" %2zd: %s\n", i, PrettyField(h_this->GetInstanceField(i)).c_str()); + if (h_this->NumInstanceFields() > 0) { + os << " instance fields (" << h_this->NumInstanceFields() << " entries):\n"; + if (h_this->IsResolved() || h_this->IsErroneous()) { + for (size_t i = 0; i < h_this->NumInstanceFields(); ++i) { + os << StringPrintf(" %2zd: %s\n", i, PrettyField(h_this->GetInstanceField(i)).c_str()); + } + } else { + os << " "; } - } else { - os << " "; } } } @@ -721,9 +742,7 @@ uint32_t Class::NumDirectInterfaces() { } else if (IsArrayClass()) { return 2; } else if (IsProxyClass()) { - mirror::SynthesizedProxyClass* proxy_class= - reinterpret_cast(this); - mirror::ObjectArray* interfaces = proxy_class->GetInterfaces(); + mirror::ObjectArray* interfaces = GetInterfaces(); return interfaces != nullptr ? interfaces->GetLength() : 0; } else { const DexFile::TypeList* interfaces = GetInterfaceTypeList(); @@ -753,9 +772,7 @@ mirror::Class* Class::GetDirectInterface(Thread* self, Handle kla return class_linker->FindSystemClass(self, "Ljava/io/Serializable;"); } } else if (klass->IsProxyClass()) { - mirror::SynthesizedProxyClass* proxy_class = - reinterpret_cast(klass.Get()); - mirror::ObjectArray* interfaces = proxy_class->GetInterfaces(); + mirror::ObjectArray* interfaces = klass.Get()->GetInterfaces(); DCHECK(interfaces != nullptr); return interfaces->Get(idx); } else { @@ -798,5 +815,49 @@ const DexFile::TypeList* Class::GetInterfaceTypeList() { return GetDexFile().GetInterfacesList(*class_def); } +void Class::PopulateEmbeddedImtAndVTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + ObjectArray* table = GetImTable(); + if (table != nullptr) { + for (uint32_t i = 0; i < kImtSize; i++) { + SetEmbeddedImTableEntry(i, table->Get(i)); + } + } + + table = GetVTableDuringLinking(); + CHECK(table != nullptr); + for (int32_t i = 0; i < table->GetLength(); i++) { + SetEmbeddedVTableEntry(i, table->Get(i)); + } +} + +Class* Class::CopyOf(Thread* self, int32_t new_length) { + DCHECK_GE(new_length, static_cast(sizeof(Class))); + // We may get copied by a compacting GC. + StackHandleScope<1> hs(self); + Handle h_this(hs.NewHandle(this)); + gc::Heap* heap = Runtime::Current()->GetHeap(); + InitializeClassVisitor visitor(new_length); + + mirror::Object* new_class = + kMovingClasses ? 
heap->AllocObject(self, java_lang_Class_, new_length, visitor) + : heap->AllocNonMovableObject(self, java_lang_Class_, new_length, visitor); + if (UNLIKELY(new_class == nullptr)) { + CHECK(self->IsExceptionPending()); // Expect an OOME. + return NULL; + } + + mirror::Class* new_class_obj = new_class->AsClass(); + memcpy(new_class_obj, h_this.Get(), sizeof(Class)); + + new_class_obj->SetStatus(kStatusResolving, self); + new_class_obj->PopulateEmbeddedImtAndVTable(); + // Correct some fields. + new_class_obj->SetLockWord(LockWord(), false); + new_class_obj->SetClassSize(new_length); + + Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(new_class_obj); + return new_class_obj; +} + } // namespace mirror } // namespace art diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h index 7ac53ea5f..0f42044b3 100644 --- a/runtime/mirror/class.h +++ b/runtime/mirror/class.h @@ -22,6 +22,7 @@ #include "invoke_type.h" #include "modifiers.h" #include "object.h" +#include "object_array.h" #include "object_callbacks.h" #include "primitive.h" #include "read_barrier.h" @@ -62,7 +63,6 @@ namespace art { -struct ClassClassOffsets; struct ClassOffsets; class Signature; class StringPiece; @@ -70,13 +70,29 @@ class StringPiece; namespace mirror { class ArtField; +class ArtMethod; class ClassLoader; class DexCache; class IfTable; // C++ mirror of java.lang.Class -class MANAGED Class : public Object { +class MANAGED Class FINAL : public Object { public: + // Interface method table size. Increasing this value reduces the chance of two interface methods + // colliding in the interface method table but increases the size of classes that implement + // (non-marker) interfaces. + static constexpr size_t kImtSize = 64; + + // imtable entry embedded in class object. + struct MANAGED ImTableEntry { + HeapReference method; + }; + + // vtable entry embedded in class object. + struct MANAGED VTableEntry { + HeapReference method; + }; + // Class Status // // kStatusNotReady: If a Class cannot be found in the class table by @@ -95,6 +111,11 @@ class MANAGED Class : public Object { // using ResolveClass to initialize the super_class_ and ensuring the // interfaces are resolved. // + // kStatusResolving: Class is just cloned with the right size from + // temporary class that's acting as a placeholder for linking. The old + // class will be retired. New class is set to this status first before + // moving on to being resolved. + // // kStatusResolved: Still holding the lock on Class, the ClassLinker // shows linking is complete and fields of the Class populated by making // it kStatusResolved. Java allows circularities of the form where a super @@ -109,18 +130,20 @@ class MANAGED Class : public Object { // // TODO: Explain the other states enum Status { + kStatusRetired = -2, kStatusError = -1, kStatusNotReady = 0, kStatusIdx = 1, // Loaded, DEX idx in super_class_type_idx_ and interfaces_type_idx_. kStatusLoaded = 2, // DEX idx values resolved. - kStatusResolved = 3, // Part of linking. - kStatusVerifying = 4, // In the process of being verified. - kStatusRetryVerificationAtRuntime = 5, // Compile time verification failed, retry at runtime. - kStatusVerifyingAtRuntime = 6, // Retrying verification at runtime. - kStatusVerified = 7, // Logically part of linking; done pre-init. - kStatusInitializing = 8, // Class init in progress. - kStatusInitialized = 9, // Ready to go. - kStatusMax = 10, + kStatusResolving = 3, // Just cloned from temporary class object. + kStatusResolved = 4, // Part of linking. 
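CopyOf above is the mechanism that replaces a temporary, under-sized class object with one large enough for the embedded tables: it allocates with InitializeClassVisitor, copies the fixed Class fields, fills the embedded imt/vtable via PopulateEmbeddedImtAndVTable, and hands back a copy in kStatusResolving. A sketch of how the class linker is expected to drive it during linking; the helper name is illustrative and the static-field counts are omitted for brevity:

static mirror::Class* ReplaceTempClass(Thread* self, Handle<mirror::Class> temp_class,
                                       uint32_t vtable_length)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Real code would also pass the static-field counts so the copy has room for them.
  uint32_t size = mirror::Class::ComputeClassSize(true, vtable_length, 0, 0, 0);
  mirror::Class* new_class = temp_class->CopyOf(self, size);
  if (new_class == nullptr) {
    return nullptr;  // OOME already pending.
  }
  // Retire the placeholder; SetStatus() above notifies waiters so they re-read the table.
  temp_class->SetStatus(mirror::Class::kStatusRetired, self);
  return new_class;
}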
+ kStatusVerifying = 5, // In the process of being verified. + kStatusRetryVerificationAtRuntime = 6, // Compile time verification failed, retry at runtime. + kStatusVerifyingAtRuntime = 7, // Retrying verification at runtime. + kStatusVerified = 8, // Logically part of linking; done pre-init. + kStatusInitializing = 9, // Class init in progress. + kStatusInitialized = 10, // Ready to go. + kStatusMax = 11, }; template @@ -136,6 +159,12 @@ class MANAGED Class : public Object { return OFFSET_OF_OBJECT_MEMBER(Class, status_); } + // Returns true if the class has been retired. + template + bool IsRetired() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + return GetStatus() == kStatusRetired; + } + // Returns true if the class has failed to link. template bool IsErroneous() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { @@ -274,6 +303,13 @@ class MANAGED Class : public Object { } } + // Returns true if this class is the placeholder and should retire and + // be replaced with a class with the right size for embedded imt/vtable. + bool IsTemp() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + Status s = GetStatus(); + return s < Status::kStatusResolving && ShouldHaveEmbeddedImtAndVTable(); + } + String* GetName() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); // Returns the cached name. void SetName(String* name) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); // Sets the cached name. // Computes the name, then sets the cached value. @@ -451,6 +487,25 @@ class MANAGED Class : public Object { void SetClassSize(uint32_t new_class_size) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + // Compute how many bytes would be used a class with the given elements. + static uint32_t ComputeClassSize(bool has_embedded_tables, + uint32_t num_vtable_entries, + uint32_t num_32bit_static_fields, + uint32_t num_64bit_static_fields, + uint32_t num_ref_static_fields); + + // The size of java.lang.Class.class. + static uint32_t ClassClassSize() { + // The number of vtable entries in java.lang.Class. + uint32_t vtable_entries = Object::kVTableLength + 64; + return ComputeClassSize(true, vtable_entries, 0, 1, 0); + } + + // The size of a java.lang.Class representing a primitive such as int.class. 
+ static uint32_t PrimitiveClassSize() { + return ComputeClassSize(false, 0, 0, 0, 0); + } + template uint32_t GetObjectSize() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); @@ -623,8 +678,6 @@ class MANAGED Class : public Object { return OFFSET_OF_OBJECT_MEMBER(Class, vtable_); } - ObjectArray* GetImTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); - void SetImTable(ObjectArray* new_imtable) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); @@ -632,6 +685,26 @@ class MANAGED Class : public Object { return OFFSET_OF_OBJECT_MEMBER(Class, imtable_); } + static MemberOffset EmbeddedImTableOffset() { + return MemberOffset(sizeof(Class)); + } + + static MemberOffset EmbeddedVTableOffset() { + return MemberOffset(sizeof(Class) + kImtSize * sizeof(mirror::Class::ImTableEntry)); + } + + bool ShouldHaveEmbeddedImtAndVTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + return IsInstantiable(); + } + + ArtMethod* GetEmbeddedImTableEntry(uint32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + + void SetEmbeddedImTableEntry(uint32_t i, ArtMethod* method) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + + void SetEmbeddedVTableEntry(uint32_t i, ArtMethod* method) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + + void PopulateEmbeddedImtAndVTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + // Given a method implemented by this class but potentially from a super class, return the // specific implementation method for this class. ArtMethod* FindVirtualMethodForVirtual(ArtMethod* method) @@ -739,11 +812,6 @@ class MANAGED Class : public Object { void SetReferenceInstanceOffsets(uint32_t new_reference_offsets) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); - // Beginning of static field data - static MemberOffset FieldsOffset() { - return OFFSET_OF_OBJECT_MEMBER(Class, fields_); - } - // Returns the number of static fields containing reference types. uint32_t NumReferenceStaticFields() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { DCHECK(IsResolved() || IsErroneous()); @@ -751,7 +819,7 @@ class MANAGED Class : public Object { } uint32_t NumReferenceStaticFieldsDuringLinking() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - DCHECK(IsLoaded() || IsErroneous()); + DCHECK(IsLoaded() || IsErroneous() || IsRetired()); return GetField32(OFFSET_OF_OBJECT_MEMBER(Class, num_reference_static_fields_)); } @@ -865,25 +933,65 @@ class MANAGED Class : public Object { template void VisitReferences(mirror::Class* klass, const Visitor& visitor) - NO_THREAD_SAFETY_ANALYSIS; + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + + // Visit references within the embedded tables of the class. + // TODO: remove NO_THREAD_SAFETY_ANALYSIS when annotalysis handles visitors better. 
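EmbeddedImTableOffset() and EmbeddedVTableOffset() above pin down the fixed layout the commit message relies on: the embedded IMT begins immediately after the declared Class fields and the embedded vtable immediately after that. Because both offsets are constants for a given build, the compiler can emit a single class-relative load for an invokevirtual or invokeinterface dispatch instead of first loading vtable_ or imtable_. A hypothetical helper showing the same arithmetic SetEmbeddedVTableEntry() uses in class-inl.h:

// Hypothetical, for illustration: byte offset of embedded vtable slot i within the class object.
static inline MemberOffset EmbeddedVTableEntryOffset(uint32_t i) {
  // sizeof(Class) + kImtSize * sizeof(ImTableEntry) + i * sizeof(VTableEntry)
  return MemberOffset(mirror::Class::EmbeddedVTableOffset().Uint32Value() +
                      i * sizeof(mirror::Class::VTableEntry));
}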
+ template + void VisitEmbeddedImtAndVTable(const Visitor& visitor) NO_THREAD_SAFETY_ANALYSIS; std::string GetDescriptor() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + bool DescriptorEquals(const char* match) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + std::string GetArrayDescriptor() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + const DexFile::ClassDef* GetClassDef() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + uint32_t NumDirectInterfaces() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + uint16_t GetDirectInterfaceTypeIdx(uint32_t idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + static mirror::Class* GetDirectInterface(Thread* self, Handle klass, uint32_t idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + const char* GetSourceFile() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + std::string GetLocation() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + const DexFile& GetDexFile() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + const DexFile::TypeList* GetInterfaceTypeList() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); // Asserts we are initialized or initializing in the given thread. void AssertInitializedOrInitializingInThread(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + Class* CopyOf(Thread* self, int32_t new_length) + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + + // For proxy class only. + ObjectArray* GetInterfaces() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + + // For proxy class only. + ObjectArray>* GetThrows() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + + // Used to initialize a class in the allocation code path to ensure it is guarded by a StoreStore + // fence. + class InitializeClassVisitor { + public: + explicit InitializeClassVisitor(uint32_t class_size) : class_size_(class_size) { + } + + void operator()(mirror::Object* obj, size_t usable_size) const + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + + private: + const uint32_t class_size_; + + DISALLOW_COPY_AND_ASSIGN(InitializeClassVisitor); + }; + private: void SetVerifyErrorClass(Class* klass) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); @@ -902,6 +1010,8 @@ class MANAGED Class : public Object { void CheckObjectAlloc() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + ObjectArray* GetImTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + // defining class loader, or NULL for the "bootstrap" system loader HeapReference class_loader_; @@ -1012,7 +1122,12 @@ class MANAGED Class : public Object { // values are kept in a table in gDvm. // InitiatingLoaderList initiating_loader_list_; - // Location of first static field. + // The following data exist in real class objects. + // Embedded Imtable, for class object that's not an interface, fixed size. + ImTableEntry embedded_imtable_[0]; + // Embedded Vtable, for class object that's not an interface, variable size. + VTableEntry embedded_vtable_[0]; + // Static fields, variable size. 
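The zero-length members embedded_imtable_[0] and embedded_vtable_[0] above (and the pre-existing fields_[0] just below) are markers rather than storage; they document where the variable-sized regions of a class object begin:

//   embedded_imtable_  starts at EmbeddedImTableOffset()  == sizeof(Class)
//   embedded_vtable_   starts at EmbeddedVTableOffset()   == sizeof(Class) + kImtSize * sizeof(ImTableEntry)
//   fields_            starts after the embedded vtable (static-field area)
// The space itself exists only because ComputeClassSize() bytes are allocated for the object
// (see InitializeClassVisitor and CopyOf above); interfaces and other non-instantiable classes
// (ShouldHaveEmbeddedImtAndVTable() == false) never populate the embedded regions.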
uint32_t fields_[0]; // java.lang.Class @@ -1024,14 +1139,6 @@ class MANAGED Class : public Object { std::ostream& operator<<(std::ostream& os, const Class::Status& rhs); -class MANAGED ClassClass : public Class { - private: - int32_t pad_; - int64_t serialVersionUID_; - friend struct art::ClassClassOffsets; // for verifying offset information - DISALLOW_IMPLICIT_CONSTRUCTORS(ClassClass); -}; - } // namespace mirror } // namespace art diff --git a/runtime/mirror/class_loader.h b/runtime/mirror/class_loader.h index 74dae386f..f3594e40d 100644 --- a/runtime/mirror/class_loader.h +++ b/runtime/mirror/class_loader.h @@ -27,6 +27,12 @@ namespace mirror { // C++ mirror of java.lang.ClassLoader class MANAGED ClassLoader : public Object { + public: + // Size of an instance of java.lang.ClassLoader. + static constexpr uint32_t InstanceSize() { + return sizeof(ClassLoader); + } + private: // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses". HeapReference packages_; diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h index 7e40f643c..08cff999b 100644 --- a/runtime/mirror/dex_cache-inl.h +++ b/runtime/mirror/dex_cache-inl.h @@ -24,6 +24,11 @@ namespace art { namespace mirror { +inline uint32_t DexCache::ClassSize() { + uint32_t vtable_entries = Object::kVTableLength + 1; + return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0); +} + inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ArtMethod* method = GetResolvedMethods()->Get(method_idx); diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h index 65a50260f..bfd603a18 100644 --- a/runtime/mirror/dex_cache.h +++ b/runtime/mirror/dex_cache.h @@ -18,10 +18,8 @@ #define ART_RUNTIME_MIRROR_DEX_CACHE_H_ #include "art_method.h" -#include "class.h" #include "object.h" #include "object_array.h" -#include "string.h" namespace art { @@ -33,15 +31,21 @@ union JValue; namespace mirror { class ArtField; +class ArtMethod; class Class; +class String; -class MANAGED DexCacheClass : public Class { - private: - DISALLOW_IMPLICIT_CONSTRUCTORS(DexCacheClass); -}; - -class MANAGED DexCache : public Object { +// C++ mirror of java.lang.DexCache. +class MANAGED DexCache FINAL : public Object { public: + // Size of java.lang.DexCache.class. + static uint32_t ClassSize(); + + // Size of an instance of java.lang.DexCache not including referenced values. 
diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h
index 65a50260f..bfd603a18 100644
--- a/runtime/mirror/dex_cache.h
+++ b/runtime/mirror/dex_cache.h
@@ -18,10 +18,8 @@
 #define ART_RUNTIME_MIRROR_DEX_CACHE_H_
 
 #include "art_method.h"
-#include "class.h"
 #include "object.h"
 #include "object_array.h"
-#include "string.h"
 
 namespace art {
 
@@ -33,15 +31,21 @@ union JValue;
 namespace mirror {
 
 class ArtField;
+class ArtMethod;
 class Class;
+class String;
 
-class MANAGED DexCacheClass : public Class {
- private:
-  DISALLOW_IMPLICIT_CONSTRUCTORS(DexCacheClass);
-};
-
-class MANAGED DexCache : public Object {
+// C++ mirror of java.lang.DexCache.
+class MANAGED DexCache FINAL : public Object {
  public:
+  // Size of java.lang.DexCache.class.
+  static uint32_t ClassSize();
+
+  // Size of an instance of java.lang.DexCache not including referenced values.
+  static constexpr uint32_t InstanceSize() {
+    return sizeof(DexCache);
+  }
+
   void Init(const DexFile* dex_file, String* location, ObjectArray* strings,
diff --git a/runtime/mirror/iftable.h b/runtime/mirror/iftable.h
index ad312ed22..5feb602a9 100644
--- a/runtime/mirror/iftable.h
+++ b/runtime/mirror/iftable.h
@@ -23,7 +23,7 @@ namespace art {
 namespace mirror {
 
-class MANAGED IfTable : public ObjectArray {
+class MANAGED IfTable FINAL : public ObjectArray {
  public:
   Class* GetInterface(int32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     Class* interface = Get((i * kMax) + kInterface)->AsClass();
diff --git a/runtime/mirror/object-inl.h b/runtime/mirror/object-inl.h
index 089ef5731..d9f442c3e 100644
--- a/runtime/mirror/object-inl.h
+++ b/runtime/mirror/object-inl.h
@@ -35,6 +35,11 @@ namespace art {
 namespace mirror {
 
+inline uint32_t Object::ClassSize() {
+  uint32_t vtable_entries = kVTableLength;
+  return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0);
+}
+
 template
 inline Class* Object::GetClass() {
   return GetFieldObject(
@@ -687,6 +692,7 @@ inline void Object::VisitInstanceFieldsReferences(mirror::Class* klass, const Vi
 template
 inline void Object::VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor) {
+  DCHECK(!klass->IsTemp());
   klass->VisitFieldsReferences(
       klass->GetReferenceStaticOffsets(), visitor);
 }
diff --git a/runtime/mirror/object.h b/runtime/mirror/object.h
index d29011a4b..11998cca2 100644
--- a/runtime/mirror/object.h
+++ b/runtime/mirror/object.h
@@ -63,13 +63,24 @@ static constexpr bool kCheckFieldAssignments = false;
 // C++ mirror of java.lang.Object
 class MANAGED LOCKABLE Object {
  public:
+  // The number of vtable entries in java.lang.Object.
+  static constexpr size_t kVTableLength = 11;
+
+  // The size of the java.lang.Class representing a java.lang.Object.
+  static uint32_t ClassSize();
+
+  // Size of an instance of java.lang.Object.
+  static constexpr uint32_t InstanceSize() {
+    return sizeof(Object);
+  }
+
   static MemberOffset ClassOffset() {
     return OFFSET_OF_OBJECT_MEMBER(Object, klass_);
   }
 
   template
-  Class* GetClass() ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+  ALWAYS_INLINE Class* GetClass() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   template
   void SetClass(Class* new_klass) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
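Object::kVTableLength = 11 corresponds to the virtual methods java.lang.Object declares, which is what every embedded vtable starts from. The enumeration below is illustrative only; the slot names and order are not ART's actual vtable layout, they just make the constant easier to audit.

  // Illustration only: where the value 11 comes from.
  #include <cstddef>

  enum ObjectVTableSlotSketch : size_t {
    kClone, kEquals, kFinalize, kGetClass, kHashCode, kNotify, kNotifyAll,
    kToString, kWait, kWaitLong, kWaitLongInt,
    kObjectVTableLengthSketch  // == 11
  };
  static_assert(kObjectVTableLengthSketch == 11,
                "java.lang.Object declares 11 virtual methods (clone, equals, finalize, getClass, "
                "hashCode, notify, notifyAll, toString, and the three wait overloads)");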
@@ -202,27 +213,27 @@ class MANAGED LOCKABLE Object {
 
   // Accessor for Java type fields.
   template
-  T* GetFieldObject(MemberOffset field_offset) ALWAYS_INLINE
+  ALWAYS_INLINE T* GetFieldObject(MemberOffset field_offset)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   template
-  T* GetFieldObjectVolatile(MemberOffset field_offset) ALWAYS_INLINE
+  ALWAYS_INLINE T* GetFieldObjectVolatile(MemberOffset field_offset)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   template
-  void SetFieldObjectWithoutWriteBarrier(MemberOffset field_offset, Object* new_value)
-      ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+  ALWAYS_INLINE void SetFieldObjectWithoutWriteBarrier(MemberOffset field_offset, Object* new_value)
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   template
-  void SetFieldObject(MemberOffset field_offset, Object* new_value) ALWAYS_INLINE
+  ALWAYS_INLINE void SetFieldObject(MemberOffset field_offset, Object* new_value)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   template
-  void SetFieldObjectVolatile(MemberOffset field_offset, Object* new_value) ALWAYS_INLINE
+  ALWAYS_INLINE void SetFieldObjectVolatile(MemberOffset field_offset, Object* new_value)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   template* GetFieldObjectReferenceAddr(MemberOffset field_offset);
 
   template
-  int32_t GetField32(MemberOffset field_offset) ALWAYS_INLINE
+  ALWAYS_INLINE int32_t GetField32(MemberOffset field_offset)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   template
-  int32_t GetField32Volatile(MemberOffset field_offset) ALWAYS_INLINE
+  ALWAYS_INLINE int32_t GetField32Volatile(MemberOffset field_offset)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   template
-  void SetField32(MemberOffset field_offset, int32_t new_value) ALWAYS_INLINE
+  ALWAYS_INLINE void SetField32(MemberOffset field_offset, int32_t new_value)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   template
-  void SetField32Volatile(MemberOffset field_offset, int32_t new_value) ALWAYS_INLINE
+  ALWAYS_INLINE void SetField32Volatile(MemberOffset field_offset, int32_t new_value)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   template
-  bool CasFieldWeakSequentiallyConsistent32(MemberOffset field_offset, int32_t old_value,
-                                            int32_t new_value) ALWAYS_INLINE
+  ALWAYS_INLINE bool CasFieldWeakSequentiallyConsistent32(MemberOffset field_offset,
+                                                          int32_t old_value, int32_t new_value)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   template
-  int64_t GetField64(MemberOffset field_offset) ALWAYS_INLINE
+  ALWAYS_INLINE int64_t GetField64(MemberOffset field_offset)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   template
-  int64_t GetField64Volatile(MemberOffset field_offset) ALWAYS_INLINE
+  ALWAYS_INLINE int64_t GetField64Volatile(MemberOffset field_offset)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   template
-  void SetField64(MemberOffset field_offset, int64_t new_value) ALWAYS_INLINE
+  ALWAYS_INLINE void SetField64(MemberOffset field_offset, int64_t new_value)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   template
-  void SetField64Volatile(MemberOffset field_offset, int64_t new_value) ALWAYS_INLINE
+  ALWAYS_INLINE void SetField64Volatile(MemberOffset field_offset, int64_t new_value)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
diff --git a/runtime/mirror/object_array.h b/runtime/mirror/object_array.h
 template
-class MANAGED ObjectArray : public Array {
+class MANAGED ObjectArray: public Array {
  public:
+  // The size of Object[].class.
+  static uint32_t ClassSize() {
+    return Array::ClassSize();
+  }
+
   static ObjectArray* Alloc(Thread* self, Class* object_array_class, int32_t length,
                             gc::AllocatorType allocator_type)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
diff --git a/runtime/mirror/object_test.cc b/runtime/mirror/object_test.cc
index f85fb2748..7e1de5d06 100644
--- a/runtime/mirror/object_test.cc
+++ b/runtime/mirror/object_test.cc
@@ -28,7 +28,7 @@
 #include "class_linker-inl.h"
 #include "common_runtime_test.h"
 #include "dex_file.h"
-#include "entrypoints/entrypoint_utils.h"
+#include "entrypoints/entrypoint_utils-inl.h"
 #include "gc/accounting/card_table-inl.h"
 #include "gc/heap.h"
 #include "iftable-inl.h"
diff --git a/runtime/mirror/proxy.h b/runtime/mirror/proxy.h
index 6e4947ebf..db511d642 100644
--- a/runtime/mirror/proxy.h
+++ b/runtime/mirror/proxy.h
@@ -25,28 +25,8 @@ struct ProxyOffsets;
 
 namespace mirror {
 
-// All proxy objects have a class which is a synthesized proxy class. The synthesized proxy class
-// has the static fields used to implement reflection on proxy objects.
-class MANAGED SynthesizedProxyClass : public Class {
- public:
-  ObjectArray* GetInterfaces() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    return GetFieldObject>(OFFSET_OF_OBJECT_MEMBER(SynthesizedProxyClass,
-                                                   interfaces_));
-  }
-
-  ObjectArray>* GetThrows() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    return GetFieldObject>>(OFFSET_OF_OBJECT_MEMBER(SynthesizedProxyClass,
-                                                    throws_));
-  }
-
- private:
-  HeapReference> interfaces_;
-  HeapReference>> throws_;
-  DISALLOW_IMPLICIT_CONSTRUCTORS(SynthesizedProxyClass);
-};
-
 // C++ mirror of java.lang.reflect.Proxy.
-class MANAGED Proxy : public Object {
+class MANAGED Proxy FINAL : public Object {
  private:
   HeapReference h_;
diff --git a/runtime/mirror/stack_trace_element.h b/runtime/mirror/stack_trace_element.h
index abecbc533..52b092775 100644
--- a/runtime/mirror/stack_trace_element.h
+++ b/runtime/mirror/stack_trace_element.h
@@ -29,7 +29,7 @@ struct StackTraceElementOffsets;
 namespace mirror {
 
 // C++ mirror of java.lang.StackTraceElement
-class MANAGED StackTraceElement : public Object {
+class MANAGED StackTraceElement FINAL : public Object {
  public:
   String* GetDeclaringClass() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     return GetFieldObject(OFFSET_OF_OBJECT_MEMBER(StackTraceElement, declaring_class_));
diff --git a/runtime/mirror/string-inl.h b/runtime/mirror/string-inl.h
index 315f7b195..673649759 100644
--- a/runtime/mirror/string-inl.h
+++ b/runtime/mirror/string-inl.h
@@ -18,6 +18,7 @@
 #define ART_RUNTIME_MIRROR_STRING_INL_H_
 
 #include "array.h"
+#include "class.h"
 #include "intern_table.h"
 #include "runtime.h"
 #include "string.h"
@@ -26,6 +27,11 @@ namespace art {
 namespace mirror {
 
+inline uint32_t String::ClassSize() {
+  uint32_t vtable_entries = Object::kVTableLength + 51;
+  return Class::ComputeClassSize(true, vtable_entries, 1, 1, 2);
+}
+
 inline CharArray* String::GetCharArray() {
   return GetFieldObject(ValueOffset());
 }
diff --git a/runtime/mirror/string.h b/runtime/mirror/string.h
index b8acede63..8ab4db90e 100644
--- a/runtime/mirror/string.h
+++ b/runtime/mirror/string.h
@@ -19,22 +19,28 @@
 
 #include
 
-#include "class.h"
 #include "object_callbacks.h"
 #include "read_barrier.h"
 
 namespace art {
 
 template class Handle;
-struct StringClassOffsets;
 struct StringOffsets;
 class StringPiece;
 
 namespace mirror {
 
 // C++ mirror of java.lang.String
-class MANAGED String : public Object {
+class MANAGED String FINAL : public Object {
  public:
+  // Size of java.lang.String.class.
+  static uint32_t ClassSize();
+
+  // Size of an instance of java.lang.String not including its value array.
+  static constexpr uint32_t InstanceSize() {
+    return sizeof(String);
+  }
+
   static MemberOffset CountOffset() {
     return OFFSET_OF_OBJECT_MEMBER(String, count_);
   }
@@ -160,16 +166,6 @@ class MANAGED String : public Object {
   DISALLOW_IMPLICIT_CONSTRUCTORS(String);
 };
 
-class MANAGED StringClass : public Class {
- private:
-  HeapReference ASCII_;
-  HeapReference CASE_INSENSITIVE_ORDER_;
-  uint32_t REPLACEMENT_CHAR_;
-  int64_t serialVersionUID_;
-  friend struct art::StringClassOffsets;  // for verifying offset information
-  DISALLOW_IMPLICIT_CONSTRUCTORS(StringClass);
-};
-
 }  // namespace mirror
 }  // namespace art
diff --git a/runtime/native/java_lang_Class.cc b/runtime/native/java_lang_Class.cc
index e619dda13..cede1a089 100644
--- a/runtime/native/java_lang_Class.cc
+++ b/runtime/native/java_lang_Class.cc
@@ -21,7 +21,6 @@
 #include "mirror/class-inl.h"
 #include "mirror/class_loader.h"
 #include "mirror/object-inl.h"
-#include "mirror/proxy.h"
 #include "object_utils.h"
 #include "scoped_thread_state_change.h"
 #include "scoped_fast_native_object_access.h"
@@ -91,8 +90,7 @@ static jstring Class_getNameNative(JNIEnv* env, jobject javaThis) {
 static jobjectArray Class_getProxyInterfaces(JNIEnv* env, jobject javaThis) {
   ScopedFastNativeObjectAccess soa(env);
-  mirror::SynthesizedProxyClass* c =
-      down_cast(DecodeClass(soa, javaThis));
+  mirror::Class* c = DecodeClass(soa, javaThis);
   return soa.AddLocalReference(c->GetInterfaces()->Clone(soa.Self()));
 }
diff --git a/runtime/native/java_lang_reflect_Method.cc b/runtime/native/java_lang_reflect_Method.cc
index 22e81e4b3..ac602acb2 100644
--- a/runtime/native/java_lang_reflect_Method.cc
+++ b/runtime/native/java_lang_reflect_Method.cc
@@ -21,7 +21,6 @@
 #include "mirror/class-inl.h"
 #include "mirror/object-inl.h"
 #include "mirror/object_array-inl.h"
-#include "mirror/proxy.h"
 #include "object_utils.h"
 #include "reflection.h"
 #include "scoped_fast_native_object_access.h"
@@ -39,8 +38,7 @@ static jobject Method_getExceptionTypesNative(JNIEnv* env, jobject javaMethod) {
   ScopedFastNativeObjectAccess soa(env);
   mirror::ArtMethod* proxy_method = mirror::ArtMethod::FromReflectedMethod(soa, javaMethod);
   CHECK(proxy_method->GetDeclaringClass()->IsProxyClass());
-  mirror::SynthesizedProxyClass* proxy_class =
-      down_cast(proxy_method->GetDeclaringClass());
+  mirror::Class* proxy_class = proxy_method->GetDeclaringClass();
   int throws_index = -1;
   size_t num_virt_methods = proxy_class->NumVirtualMethods();
   for (size_t i = 0; i < num_virt_methods; i++) {
diff --git a/runtime/oat_file.h b/runtime/oat_file.h
index 70253af61..44f446672 100644
--- a/runtime/oat_file.h
+++ b/runtime/oat_file.h
@@ -24,7 +24,7 @@
 #include "dex_file.h"
 #include "invoke_type.h"
 #include "mem_map.h"
-#include "mirror/art_method.h"
+#include "mirror/class.h"
 #include "oat.h"
 #include "os.h"
diff --git a/runtime/object_callbacks.h b/runtime/object_callbacks.h
index d8c1c402b..0e6f4d80a 100644
--- a/runtime/object_callbacks.h
+++ b/runtime/object_callbacks.h
@@ -26,10 +26,10 @@
 namespace art {
 namespace mirror {
-class Class;
-class Object;
-template class HeapReference;
-class Reference;
+  class Class;
+  class Object;
+  template class HeapReference;
+  class Reference;
 }  // namespace mirror
 class StackVisitor;
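With SynthesizedProxyClass removed, the native reflection code above reads a proxy's interfaces and throws tables through plain mirror::Class, via the GetInterfaces()/GetThrows() accessors this patch adds (valid for proxy classes only, whose two static fields back them). A sketch of the resulting call-site pattern, with a hypothetical helper name and the JNI plumbing and error handling omitted:

  // Sketch only: GetProxyInterfacesSketch is not a function in this patch; it just shows the
  // new access pattern. Headers are those of the ART source tree.
  #include "base/logging.h"        // DCHECK
  #include "mirror/class.h"
  #include "mirror/object_array.h"

  static mirror::ObjectArray<mirror::Class>* GetProxyInterfacesSketch(mirror::Class* proxy_class) {
    DCHECK(proxy_class->IsProxyClass());   // only meaningful for synthesized proxy classes
    return proxy_class->GetInterfaces();   // previously required a down_cast to SynthesizedProxyClass
  }

Keeping the accessors on Class (instead of a Class subclass) avoids a cast that was only valid for one kind of class object, now that the class layout itself is variable.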
"mirror/class.h" #include "mirror/dex_cache.h" #include "mirror/iftable.h" -#include "mirror/proxy.h" #include "mirror/string.h" #include "runtime.h" diff --git a/runtime/quick_exception_handler.cc b/runtime/quick_exception_handler.cc index 103492334..49f6fe015 100644 --- a/runtime/quick_exception_handler.cc +++ b/runtime/quick_exception_handler.cc @@ -20,6 +20,9 @@ #include "entrypoints/entrypoint_utils.h" #include "handle_scope-inl.h" #include "mirror/art_method-inl.h" +#include "mirror/class-inl.h" +#include "mirror/class_loader.h" +#include "mirror/throwable.h" #include "verifier/method_verifier.h" namespace art { diff --git a/runtime/runtime.cc b/runtime/runtime.cc index efa205e07..9cbc31b32 100644 --- a/runtime/runtime.cc +++ b/runtime/runtime.cc @@ -1013,8 +1013,8 @@ mirror::ArtMethod* Runtime::CreateImtConflictMethod() { method->SetEntryPointFromPortableCompiledCode(nullptr); method->SetEntryPointFromQuickCompiledCode(nullptr); } else { - method->SetEntryPointFromPortableCompiledCode(GetPortableImtConflictTrampoline(class_linker)); - method->SetEntryPointFromQuickCompiledCode(GetQuickImtConflictTrampoline(class_linker)); + method->SetEntryPointFromPortableCompiledCode(class_linker->GetPortableImtConflictTrampoline()); + method->SetEntryPointFromQuickCompiledCode(class_linker->GetQuickImtConflictTrampoline()); } return method.Get(); } @@ -1033,8 +1033,8 @@ mirror::ArtMethod* Runtime::CreateResolutionMethod() { method->SetEntryPointFromPortableCompiledCode(nullptr); method->SetEntryPointFromQuickCompiledCode(nullptr); } else { - method->SetEntryPointFromPortableCompiledCode(GetPortableResolutionTrampoline(class_linker)); - method->SetEntryPointFromQuickCompiledCode(GetQuickResolutionTrampoline(class_linker)); + method->SetEntryPointFromPortableCompiledCode(class_linker->GetPortableResolutionTrampoline()); + method->SetEntryPointFromQuickCompiledCode(class_linker->GetQuickResolutionTrampoline()); } return method.Get(); } diff --git a/runtime/thread.h b/runtime/thread.h index 1b335c82e..3f7c21b54 100644 --- a/runtime/thread.h +++ b/runtime/thread.h @@ -47,7 +47,7 @@ namespace art { namespace gc { namespace collector { -class SemiSpace; + class SemiSpace; } // namespace collector } // namespace gc @@ -61,7 +61,6 @@ namespace mirror { template class PrimitiveArray; typedef PrimitiveArray IntArray; class StackTraceElement; - class StaticStorageBase; class Throwable; } // namespace mirror class BaseMutex; diff --git a/runtime/utils.h b/runtime/utils.h index 448c591f2..b47de81d6 100644 --- a/runtime/utils.h +++ b/runtime/utils.h @@ -167,6 +167,10 @@ struct TypeIdentity { // For rounding integers. template +static constexpr T RoundDown(T x, typename TypeIdentity::type n) + __attribute__((warn_unused_result)); + +template static constexpr T RoundDown(T x, typename TypeIdentity::type n) { return DCHECK_CONSTEXPR(IsPowerOfTwo(n), , T(0)) @@ -174,17 +178,27 @@ static constexpr T RoundDown(T x, typename TypeIdentity::type n) { } template +static constexpr T RoundUp(T x, typename TypeIdentity::type n) + __attribute__((warn_unused_result)); + +template static constexpr T RoundUp(T x, typename TypeIdentity::type n) { return RoundDown(x + n - 1, n); } // For aligning pointers. 
 template
+static inline T* AlignDown(T* x, uintptr_t n) __attribute__((warn_unused_result));
+
+template
 static inline T* AlignDown(T* x, uintptr_t n) {
   return reinterpret_cast(RoundDown(reinterpret_cast(x), n));
 }
 
 template
+static inline T* AlignUp(T* x, uintptr_t n) __attribute__((warn_unused_result));
+
+template
 static inline T* AlignUp(T* x, uintptr_t n) {
   return reinterpret_cast(RoundUp(reinterpret_cast(x), n));
 }
diff --git a/runtime/verifier/method_verifier_test.cc b/runtime/verifier/method_verifier_test.cc
index 9ac04d706..f70faf59a 100644
--- a/runtime/verifier/method_verifier_test.cc
+++ b/runtime/verifier/method_verifier_test.cc
@@ -19,7 +19,7 @@
 #include
 #include
 
-#include "class_linker.h"
+#include "class_linker-inl.h"
 #include "common_runtime_test.h"
 #include "dex_file.h"
-- 
2.11.0
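A closing note on the utils.h hunks above: each rounding/alignment template now gets a separate forward declaration carrying __attribute__((warn_unused_result)), followed by the unchanged definition, so that discarding the result of RoundUp/RoundDown/AlignUp/AlignDown triggers a compiler warning. A minimal standalone illustration of the same pattern (names and the division-based rounding are placeholders, not the ART helpers):

  // Illustration of the declare-with-attribute / define-separately pattern.
  template <typename T>
  static constexpr T RoundUpSketch(T x, T n) __attribute__((warn_unused_result));

  template <typename T>
  static constexpr T RoundUpSketch(T x, T n) {
    return ((x + n - 1) / n) * n;  // assumes n > 0; the real RoundUp requires a power of two
  }

  // Discarding the result now warns under -Wunused-result:
  //   RoundUpSketch(13, 8);                 // warning
  //   int aligned = RoundUpSketch(13, 8);   // ok, aligned == 16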