From 951ec2c93c79c5539cbcc669566f0808d4460338 Mon Sep 17 00:00:00 2001
From: Mathieu Chartier <mathieuc@google.com>
Date: Tue, 22 Sep 2015 08:50:05 -0700
Subject: [PATCH] Revert "Revert "Add one LinearAlloc per ClassLoader""

Issue was fixed by:
https://android-review.googlesource.com/#/c/171945/

Bug: 22720414

This reverts commit 7de5dfe37f3cf24e1166412b589f6f67dcd1f1c0.
---
 runtime/class_linker.cc                    | 110 ++++++++++++++++++-----------
 runtime/class_linker.h                     |  33 ++++++---
 runtime/class_linker_test.cc               |   1 +
 runtime/gc/collector/concurrent_copying.cc |   2 +
 runtime/gc/collector/mark_compact.cc       |   1 +
 runtime/gc/collector/mark_sweep.cc         |   8 ++-
 runtime/gc/collector/semi_space.cc         |   1 +
 runtime/jit/jit_code_cache_test.cc         |   7 +-
 runtime/mirror/class_loader.h              |  14 ++++
 runtime/runtime.cc                         |  10 ++-
 runtime/runtime.h                          |   3 +
 runtime/stack.cc                           |  35 +++++----
 12 files changed, 151 insertions(+), 74 deletions(-)

diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index bc8a9f493..6b9c8aa35 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1318,9 +1318,8 @@ void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
     boot_class_table_.VisitRoots(buffered_visitor);
     // TODO: Avoid marking these to enable class unloading.
     JavaVMExt* const vm = Runtime::Current()->GetJavaVM();
-    for (jweak weak_root : class_loaders_) {
-      mirror::Object* class_loader =
-          down_cast<mirror::ClassLoader*>(vm->DecodeWeakGlobal(self, weak_root));
+    for (const ClassLoaderData& data : class_loaders_) {
+      mirror::Object* class_loader = vm->DecodeWeakGlobal(self, data.weak_root);
       // Don't need to update anything since the class loaders will be updated by SweepSystemWeaks.
       visitor->VisitRootIfNonNull(&class_loader, RootInfo(kRootVMInternal));
     }
@@ -1503,13 +1502,10 @@ ClassLinker::~ClassLinker() {
   STLDeleteElements(&oat_files_);
   Thread* const self = Thread::Current();
   JavaVMExt* const vm = Runtime::Current()->GetJavaVM();
-  for (jweak weak_root : class_loaders_) {
-    auto* const class_loader = down_cast<mirror::ClassLoader*>(
-        vm->DecodeWeakGlobalDuringShutdown(self, weak_root));
-    if (class_loader != nullptr) {
-      delete class_loader->GetClassTable();
-    }
-    vm->DeleteWeakGlobalRef(self, weak_root);
+  for (const ClassLoaderData& data : class_loaders_) {
+    vm->DecodeWeakGlobalDuringShutdown(self, data.weak_root);
+    delete data.allocator;
+    delete data.class_table;
   }
   class_loaders_.clear();
 }
@@ -2375,21 +2371,25 @@ void ClassLinker::LoadClass(Thread* self,
   }
 }
 
-LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self, size_t length) {
+LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self,
+                                                               LinearAlloc* allocator,
+                                                               size_t length) {
   if (length == 0) {
     return nullptr;
   }
   // If the ArtField alignment changes, review all uses of LengthPrefixedArray<ArtField>.
   static_assert(alignof(ArtField) == 4, "ArtField alignment is expected to be 4.");
   size_t storage_size = LengthPrefixedArray<ArtField>::ComputeSize(length);
-  void* array_storage = Runtime::Current()->GetLinearAlloc()->Alloc(self, storage_size);
+  void* array_storage = allocator->Alloc(self, storage_size);
   auto* ret = new(array_storage) LengthPrefixedArray<ArtField>(length);
   CHECK(ret != nullptr);
   std::uninitialized_fill_n(&ret->At(0), length, ArtField());
   return ret;
 }
 
-LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self, size_t length) {
+LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self,
+                                                                 LinearAlloc* allocator,
+                                                                 size_t length) {
   if (length == 0) {
     return nullptr;
   }
@@ -2397,7 +2397,7 @@ LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self, s
   const size_t method_size = ArtMethod::Size(image_pointer_size_);
   const size_t storage_size =
       LengthPrefixedArray<ArtMethod>::ComputeSize(length, method_size, method_alignment);
-  void* array_storage = Runtime::Current()->GetLinearAlloc()->Alloc(self, storage_size);
+  void* array_storage = allocator->Alloc(self, storage_size);
   auto* ret = new (array_storage) LengthPrefixedArray<ArtMethod>(length);
   CHECK(ret != nullptr);
   for (size_t i = 0; i < length; ++i) {
@@ -2406,6 +2406,15 @@ LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self, s
   return ret;
 }
 
+LinearAlloc* ClassLinker::GetAllocatorForClassLoader(mirror::ClassLoader* class_loader) {
+  if (class_loader == nullptr) {
+    return Runtime::Current()->GetLinearAlloc();
+  }
+  LinearAlloc* allocator = class_loader->GetAllocator();
+  DCHECK(allocator != nullptr);
+  return allocator;
+}
+
 void ClassLinker::LoadClassMembers(Thread* self,
                                    const DexFile& dex_file,
                                    const uint8_t* class_data,
@@ -2418,8 +2427,11 @@ void ClassLinker::LoadClassMembers(Thread* self,
   // Load static fields.
   // We allow duplicate definitions of the same field in a class_data_item
   // but ignore the repeated indexes here, b/21868015.
+  LinearAlloc* const allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
   ClassDataItemIterator it(dex_file, class_data);
-  LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, it.NumStaticFields());
+  LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self,
+                                                              allocator,
+                                                              it.NumStaticFields());
   size_t num_sfields = 0;
   uint32_t last_field_idx = 0u;
   for (; it.HasNextStaticField(); it.Next()) {
@@ -2435,7 +2447,9 @@ void ClassLinker::LoadClassMembers(Thread* self,
   klass->SetSFieldsPtr(sfields);
   DCHECK_EQ(klass->NumStaticFields(), num_sfields);
   // Load instance fields.
-  LengthPrefixedArray<ArtField>* ifields = AllocArtFieldArray(self, it.NumInstanceFields());
+  LengthPrefixedArray<ArtField>* ifields = AllocArtFieldArray(self,
+                                                              allocator,
+                                                              it.NumInstanceFields());
   size_t num_ifields = 0u;
   last_field_idx = 0u;
   for (; it.HasNextInstanceField(); it.Next()) {
@@ -2458,8 +2472,8 @@ void ClassLinker::LoadClassMembers(Thread* self,
   klass->SetIFieldsPtr(ifields);
   DCHECK_EQ(klass->NumInstanceFields(), num_ifields);
   // Load methods.
-  klass->SetDirectMethodsPtr(AllocArtMethodArray(self, it.NumDirectMethods()));
-  klass->SetVirtualMethodsPtr(AllocArtMethodArray(self, it.NumVirtualMethods()));
+  klass->SetDirectMethodsPtr(AllocArtMethodArray(self, allocator, it.NumDirectMethods()));
+  klass->SetVirtualMethodsPtr(AllocArtMethodArray(self, allocator, it.NumVirtualMethods()));
   size_t class_def_method_index = 0;
   uint32_t last_dex_method_index = DexFile::kDexNoIndex;
   size_t last_class_def_method_index = 0;
@@ -3031,7 +3045,7 @@ void ClassLinker::MoveClassTableToPreZygote() {
   WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
   boot_class_table_.FreezeSnapshot();
   MoveClassTableToPreZygoteVisitor visitor;
-  VisitClassLoadersAndRemoveClearedLoaders(&visitor);
+  VisitClassLoaders(&visitor);
 }
 
 mirror::Class* ClassLinker::LookupClassFromImage(const char* descriptor) {
@@ -3414,9 +3428,12 @@ mirror::Class* ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable&
   mirror::Class* existing = InsertClass(descriptor.c_str(), klass.Get(), hash);
   CHECK(existing == nullptr);
 
+  // Needs to be after we insert the class so that the allocator field is set.
+  LinearAlloc* const allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
+
   // Instance fields are inherited, but we add a couple of static fields...
   const size_t num_fields = 2;
-  LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, num_fields);
+  LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, allocator, num_fields);
   klass->SetSFieldsPtr(sfields);
 
   // 1. Create a static field 'interfaces' that holds the _declared_ interfaces implemented by
@@ -3433,7 +3450,7 @@ mirror::Class* ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable&
   throws_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
 
   // Proxies have 1 direct method, the constructor
-  LengthPrefixedArray<ArtMethod>* directs = AllocArtMethodArray(self, 1);
+  LengthPrefixedArray<ArtMethod>* directs = AllocArtMethodArray(self, allocator, 1);
   // Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we
   // want to throw OOM in the future.
   if (UNLIKELY(directs == nullptr)) {
@@ -3448,7 +3465,7 @@ mirror::Class* ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable&
   DCHECK_EQ(h_methods->GetClass(), mirror::Method::ArrayClass())
       << PrettyClass(h_methods->GetClass());
   const size_t num_virtual_methods = h_methods->GetLength();
-  auto* virtuals = AllocArtMethodArray(self, num_virtual_methods);
+  auto* virtuals = AllocArtMethodArray(self, allocator, num_virtual_methods);
   // Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we
   // want to throw OOM in the future.
   if (UNLIKELY(virtuals == nullptr)) {
@@ -4166,9 +4183,14 @@ ClassTable* ClassLinker::InsertClassTableForClassLoader(mirror::ClassLoader* cla
   if (class_table == nullptr) {
     class_table = new ClassTable;
     Thread* const self = Thread::Current();
-    class_loaders_.push_back(self->GetJniEnv()->vm->AddWeakGlobalRef(self, class_loader));
+    ClassLoaderData data;
+    data.weak_root = self->GetJniEnv()->vm->AddWeakGlobalRef(self, class_loader);
+    data.class_table = class_table;
+    data.allocator = Runtime::Current()->CreateLinearAlloc();
+    class_loaders_.push_back(data);
     // Don't already have a class table, add it to the class loader.
-    class_loader->SetClassTable(class_table);
+    class_loader->SetClassTable(data.class_table);
+    class_loader->SetAllocator(data.allocator);
   }
   return class_table;
 }
@@ -6158,7 +6180,10 @@ jobject ClassLinker::CreatePathClassLoader(Thread* self, std::vector<const DexFi
 ArtMethod* ClassLinker::CreateRuntimeMethod() {
   const size_t method_alignment = ArtMethod::Alignment(image_pointer_size_);
   const size_t method_size = ArtMethod::Size(image_pointer_size_);
-  LengthPrefixedArray<ArtMethod>* method_array = AllocArtMethodArray(Thread::Current(), 1);
+  LengthPrefixedArray<ArtMethod>* method_array = AllocArtMethodArray(
+      Thread::Current(),
+      Runtime::Current()->GetLinearAlloc(),
+      1);
   ArtMethod* method = &method_array->At(0, method_size, method_alignment);
   CHECK(method != nullptr);
   method->SetDexMethodIndex(DexFile::kDexNoIndex);
@@ -6171,33 +6196,34 @@ void ClassLinker::DropFindArrayClassCache() {
   find_array_class_cache_next_victim_ = 0;
 }
 
-void ClassLinker::VisitClassLoadersAndRemoveClearedLoaders(ClassLoaderVisitor* visitor) {
+void ClassLinker::VisitClassLoaders(ClassLoaderVisitor* visitor) const {
   Thread* const self = Thread::Current();
-  Locks::classlinker_classes_lock_->AssertExclusiveHeld(self);
   JavaVMExt* const vm = self->GetJniEnv()->vm;
-  for (auto it = class_loaders_.begin(); it != class_loaders_.end();) {
-    const jweak weak_root = *it;
-    mirror::ClassLoader* const class_loader = down_cast<mirror::ClassLoader*>(
-        vm->DecodeWeakGlobal(self, weak_root));
+  for (const ClassLoaderData& data : class_loaders_) {
+    auto* const class_loader = down_cast<mirror::ClassLoader*>(
+        vm->DecodeWeakGlobal(self, data.weak_root));
     if (class_loader != nullptr) {
       visitor->Visit(class_loader);
-      ++it;
-    } else {
-      // Remove the cleared weak reference from the array.
-      vm->DeleteWeakGlobalRef(self, weak_root);
-      it = class_loaders_.erase(it);
     }
   }
 }
 
-void ClassLinker::VisitClassLoaders(ClassLoaderVisitor* visitor) const {
+void ClassLinker::CleanupClassLoaders() {
   Thread* const self = Thread::Current();
-  JavaVMExt* const vm = self->GetJniEnv()->vm;
-  for (jweak weak_root : class_loaders_) {
-    mirror::ClassLoader* const class_loader = down_cast<mirror::ClassLoader*>(
-        vm->DecodeWeakGlobal(self, weak_root));
+  WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
+  JavaVMExt* const vm = Runtime::Current()->GetJavaVM();
+  for (auto it = class_loaders_.begin(); it != class_loaders_.end(); ) {
+    const ClassLoaderData& data = *it;
+    auto* const class_loader = down_cast<mirror::ClassLoader*>(
+        vm->DecodeWeakGlobal(self, data.weak_root));
     if (class_loader != nullptr) {
-      visitor->Visit(class_loader);
+      ++it;
+    } else {
+      // Weak reference was cleared, delete the data associated with this class loader.
+      delete data.class_table;
+      delete data.allocator;
+      vm->DeleteWeakGlobalRef(self, data.weak_root);
+      it = class_loaders_.erase(it);
     }
   }
 }
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index fee706625..f705330b1 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -403,9 +403,13 @@ class ClassLinker {
       SHARED_REQUIRES(Locks::mutator_lock_)
       REQUIRES(!Roles::uninterruptible_);
 
-  LengthPrefixedArray<ArtField>* AllocArtFieldArray(Thread* self, size_t length);
+  LengthPrefixedArray<ArtField>* AllocArtFieldArray(Thread* self,
+                                                    LinearAlloc* allocator,
+                                                    size_t length);
 
-  LengthPrefixedArray<ArtMethod>* AllocArtMethodArray(Thread* self, size_t length);
+  LengthPrefixedArray<ArtMethod>* AllocArtMethodArray(Thread* self,
+                                                      LinearAlloc* allocator,
+                                                      size_t length);
 
   mirror::PointerArray* AllocPointerArray(Thread* self, size_t length)
       SHARED_REQUIRES(Locks::mutator_lock_)
@@ -546,17 +550,24 @@ class ClassLinker {
   // entries are roots, but potentially not image classes.
   void DropFindArrayClassCache() SHARED_REQUIRES(Locks::mutator_lock_);
 
- private:
-  // The RemoveClearedLoaders version removes cleared weak global class loaders and frees their
-  // class tables. This version can only be called with reader access to the
-  // classlinker_classes_lock_ since it modifies the class_loaders_ list.
-  void VisitClassLoadersAndRemoveClearedLoaders(ClassLoaderVisitor* visitor)
-      REQUIRES(Locks::classlinker_classes_lock_)
+  // Clean up class loaders, this needs to happen after JNI weak globals are cleared.
+  void CleanupClassLoaders()
+      SHARED_REQUIRES(Locks::mutator_lock_)
+      REQUIRES(!Locks::classlinker_classes_lock_);
+
+  static LinearAlloc* GetAllocatorForClassLoader(mirror::ClassLoader* class_loader)
       SHARED_REQUIRES(Locks::mutator_lock_);
+
+ private:
+  struct ClassLoaderData {
+    jobject weak_root;  // Weak root to enable class unloading.
+    ClassTable* class_table;
+    LinearAlloc* allocator;
+  };
 
   void VisitClassLoaders(ClassLoaderVisitor* visitor) const
       SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_);
 
   void VisitClassesInternal(ClassVisitor* visitor)
       SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_);
 
@@ -826,8 +837,8 @@ class ClassLinker {
   std::vector<const OatFile*> oat_files_ GUARDED_BY(dex_lock_);
 
   // This contains the class loaders which have class tables. It is populated by
-  // InsertClassTableForClassLoader. Weak roots to enable class unloading.
-  std::list<jweak> class_loaders_
+  // InsertClassTableForClassLoader.
+  std::list<ClassLoaderData> class_loaders_
       GUARDED_BY(Locks::classlinker_classes_lock_);
 
   // Boot class path table. Since the class loader for this is null.
diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc
index b4ea3b346..0926ce3f6 100644
--- a/runtime/class_linker_test.cc
+++ b/runtime/class_linker_test.cc
@@ -550,6 +550,7 @@ struct StackTraceElementOffsets : public CheckOffsets<mirror::StackTraceElement
 
 struct ClassLoaderOffsets : public CheckOffsets<mirror::ClassLoader> {
   ClassLoaderOffsets() : CheckOffsets<mirror::ClassLoader>(false, "Ljava/lang/ClassLoader;") {
+    addOffset(OFFSETOF_MEMBER(mirror::ClassLoader, allocator_), "allocator");
     addOffset(OFFSETOF_MEMBER(mirror::ClassLoader, class_table_), "classTable");
     addOffset(OFFSETOF_MEMBER(mirror::ClassLoader, packages_), "packages");
     addOffset(OFFSETOF_MEMBER(mirror::ClassLoader, parent_), "parent");
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index 399591b93..468179c9d 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -457,6 +457,8 @@ void ConcurrentCopying::MarkingPhase() {
   CheckEmptyMarkStack();
   // Re-enable weak ref accesses.
   ReenableWeakRefAccess(self);
+  // Free data for class loaders that we unloaded.
+  Runtime::Current()->GetClassLinker()->CleanupClassLoaders();
   // Marking is done. Disable marking.
   DisableMarking();
   CheckEmptyMarkStack();
diff --git a/runtime/gc/collector/mark_compact.cc b/runtime/gc/collector/mark_compact.cc
index 60f833b34..f561764ce 100644
--- a/runtime/gc/collector/mark_compact.cc
+++ b/runtime/gc/collector/mark_compact.cc
@@ -205,6 +205,7 @@ void MarkCompact::MarkingPhase() {
     ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
     SweepSystemWeaks();
   }
+  Runtime::Current()->GetClassLinker()->CleanupClassLoaders();
   // Revoke buffers before measuring how many objects were moved since the TLABs need to be revoked
   // before they are properly counted.
   RevokeAllThreadLocalBuffers();
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index 089f45388..2d1f3120b 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -283,11 +283,15 @@ void MarkSweep::MarkReachableObjects() {
 
 void MarkSweep::ReclaimPhase() {
   TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
-  Thread* self = Thread::Current();
+  Thread* const self = Thread::Current();
   // Process the references concurrently.
   ProcessReferences(self);
   SweepSystemWeaks(self);
-  Runtime::Current()->AllowNewSystemWeaks();
+  Runtime* const runtime = Runtime::Current();
+  runtime->AllowNewSystemWeaks();
+  // Clean up class loaders after system weaks are swept since that is how we know if class
+  // unloading occurred.
+  runtime->GetClassLinker()->CleanupClassLoaders();
   {
     WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
     GetHeap()->RecordFreeRevoke();
diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc
index ed63ed049..7f57f30b2 100644
--- a/runtime/gc/collector/semi_space.cc
+++ b/runtime/gc/collector/semi_space.cc
@@ -248,6 +248,7 @@ void SemiSpace::MarkingPhase() {
     ReaderMutexLock mu(self_, *Locks::heap_bitmap_lock_);
     SweepSystemWeaks();
   }
+  Runtime::Current()->GetClassLinker()->CleanupClassLoaders();
   // Revoke buffers before measuring how many objects were moved since the TLABs need to be revoked
   // before they are properly counted.
   RevokeAllThreadLocalBuffers();
diff --git a/runtime/jit/jit_code_cache_test.cc b/runtime/jit/jit_code_cache_test.cc
index a6cbb710a..c76dc1110 100644
--- a/runtime/jit/jit_code_cache_test.cc
+++ b/runtime/jit/jit_code_cache_test.cc
@@ -49,8 +49,11 @@ TEST_F(JitCodeCacheTest, TestCoverage) {
   ASSERT_TRUE(reserved_code != nullptr);
   ASSERT_TRUE(code_cache->ContainsCodePtr(reserved_code));
   ASSERT_EQ(code_cache->NumMethods(), 1u);
-  ClassLinker* const cl = Runtime::Current()->GetClassLinker();
-  ArtMethod* method = &cl->AllocArtMethodArray(soa.Self(), 1)->At(0);
+  Runtime* const runtime = Runtime::Current();
+  ClassLinker* const class_linker = runtime->GetClassLinker();
+  ArtMethod* method = &class_linker->AllocArtMethodArray(soa.Self(),
+                                                         runtime->GetLinearAlloc(),
+                                                         1)->At(0);
   ASSERT_FALSE(code_cache->ContainsMethod(method));
   method->SetEntryPointFromQuickCompiledCode(reserved_code);
   ASSERT_TRUE(code_cache->ContainsMethod(method));
diff --git a/runtime/mirror/class_loader.h b/runtime/mirror/class_loader.h
index f27b6155c..c2a65d62e 100644
--- a/runtime/mirror/class_loader.h
+++ b/runtime/mirror/class_loader.h
@@ -35,18 +35,31 @@ class MANAGED ClassLoader : public Object {
   static constexpr uint32_t InstanceSize() {
     return sizeof(ClassLoader);
   }
+
   ClassLoader* GetParent() SHARED_REQUIRES(Locks::mutator_lock_) {
     return GetFieldObject<ClassLoader>(OFFSET_OF_OBJECT_MEMBER(ClassLoader, parent_));
   }
+
   ClassTable* GetClassTable() SHARED_REQUIRES(Locks::mutator_lock_) {
     return reinterpret_cast<ClassTable*>(
        GetField64(OFFSET_OF_OBJECT_MEMBER(ClassLoader, class_table_)));
   }
+
   void SetClassTable(ClassTable* class_table) SHARED_REQUIRES(Locks::mutator_lock_) {
     SetField64(OFFSET_OF_OBJECT_MEMBER(ClassLoader, class_table_),
                reinterpret_cast<uint64_t>(class_table));
   }
+
+  LinearAlloc* GetAllocator() SHARED_REQUIRES(Locks::mutator_lock_) {
+    return reinterpret_cast<LinearAlloc*>(
+        GetField64(OFFSET_OF_OBJECT_MEMBER(ClassLoader, allocator_)));
+  }
+
+  void SetAllocator(LinearAlloc* allocator) SHARED_REQUIRES(Locks::mutator_lock_) {
+    SetField64(OFFSET_OF_OBJECT_MEMBER(ClassLoader, allocator_),
+               reinterpret_cast<uint64_t>(allocator));
+  }
 
  private:
   // Visit instance fields of the class loader as well as its associated classes.
   // Null class loader is handled by ClassLinker::VisitClassRoots.
@@ -61,6 +74,7 @@ class MANAGED ClassLoader : public Object {
   HeapReference<Object> proxyCache_;
   // Native pointer to class table, need to zero this out when image writing.
   uint32_t padding_ ATTRIBUTE_UNUSED;
+  uint64_t allocator_;
   uint64_t class_table_;
 
   friend struct art::ClassLoaderOffsets;  // for verifying offset information
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index fba9d37d1..fe97394e0 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -947,10 +947,8 @@ bool Runtime::Init(const RuntimeOptions& raw_options, bool ignore_unrecognized)
   if (IsCompiler() && Is64BitInstructionSet(kRuntimeISA)) {
     // 4gb, no malloc. Explanation in header.
     low_4gb_arena_pool_.reset(new ArenaPool(false, true));
-    linear_alloc_.reset(new LinearAlloc(low_4gb_arena_pool_.get()));
-  } else {
-    linear_alloc_.reset(new LinearAlloc(arena_pool_.get()));
   }
+  linear_alloc_.reset(CreateLinearAlloc());
 
   BlockSignals();
   InitPlatformSignalHandlers();
@@ -1791,4 +1789,10 @@ bool Runtime::IsVerificationSoftFail() const {
   return verify_ == verifier::VerifyMode::kSoftFail;
 }
 
+LinearAlloc* Runtime::CreateLinearAlloc() {
+  return (IsCompiler() && Is64BitInstructionSet(kRuntimeISA))
+      ? new LinearAlloc(low_4gb_arena_pool_.get())
+      : new LinearAlloc(arena_pool_.get());
+}
+
 }  // namespace art
diff --git a/runtime/runtime.h b/runtime/runtime.h
index a35eac1af..6154c34ec 100644
--- a/runtime/runtime.h
+++ b/runtime/runtime.h
@@ -570,6 +570,9 @@ class Runtime {
   // Called from class linker.
   void SetSentinel(mirror::Object* sentinel) SHARED_REQUIRES(Locks::mutator_lock_);
 
+  // Create a normal LinearAlloc or low 4gb version if we are 64 bit AOT compiler.
+  LinearAlloc* CreateLinearAlloc();
+
  private:
 
   static void InitPlatformSignalHandlers();
diff --git a/runtime/stack.cc b/runtime/stack.cc
index d73974315..7f72f8ab6 100644
--- a/runtime/stack.cc
+++ b/runtime/stack.cc
@@ -840,23 +840,30 @@ void StackVisitor::SanityCheckFrame() const {
     } else {
      CHECK(declaring_class == nullptr);
     }
-    auto* runtime = Runtime::Current();
-    auto* la = runtime->GetLinearAlloc();
-    if (!la->Contains(method)) {
-      // Check image space.
-      bool in_image = false;
-      for (auto& space : runtime->GetHeap()->GetContinuousSpaces()) {
-        if (space->IsImageSpace()) {
-          auto* image_space = space->AsImageSpace();
-          const auto& header = image_space->GetImageHeader();
-          const auto* methods = &header.GetMethodsSection();
-          if (methods->Contains(reinterpret_cast<const uint8_t*>(method) - image_space->Begin())) {
-            in_image = true;
-            break;
+    Runtime* const runtime = Runtime::Current();
+    LinearAlloc* const linear_alloc = runtime->GetLinearAlloc();
+    if (!linear_alloc->Contains(method)) {
+      // Check class linker linear allocs.
+      mirror::Class* klass = method->GetDeclaringClass();
+      LinearAlloc* const class_linear_alloc = (klass != nullptr)
+          ? ClassLinker::GetAllocatorForClassLoader(klass->GetClassLoader())
+          : linear_alloc;
+      if (!class_linear_alloc->Contains(method)) {
+        // Check image space.
+        bool in_image = false;
+        for (auto& space : runtime->GetHeap()->GetContinuousSpaces()) {
+          if (space->IsImageSpace()) {
+            auto* image_space = space->AsImageSpace();
+            const auto& header = image_space->GetImageHeader();
+            const auto* methods = &header.GetMethodsSection();
+            if (methods->Contains(reinterpret_cast<const uint8_t*>(method) - image_space->Begin())) {
+              in_image = true;
+              break;
+            }
           }
         }
+        CHECK(in_image) << PrettyMethod(method) << " not in linear alloc or image";
       }
-      CHECK(in_image) << PrettyMethod(method) << " not in linear alloc or image";
     }
     if (cur_quick_frame_ != nullptr) {
       method->AssertPcIsWithinQuickCode(cur_quick_frame_pc_);
-- 
2.11.0
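
For readers following the patch from outside the ART tree, the core of the change is the ClassLoaderData record (weak JNI root + ClassTable + LinearAlloc) and the rule, spelled out in the mark_sweep.cc comment above, that CleanupClassLoaders() may only run after SweepSystemWeaks(): a cleared weak root is the collector's only proof that a loader was unloaded. The standalone C++ sketch below models that ownership-and-sweep pattern under simplified assumptions; every name in it (WeakRef, LoaderData, LoaderRegistry) is hypothetical and not part of ART, and the boolean "cleared" flag stands in for what the GC does to a jweak global.

#include <list>

// Hypothetical stand-ins for the ART types used by ClassLinker::ClassLoaderData.
struct ClassTable {};
struct LinearAlloc {};
struct WeakRef { bool cleared = false; };  // models a JNI weak global root

struct LoaderData {
  WeakRef* weak_root;       // cleared by the GC when the loader becomes unreachable
  ClassTable* class_table;  // native memory owned by this record
  LinearAlloc* allocator;   // per-loader linear allocation arena
};

class LoaderRegistry {
 public:
  // Loosely mirrors InsertClassTableForClassLoader(): one table and one
  // arena are created per registered loader.
  void Register(WeakRef* root) {
    loaders_.push_back(LoaderData{root, new ClassTable, new LinearAlloc});
  }

  // Loosely mirrors CleanupClassLoaders(): must run after the GC has swept
  // system weaks, since a cleared weak root is the signal that unloading
  // happened and the native data can be freed.
  void Cleanup() {
    for (auto it = loaders_.begin(); it != loaders_.end(); ) {
      if (!it->weak_root->cleared) {
        ++it;  // loader still alive, keep its native data
      } else {
        delete it->class_table;
        delete it->allocator;
        it = loaders_.erase(it);
      }
    }
  }

 private:
  std::list<LoaderData> loaders_;
};

The same split shows up in the patch itself: VisitClassLoaders() only reads the list (so it can be const and skip cleared entries), while CleanupClassLoaders() takes classlinker_classes_lock_ for writing, frees the ClassTable and LinearAlloc, and drops the weak global, exactly as Cleanup() does above. That is also why each collector calls CleanupClassLoaders() right after its SweepSystemWeaks() call rather than during marking.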