From 763a31ed7a2bfad22a9cb07f5301a71c0f97ca49 Mon Sep 17 00:00:00 2001
From: Mathieu Chartier
Date: Mon, 16 Nov 2015 16:05:55 -0800
Subject: [PATCH] Add immune spaces abstraction

ImmuneSpaces is a set of spaces which are not reclaimable by the GC in
the current collection. Unlike the old ImmuneRegion, this set of spaces
has no requirement that its members be adjacent.

ImmuneSpaces generates the largest immune region for the GC. Since
there is no adjacency requirement, it is possible to have multiple
non-adjacent application image files.

For image spaces, we also look at the oat code which is normally after
the application image. In this case, we add the code as part of the
immune region. This is required to have both the boot image and the
zygote space be in the same immune region (for performance reasons).

Bug: 22858531
Change-Id: I5103b31c0e39ad63c594f5557fc848a3b288b43e
---
 build/Android.gtest.mk                     |   1 +
 runtime/Android.mk                         |   1 +
 runtime/gc/collector/concurrent_copying.cc |  46 ++++-----
 runtime/gc/collector/concurrent_copying.h  |   4 +-
 runtime/gc/collector/immune_region.cc      |  33 -------
 runtime/gc/collector/immune_region.h       |  17 ++--
 runtime/gc/collector/immune_spaces.cc      |  98 +++++++++++++++
 runtime/gc/collector/immune_spaces.h       |  95 ++++++++++++++
 runtime/gc/collector/immune_spaces_test.cc | 150 +++++++++++++++++++++++
 runtime/gc/collector/mark_compact.cc       |  12 +--
 runtime/gc/collector/mark_compact.h        |   6 +-
 runtime/gc/collector/mark_sweep.cc         |  74 +++++++-------
 runtime/gc/collector/mark_sweep.h          |   7 +-
 runtime/gc/collector/partial_mark_sweep.cc |   2 +-
 runtime/gc/collector/semi_space-inl.h      |   2 +-
 runtime/gc/collector/semi_space.cc         |  16 ++-
 runtime/gc/collector/semi_space.h          |   6 +-
 runtime/gc/space/image_space.h             |  18 +++-
 runtime/image.h                            |   4 +-
 19 files changed, 468 insertions(+), 124 deletions(-)
 create mode 100644 runtime/gc/collector/immune_spaces.cc
 create mode 100644 runtime/gc/collector/immune_spaces.h
 create mode 100644 runtime/gc/collector/immune_spaces_test.cc

diff --git a/build/Android.gtest.mk b/build/Android.gtest.mk
index 0afec2d5e..dcde5abbc 100644
--- a/build/Android.gtest.mk
+++ b/build/Android.gtest.mk
@@ -188,6 +188,7 @@ RUNTIME_GTEST_COMMON_SRC_FILES := \
   runtime/gc/accounting/card_table_test.cc \
   runtime/gc/accounting/mod_union_table_test.cc \
   runtime/gc/accounting/space_bitmap_test.cc \
+  runtime/gc/collector/immune_spaces_test.cc \
   runtime/gc/heap_test.cc \
   runtime/gc/reference_queue_test.cc \
   runtime/gc/space/dlmalloc_space_base_test.cc \
diff --git a/runtime/Android.mk b/runtime/Android.mk
index 1fdffe3e1..0b0f0942a 100644
--- a/runtime/Android.mk
+++ b/runtime/Android.mk
@@ -60,6 +60,7 @@ LIBART_COMMON_SRC_FILES := \
   gc/collector/concurrent_copying.cc \
   gc/collector/garbage_collector.cc \
   gc/collector/immune_region.cc \
+  gc/collector/immune_spaces.cc \
   gc/collector/mark_compact.cc \
   gc/collector/mark_sweep.cc \
   gc/collector/partial_mark_sweep.cc \
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index f4cf3ae26..1cd798311 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -134,10 +134,10 @@ void ConcurrentCopying::BindBitmaps() {
   WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
   // Mark all of the spaces we never collect as immune.
   for (const auto& space : heap_->GetContinuousSpaces()) {
-    if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect
-        || space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
+    if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect ||
+        space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
       CHECK(space->IsZygoteSpace() || space->IsImageSpace());
-      CHECK(immune_region_.AddContinuousSpace(space)) << "Failed to add space " << *space;
+      immune_spaces_.AddSpace(space);
       const char* bitmap_name = space->IsImageSpace() ? "cc image space bitmap"
                                                       : "cc zygote space bitmap";
       // TODO: try avoiding using bitmaps for image/zygote to save space.
@@ -164,7 +164,7 @@ void ConcurrentCopying::InitializePhase() {
               << reinterpret_cast<void*>(region_space_->Limit());
   }
   CheckEmptyMarkStack();
-  immune_region_.Reset();
+  immune_spaces_.Reset();
   bytes_moved_.StoreRelaxed(0);
   objects_moved_.StoreRelaxed(0);
   if (GetCurrentIteration()->GetGcCause() == kGcCauseExplicit ||
@@ -177,7 +177,11 @@ void ConcurrentCopying::InitializePhase() {
   BindBitmaps();
   if (kVerboseMode) {
     LOG(INFO) << "force_evacuate_all=" << force_evacuate_all_;
-    LOG(INFO) << "Immune region: " << immune_region_.Begin() << "-" << immune_region_.End();
+    LOG(INFO) << "Largest immune region: " << immune_spaces_.GetLargestImmuneRegion().Begin()
+              << "-" << immune_spaces_.GetLargestImmuneRegion().End();
+    for (space::ContinuousSpace* space : immune_spaces_.GetSpaces()) {
+      LOG(INFO) << "Immune space: " << *space;
+    }
     LOG(INFO) << "GC end of InitializePhase";
   }
 }
@@ -300,7 +304,7 @@ class ConcurrentCopyingImmuneSpaceObjVisitor {
   void operator()(mirror::Object* obj) const SHARED_REQUIRES(Locks::mutator_lock_)
       SHARED_REQUIRES(Locks::heap_bitmap_lock_) {
     DCHECK(obj != nullptr);
-    DCHECK(collector_->immune_region_.ContainsObject(obj));
+    DCHECK(collector_->immune_spaces_.ContainsObject(obj));
     accounting::ContinuousSpaceBitmap* cc_bitmap =
         collector_->cc_heap_bitmap_->GetContinuousSpaceBitmap(obj);
     DCHECK(cc_bitmap != nullptr)
@@ -383,15 +387,13 @@ void ConcurrentCopying::MarkingPhase() {
   }
 
   // Immune spaces.
-  for (auto& space : heap_->GetContinuousSpaces()) {
-    if (immune_region_.ContainsSpace(space)) {
-      DCHECK(space->IsImageSpace() || space->IsZygoteSpace());
-      accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap();
-      ConcurrentCopyingImmuneSpaceObjVisitor visitor(this);
-      live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
-                                    reinterpret_cast<uintptr_t>(space->Limit()),
-                                    visitor);
-    }
+  for (auto& space : immune_spaces_.GetSpaces()) {
+    DCHECK(space->IsImageSpace() || space->IsZygoteSpace());
+    accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap();
+    ConcurrentCopyingImmuneSpaceObjVisitor visitor(this);
+    live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
+                                  reinterpret_cast<uintptr_t>(space->Limit()),
+                                  visitor);
   }
 
   Thread* self = Thread::Current();
@@ -1205,7 +1207,7 @@ void ConcurrentCopying::Sweep(bool swap_bitmaps) {
   for (const auto& space : GetHeap()->GetContinuousSpaces()) {
     if (space->IsContinuousMemMapAllocSpace()) {
       space::ContinuousMemMapAllocSpace* alloc_space = space->AsContinuousMemMapAllocSpace();
-      if (space == region_space_ || immune_region_.ContainsSpace(space)) {
+      if (space == region_space_ || immune_spaces_.ContainsSpace(space)) {
         continue;
       }
       TimingLogger::ScopedTiming split2(
@@ -1507,8 +1509,8 @@ void ConcurrentCopying::LogFromSpaceRefHolder(mirror::Object* obj, MemberOffset
     }
   } else {
     // In a non-moving space.
-    if (immune_region_.ContainsObject(obj)) {
-      LOG(INFO) << "holder is in the image or the zygote space.";
+    if (immune_spaces_.ContainsObject(obj)) {
+      LOG(INFO) << "holder is in an immune image or the zygote space.";
       accounting::ContinuousSpaceBitmap* cc_bitmap =
           cc_heap_bitmap_->GetContinuousSpaceBitmap(obj);
       CHECK(cc_bitmap != nullptr)
@@ -1519,7 +1521,7 @@ void ConcurrentCopying::LogFromSpaceRefHolder(mirror::Object* obj, MemberOffset
         LOG(INFO) << "holder is NOT marked in the bit map.";
       }
     } else {
-      LOG(INFO) << "holder is in a non-moving (or main) space.";
+      LOG(INFO) << "holder is in a non-immune, non-moving (or main) space.";
       accounting::ContinuousSpaceBitmap* mark_bitmap =
           heap_mark_bitmap_->GetContinuousSpaceBitmap(obj);
       accounting::LargeObjectBitmap* los_bitmap =
@@ -1547,7 +1549,7 @@ void ConcurrentCopying::LogFromSpaceRefHolder(mirror::Object* obj, MemberOffset
 void ConcurrentCopying::AssertToSpaceInvariantInNonMovingSpace(mirror::Object* obj,
                                                                mirror::Object* ref) {
   // In a non-moving spaces. Check that the ref is marked.
-  if (immune_region_.ContainsObject(ref)) {
+  if (immune_spaces_.ContainsObject(ref)) {
     accounting::ContinuousSpaceBitmap* cc_bitmap =
         cc_heap_bitmap_->GetContinuousSpaceBitmap(ref);
     CHECK(cc_bitmap != nullptr)
@@ -1932,7 +1934,7 @@ mirror::Object* ConcurrentCopying::IsMarked(mirror::Object* from_ref) {
     }
   } else {
     // from_ref is in a non-moving space.
-    if (immune_region_.ContainsObject(from_ref)) {
+    if (immune_spaces_.ContainsObject(from_ref)) {
       accounting::ContinuousSpaceBitmap* cc_bitmap =
           cc_heap_bitmap_->GetContinuousSpaceBitmap(from_ref);
       DCHECK(cc_bitmap != nullptr)
@@ -1986,7 +1988,7 @@ bool ConcurrentCopying::IsOnAllocStack(mirror::Object* ref) {
 mirror::Object* ConcurrentCopying::MarkNonMoving(mirror::Object* ref) {
   // ref is in a non-moving space (from_ref == to_ref).
   DCHECK(!region_space_->HasAddress(ref)) << ref;
-  if (immune_region_.ContainsObject(ref)) {
+  if (immune_spaces_.ContainsObject(ref)) {
     accounting::ContinuousSpaceBitmap* cc_bitmap =
         cc_heap_bitmap_->GetContinuousSpaceBitmap(ref);
     DCHECK(cc_bitmap != nullptr)
diff --git a/runtime/gc/collector/concurrent_copying.h b/runtime/gc/collector/concurrent_copying.h
index 27726e23c..5d21c599e 100644
--- a/runtime/gc/collector/concurrent_copying.h
+++ b/runtime/gc/collector/concurrent_copying.h
@@ -19,7 +19,7 @@
 
 #include "barrier.h"
 #include "garbage_collector.h"
-#include "immune_region.h"
+#include "immune_spaces.h"
 #include "jni.h"
 #include "object_callbacks.h"
 #include "offsets.h"
@@ -200,7 +200,7 @@ class ConcurrentCopying : public GarbageCollector {
   bool is_marking_;                       // True while marking is ongoing.
   bool is_active_;                        // True while the collection is ongoing.
   bool is_asserting_to_space_invariant_;  // True while asserting the to-space invariant.
-  ImmuneRegion immune_region_;
+  ImmuneSpaces immune_spaces_;
   std::unique_ptr<accounting::HeapBitmap> cc_heap_bitmap_;
   std::vector<accounting::SpaceBitmap<kObjectAlignment>*> cc_bitmaps_;
   accounting::SpaceBitmap<kObjectAlignment>* region_space_bitmap_;
diff --git a/runtime/gc/collector/immune_region.cc b/runtime/gc/collector/immune_region.cc
index 3e1c94430..8a04c178b 100644
--- a/runtime/gc/collector/immune_region.cc
+++ b/runtime/gc/collector/immune_region.cc
@@ -32,39 +32,6 @@ void ImmuneRegion::Reset() {
   SetEnd(nullptr);
 }
 
-bool ImmuneRegion::AddContinuousSpace(space::ContinuousSpace* space) {
-  // Bind live to mark bitmap if necessary.
-  if (space->GetLiveBitmap() != space->GetMarkBitmap()) {
-    CHECK(space->IsContinuousMemMapAllocSpace());
-    space->AsContinuousMemMapAllocSpace()->BindLiveToMarkBitmap();
-  }
-  mirror::Object* space_begin = reinterpret_cast<mirror::Object*>(space->Begin());
-  mirror::Object* space_limit = reinterpret_cast<mirror::Object*>(space->Limit());
-  if (IsEmpty()) {
-    SetBegin(space_begin);
-    SetEnd(space_limit);
-  } else {
-    if (space_limit <= begin_) {  // Space is before the immune region.
-      SetBegin(space_begin);
-    } else if (space_begin >= end_) {  // Space is after the immune region.
-      SetEnd(space_limit);
-    } else {
-      return false;
-    }
-  }
-  return true;
-}
-
-bool ImmuneRegion::ContainsSpace(const space::ContinuousSpace* space) const {
-  bool contains =
-      begin_ <= reinterpret_cast<mirror::Object*>(space->Begin()) &&
-      end_ >= reinterpret_cast<mirror::Object*>(space->Limit());
-  if (kIsDebugBuild && contains) {
-    // A bump pointer space shoult not be in the immune region.
-    DCHECK(space->GetType() != space::kSpaceTypeBumpPointerSpace);
-  }
-  return contains;
-}
 
 }  // namespace collector
 }  // namespace gc
diff --git a/runtime/gc/collector/immune_region.h b/runtime/gc/collector/immune_region.h
index 3ead50104..b60426daf 100644
--- a/runtime/gc/collector/immune_region.h
+++ b/runtime/gc/collector/immune_region.h
@@ -39,35 +39,34 @@ namespace collector {
 class ImmuneRegion {
  public:
   ImmuneRegion();
+
   void Reset();
-  bool AddContinuousSpace(space::ContinuousSpace* space)
-      REQUIRES(Locks::heap_bitmap_lock_);
-  bool ContainsSpace(const space::ContinuousSpace* space) const;
+
+  // Returns true if an object is inside of the immune region (assumed to be marked).
-  bool ContainsObject(const mirror::Object* obj) const ALWAYS_INLINE {
+  ALWAYS_INLINE bool ContainsObject(const mirror::Object* obj) const {
     // Note: Relies on integer underflow behavior.
     return reinterpret_cast<uintptr_t>(obj) - reinterpret_cast<uintptr_t>(begin_) < size_;
   }
+
   void SetBegin(mirror::Object* begin) {
     begin_ = begin;
     UpdateSize();
   }
+
   void SetEnd(mirror::Object* end) {
     end_ = end;
     UpdateSize();
   }
-  mirror::Object* Begin() {
+  mirror::Object* Begin() const {
     return begin_;
   }
-  mirror::Object* End() {
+
+  mirror::Object* End() const {
     return end_;
   }
 
  private:
-  bool IsEmpty() const {
-    return size_ == 0;
-  }
   void UpdateSize() {
     size_ = reinterpret_cast<uintptr_t>(end_) - reinterpret_cast<uintptr_t>(begin_);
   }
diff --git a/runtime/gc/collector/immune_spaces.cc b/runtime/gc/collector/immune_spaces.cc
new file mode 100644
index 000000000..8f9a9e294
--- /dev/null
+++ b/runtime/gc/collector/immune_spaces.cc
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#include "immune_spaces.h" + +#include "gc/space/space-inl.h" +#include "mirror/object.h" + +namespace art { +namespace gc { +namespace collector { + +void ImmuneSpaces::Reset() { + spaces_.clear(); + largest_immune_region_.Reset(); +} + +void ImmuneSpaces::CreateLargestImmuneRegion() { + uintptr_t best_begin = 0u; + uintptr_t best_end = 0u; + uintptr_t cur_begin = 0u; + uintptr_t cur_end = 0u; + // TODO: If the last space is an image space, we may include its oat file in the immune region. + // This could potentially hide heap corruption bugs if there is invalid pointers that point into + // the boot oat code + for (space::ContinuousSpace* space : GetSpaces()) { + uintptr_t space_begin = reinterpret_cast(space->Begin()); + uintptr_t space_end = reinterpret_cast(space->Limit()); + if (space->IsImageSpace()) { + // For the boot image, the boot oat file is always directly after. For app images it may not + // be if the app image was mapped at a random address. + space::ImageSpace* image_space = space->AsImageSpace(); + // Update the end to include the other non-heap sections. + space_end = RoundUp(reinterpret_cast(image_space->GetImageEnd()), kPageSize); + uintptr_t oat_begin = reinterpret_cast(image_space->GetOatFileBegin()); + uintptr_t oat_end = reinterpret_cast(image_space->GetOatFileEnd()); + if (space_end == oat_begin) { + DCHECK_GE(oat_end, oat_begin); + space_end = oat_end; + } + } + if (cur_begin == 0u) { + cur_begin = space_begin; + cur_end = space_end; + } else if (cur_end == space_begin) { + // Extend current region. + cur_end = space_end; + } else { + // Reset. + cur_begin = 0; + cur_end = 0; + } + if (cur_end - cur_begin > best_end - best_begin) { + // Improvement, update the best range. + best_begin = cur_begin; + best_end = cur_end; + } + } + largest_immune_region_.SetBegin(reinterpret_cast(best_begin)); + largest_immune_region_.SetEnd(reinterpret_cast(best_end)); +} + +void ImmuneSpaces::AddSpace(space::ContinuousSpace* space) { + DCHECK(spaces_.find(space) == spaces_.end()) << *space; + // Bind live to mark bitmap if necessary. + if (space->GetLiveBitmap() != space->GetMarkBitmap()) { + CHECK(space->IsContinuousMemMapAllocSpace()); + space->AsContinuousMemMapAllocSpace()->BindLiveToMarkBitmap(); + } + spaces_.insert(space); + CreateLargestImmuneRegion(); +} + +bool ImmuneSpaces::CompareByBegin::operator()(space::ContinuousSpace* a, space::ContinuousSpace* b) + const { + return a->Begin() < b->Begin(); +} + +bool ImmuneSpaces::ContainsSpace(space::ContinuousSpace* space) const { + return spaces_.find(space) != spaces_.end(); +} + +} // namespace collector +} // namespace gc +} // namespace art diff --git a/runtime/gc/collector/immune_spaces.h b/runtime/gc/collector/immune_spaces.h new file mode 100644 index 000000000..72cb60d46 --- /dev/null +++ b/runtime/gc/collector/immune_spaces.h @@ -0,0 +1,95 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ART_RUNTIME_GC_COLLECTOR_IMMUNE_SPACES_H_ +#define ART_RUNTIME_GC_COLLECTOR_IMMUNE_SPACES_H_ + +#include "base/macros.h" +#include "base/mutex.h" +#include "gc/space/space.h" +#include "immune_region.h" + +#include + +namespace art { +namespace gc { +namespace space { +class ContinuousSpace; +} // namespace space + +namespace collector { + +// ImmuneSpaces is a set of spaces which are not going to have any objects become marked during the +// GC. +class ImmuneSpaces { + class CompareByBegin { + public: + bool operator()(space::ContinuousSpace* a, space::ContinuousSpace* b) const; + }; + + public: + ImmuneSpaces() {} + void Reset(); + + // Add a continuous space to the immune spaces set. + void AddSpace(space::ContinuousSpace* space) REQUIRES(Locks::heap_bitmap_lock_); + + // Returns true if an object is inside of the immune region (assumed to be marked). Only returns + // true for the largest immune region. The object can still be inside of an immune space. + ALWAYS_INLINE bool IsInImmuneRegion(const mirror::Object* obj) const { + return largest_immune_region_.ContainsObject(obj); + } + + // Return true if the spaces is contained. + bool ContainsSpace(space::ContinuousSpace* space) const; + + // Return the set of spaces in the immune region. + const std::set& GetSpaces() { + return spaces_; + } + + // Return the associated largest immune region. + const ImmuneRegion& GetLargestImmuneRegion() const { + return largest_immune_region_; + } + + // Return true if the object is contained by any of the immune space.s + ALWAYS_INLINE bool ContainsObject(const mirror::Object* obj) const { + if (largest_immune_region_.ContainsObject(obj)) { + return true; + } + for (space::ContinuousSpace* space : spaces_) { + if (space->HasAddress(obj)) { + return true; + } + } + return false; + } + + private: + // Setup the immune region to the largest continuous set of immune spaces. The immune region is + // just the for the fast path lookup. + void CreateLargestImmuneRegion(); + + std::set spaces_; + ImmuneRegion largest_immune_region_; +}; + +} // namespace collector +} // namespace gc +} // namespace art + +#endif // ART_RUNTIME_GC_COLLECTOR_IMMUNE_SPACES_H_ diff --git a/runtime/gc/collector/immune_spaces_test.cc b/runtime/gc/collector/immune_spaces_test.cc new file mode 100644 index 000000000..f741117bc --- /dev/null +++ b/runtime/gc/collector/immune_spaces_test.cc @@ -0,0 +1,150 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "common_runtime_test.h" +#include "gc/collector/immune_spaces.h" +#include "gc/space/image_space.h" +#include "gc/space/space-inl.h" +#include "oat_file.h" +#include "thread-inl.h" + +namespace art { +namespace mirror { +class Object; +} // namespace mirror +namespace gc { +namespace collector { + +class ImmuneSpacesTest : public CommonRuntimeTest {}; + +class DummySpace : public space::ContinuousSpace { + public: + DummySpace(uint8_t* begin, uint8_t* end) + : ContinuousSpace("DummySpace", + space::kGcRetentionPolicyNeverCollect, + begin, + end, + /*limit*/end) {} + + space::SpaceType GetType() const OVERRIDE { + return space::kSpaceTypeMallocSpace; + } + + bool CanMoveObjects() const OVERRIDE { + return false; + } + + accounting::ContinuousSpaceBitmap* GetLiveBitmap() const OVERRIDE { + return nullptr; + } + + accounting::ContinuousSpaceBitmap* GetMarkBitmap() const OVERRIDE { + return nullptr; + } +}; + +TEST_F(ImmuneSpacesTest, AppendBasic) { + ImmuneSpaces spaces; + uint8_t* const base = reinterpret_cast(0x1000); + DummySpace a(base, base + 45 * KB); + DummySpace b(a.Limit(), a.Limit() + 813 * KB); + { + WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_); + spaces.AddSpace(&a); + spaces.AddSpace(&b); + } + EXPECT_TRUE(spaces.ContainsSpace(&a)); + EXPECT_TRUE(spaces.ContainsSpace(&b)); + EXPECT_EQ(reinterpret_cast(spaces.GetLargestImmuneRegion().Begin()), a.Begin()); + EXPECT_EQ(reinterpret_cast(spaces.GetLargestImmuneRegion().End()), b.Limit()); +} + +class DummyImageSpace : public space::ImageSpace { + public: + DummyImageSpace(MemMap* map, accounting::ContinuousSpaceBitmap* live_bitmap) + : ImageSpace("DummyImageSpace", + /*image_location*/"", + map, + live_bitmap, + map->End()) {} + + // OatSize is how large the oat file is after the image. + static DummyImageSpace* Create(size_t size, size_t oat_size) { + std::string error_str; + std::unique_ptr map(MemMap::MapAnonymous("DummyImageSpace", + nullptr, + size, + PROT_READ | PROT_WRITE, + /*low_4gb*/true, + /*reuse*/false, + &error_str)); + if (map == nullptr) { + LOG(ERROR) << error_str; + return nullptr; + } + std::unique_ptr live_bitmap( + accounting::ContinuousSpaceBitmap::Create("bitmap", map->Begin(), map->Size())); + if (live_bitmap == nullptr) { + return nullptr; + } + // Create image header. 
+    ImageSection sections[ImageHeader::kSectionCount];
+    new (map->Begin()) ImageHeader(
+        /*image_begin*/PointerToLowMemUInt32(map->Begin()),
+        /*image_size*/map->Size(),
+        sections,
+        /*image_roots*/PointerToLowMemUInt32(map->Begin()) + 1,
+        /*oat_checksum*/0u,
+        /*oat_file_begin*/PointerToLowMemUInt32(map->End()),
+        /*oat_data_begin*/PointerToLowMemUInt32(map->End()),
+        /*oat_data_end*/PointerToLowMemUInt32(map->End() + oat_size),
+        /*oat_file_end*/PointerToLowMemUInt32(map->End() + oat_size),
+        /*pointer_size*/sizeof(void*),
+        /*compile_pic*/false);
+    return new DummyImageSpace(map.release(), live_bitmap.release());
+  }
+};
+
+TEST_F(ImmuneSpacesTest, AppendAfterImage) {
+  ImmuneSpaces spaces;
+  constexpr size_t image_size = 123 * kPageSize;
+  constexpr size_t image_oat_size = 321 * kPageSize;
+  std::unique_ptr<DummyImageSpace> image_space(DummyImageSpace::Create(image_size,
+                                                                       image_oat_size));
+  ASSERT_TRUE(image_space != nullptr);
+  const ImageHeader& image_header = image_space->GetImageHeader();
+  EXPECT_EQ(image_header.GetImageSize(), image_size);
+  EXPECT_EQ(static_cast<size_t>(image_header.GetOatFileEnd() - image_header.GetOatFileBegin()),
+            image_oat_size);
+  DummySpace space(image_header.GetOatFileEnd(), image_header.GetOatFileEnd() + 813 * kPageSize);
+  EXPECT_NE(image_space->Limit(), space.Begin());
+  {
+    WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
+    spaces.AddSpace(image_space.get());
+    spaces.AddSpace(&space);
+  }
+  EXPECT_TRUE(spaces.ContainsSpace(image_space.get()));
+  EXPECT_TRUE(spaces.ContainsSpace(&space));
+  // CreateLargestImmuneRegion should have coalesced the two spaces since the oat code after the
+  // image prevents gaps.
+  // Check that we have a continuous region.
+  EXPECT_EQ(reinterpret_cast<uint8_t*>(spaces.GetLargestImmuneRegion().Begin()),
+            image_space->Begin());
+  EXPECT_EQ(reinterpret_cast<uint8_t*>(spaces.GetLargestImmuneRegion().End()), space.Limit());
+}
+
+}  // namespace collector
+}  // namespace gc
+}  // namespace art
diff --git a/runtime/gc/collector/mark_compact.cc b/runtime/gc/collector/mark_compact.cc
index f561764ce..ce6467a6c 100644
--- a/runtime/gc/collector/mark_compact.cc
+++ b/runtime/gc/collector/mark_compact.cc
@@ -45,7 +45,7 @@ void MarkCompact::BindBitmaps() {
   for (const auto& space : GetHeap()->GetContinuousSpaces()) {
     if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect ||
         space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
-      CHECK(immune_region_.AddContinuousSpace(space)) << "Failed to add space " << *space;
+      immune_spaces_.AddSpace(space);
     }
   }
 }
@@ -115,7 +115,7 @@ void MarkCompact::InitializePhase() {
   TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
   mark_stack_ = heap_->GetMarkStack();
   DCHECK(mark_stack_ != nullptr);
-  immune_region_.Reset();
+  immune_spaces_.Reset();
   CHECK(space_->CanMoveObjects()) << "Attempting compact non-movable space from " << *space_;
   // TODO: I don't think we should need heap bitmap lock to Get the mark bitmap.
   ReaderMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
@@ -148,7 +148,7 @@ inline mirror::Object* MarkCompact::MarkObject(mirror::Object* obj) {
     // Verify all the objects have the correct forward pointer installed.
     obj->AssertReadBarrierPointer();
   }
-  if (!immune_region_.ContainsObject(obj)) {
+  if (!immune_spaces_.IsInImmuneRegion(obj)) {
     if (objects_before_forwarding_->HasAddress(obj)) {
       if (!objects_before_forwarding_->Set(obj)) {
         MarkStackPush(obj);  // This object was not previously marked.
@@ -218,7 +218,7 @@ void MarkCompact::UpdateAndMarkModUnion() {
   TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
   for (auto& space : heap_->GetContinuousSpaces()) {
     // If the space is immune then we need to mark the references to other spaces.
-    if (immune_region_.ContainsSpace(space)) {
+    if (immune_spaces_.ContainsSpace(space)) {
       accounting::ModUnionTable* table = heap_->FindModUnionTableFromSpace(space);
       if (table != nullptr) {
         // TODO: Improve naming.
@@ -475,7 +475,7 @@ inline mirror::Object* MarkCompact::GetMarkedForwardAddress(mirror::Object* obj)
 }
 
 mirror::Object* MarkCompact::IsMarked(mirror::Object* object) {
-  if (immune_region_.ContainsObject(object)) {
+  if (immune_spaces_.IsInImmuneRegion(object)) {
     return object;
   }
   if (updating_references_) {
@@ -498,7 +498,7 @@ void MarkCompact::SweepSystemWeaks() {
 }
 
 bool MarkCompact::ShouldSweepSpace(space::ContinuousSpace* space) const {
-  return space != space_ && !immune_region_.ContainsSpace(space);
+  return space != space_ && !immune_spaces_.ContainsSpace(space);
 }
 
 class MoveObjectVisitor {
diff --git a/runtime/gc/collector/mark_compact.h b/runtime/gc/collector/mark_compact.h
index 8d9193905..8a1209416 100644
--- a/runtime/gc/collector/mark_compact.h
+++ b/runtime/gc/collector/mark_compact.h
@@ -26,7 +26,7 @@
 #include "garbage_collector.h"
 #include "gc_root.h"
 #include "gc/accounting/heap_bitmap.h"
-#include "immune_region.h"
+#include "immune_spaces.h"
 #include "lock_word.h"
 #include "object_callbacks.h"
 #include "offsets.h"
@@ -194,8 +194,8 @@ class MarkCompact : public GarbageCollector {
 
   accounting::ObjectStack* mark_stack_;
 
-  // Immune region, every object inside the immune region is assumed to be marked.
-  ImmuneRegion immune_region_;
+  // Every object inside the immune spaces is assumed to be marked.
+  ImmuneSpaces immune_spaces_;
 
   // Bump pointer space which we are collecting.
   space::BumpPointerSpace* space_;
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index db516a0a8..5427f8856 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -86,7 +86,7 @@ void MarkSweep::BindBitmaps() {
   // Mark all of the spaces we never collect as immune.
   for (const auto& space : GetHeap()->GetContinuousSpaces()) {
     if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect) {
-      CHECK(immune_region_.AddContinuousSpace(space)) << "Failed to add space " << *space;
+      immune_spaces_.AddSpace(space);
     }
   }
 }
@@ -115,7 +115,7 @@ void MarkSweep::InitializePhase() {
   TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
   mark_stack_ = heap_->GetMarkStack();
   DCHECK(mark_stack_ != nullptr);
-  immune_region_.Reset();
+  immune_spaces_.Reset();
   no_reference_class_count_.StoreRelaxed(0);
   normal_count_.StoreRelaxed(0);
   class_count_.StoreRelaxed(0);
@@ -268,16 +268,41 @@ void MarkSweep::MarkingPhase() {
   PreCleanCards();
 }
 
+class ScanObjectVisitor {
+ public:
+  explicit ScanObjectVisitor(MarkSweep* const mark_sweep) ALWAYS_INLINE
+      : mark_sweep_(mark_sweep) {}
+
+  void operator()(mirror::Object* obj) const
+      ALWAYS_INLINE
+      REQUIRES(Locks::heap_bitmap_lock_)
+      SHARED_REQUIRES(Locks::mutator_lock_) {
+    if (kCheckLocks) {
+      Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
+      Locks::heap_bitmap_lock_->AssertExclusiveHeld(Thread::Current());
+    }
+    mark_sweep_->ScanObject(obj);
+  }
+
+ private:
+  MarkSweep* const mark_sweep_;
+};
+
 void MarkSweep::UpdateAndMarkModUnion() {
-  for (const auto& space : heap_->GetContinuousSpaces()) {
-    if (immune_region_.ContainsSpace(space)) {
-      const char* name = space->IsZygoteSpace()
-          ? "UpdateAndMarkZygoteModUnionTable"
-          : "UpdateAndMarkImageModUnionTable";
-      TimingLogger::ScopedTiming t(name, GetTimings());
-      accounting::ModUnionTable* mod_union_table = heap_->FindModUnionTableFromSpace(space);
-      CHECK(mod_union_table != nullptr);
+  for (const auto& space : immune_spaces_.GetSpaces()) {
+    const char* name = space->IsZygoteSpace()
+        ? "UpdateAndMarkZygoteModUnionTable"
+        : "UpdateAndMarkImageModUnionTable";
+    DCHECK(space->IsZygoteSpace() || space->IsImageSpace()) << *space;
+    TimingLogger::ScopedTiming t(name, GetTimings());
+    accounting::ModUnionTable* mod_union_table = heap_->FindModUnionTableFromSpace(space);
+    if (mod_union_table != nullptr) {
       mod_union_table->UpdateAndMarkReferences(this);
+    } else {
+      // No mod-union table, scan all the live bits. This can only occur for app images.
+      space->GetLiveBitmap()->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
+                                               reinterpret_cast<uintptr_t>(space->End()),
+                                               ScanObjectVisitor(this));
     }
   }
 }
@@ -460,7 +485,7 @@ inline void MarkSweep::MarkObjectNonNull(mirror::Object* obj,
     // Verify all the objects have the correct pointer installed.
     obj->AssertReadBarrierPointer();
   }
-  if (immune_region_.ContainsObject(obj)) {
+  if (immune_spaces_.IsInImmuneRegion(obj)) {
     if (kCountMarkedObjects) {
       ++mark_immune_count_;
     }
@@ -501,7 +526,7 @@ inline bool MarkSweep::MarkObjectParallel(mirror::Object* obj) {
     // Verify all the objects have the correct pointer installed.
     obj->AssertReadBarrierPointer();
   }
-  if (immune_region_.ContainsObject(obj)) {
+  if (immune_spaces_.IsInImmuneRegion(obj)) {
     DCHECK(IsMarked(obj) != nullptr);
     return false;
   }
@@ -606,26 +631,6 @@ void MarkSweep::MarkConcurrentRoots(VisitRootFlags flags) {
       this, static_cast<VisitRootFlags>(flags | kVisitRootFlagNonMoving));
 }
 
-class ScanObjectVisitor {
- public:
-  explicit ScanObjectVisitor(MarkSweep* const mark_sweep) ALWAYS_INLINE
-      : mark_sweep_(mark_sweep) {}
-
-  void operator()(mirror::Object* obj) const
-      ALWAYS_INLINE
-      REQUIRES(Locks::heap_bitmap_lock_)
-      SHARED_REQUIRES(Locks::mutator_lock_) {
-    if (kCheckLocks) {
-      Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
-      Locks::heap_bitmap_lock_->AssertExclusiveHeld(Thread::Current());
-    }
-    mark_sweep_->ScanObject(obj);
-  }
-
- private:
-  MarkSweep* const mark_sweep_;
-};
-
 class DelayReferenceReferentVisitor {
  public:
   explicit DelayReferenceReferentVisitor(MarkSweep* collector) : collector_(collector) {}
@@ -1193,7 +1198,8 @@ void MarkSweep::SweepArray(accounting::ObjectStack* allocations, bool swap_bitma
   std::vector<space::ContinuousSpace*> sweep_spaces;
   space::ContinuousSpace* non_moving_space = nullptr;
   for (space::ContinuousSpace* space : heap_->GetContinuousSpaces()) {
-    if (space->IsAllocSpace() && !immune_region_.ContainsSpace(space) &&
+    if (space->IsAllocSpace() &&
+        !immune_spaces_.ContainsSpace(space) &&
         space->GetLiveBitmap() != nullptr) {
       if (space == heap_->GetNonMovingSpace()) {
         non_moving_space = space;
@@ -1422,7 +1428,7 @@ void MarkSweep::ProcessMarkStack(bool paused) {
 }
 
 inline mirror::Object* MarkSweep::IsMarked(mirror::Object* object) {
-  if (immune_region_.ContainsObject(object)) {
+  if (immune_spaces_.IsInImmuneRegion(object)) {
     return object;
   }
   if (current_space_bitmap_->HasAddress(object)) {
diff --git a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h
index 8f7df78d5..245f96bdb 100644
--- a/runtime/gc/collector/mark_sweep.h
+++ b/runtime/gc/collector/mark_sweep.h
@@ -26,7 +26,7 @@
 #include "garbage_collector.h"
 #include "gc_root.h"
 #include "gc/accounting/heap_bitmap.h"
-#include "immune_region.h"
+#include "immune_spaces.h"
 #include "object_callbacks.h"
 #include "offsets.h"
@@ -314,8 +314,9 @@ class MarkSweep : public GarbageCollector {
 
   accounting::ObjectStack* mark_stack_;
 
-  // Immune region, every object inside the immune range is assumed to be marked.
-  ImmuneRegion immune_region_;
+  // Every object inside the immune spaces is assumed to be marked. Immune spaces that aren't in
+  // the largest immune region are handled by the normal marking logic.
+  ImmuneSpaces immune_spaces_;
 
   // Parallel finger.
   AtomicInteger atomic_finger_;
 
diff --git a/runtime/gc/collector/partial_mark_sweep.cc b/runtime/gc/collector/partial_mark_sweep.cc
index 15f782aea..984779484 100644
--- a/runtime/gc/collector/partial_mark_sweep.cc
+++ b/runtime/gc/collector/partial_mark_sweep.cc
@@ -39,7 +39,7 @@ void PartialMarkSweep::BindBitmaps() {
   for (const auto& space : GetHeap()->GetContinuousSpaces()) {
     if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
       CHECK(space->IsZygoteSpace());
-      CHECK(immune_region_.AddContinuousSpace(space)) << "Failed to add space " << *space;
+      immune_spaces_.AddSpace(space);
     }
   }
 }
diff --git a/runtime/gc/collector/semi_space-inl.h b/runtime/gc/collector/semi_space-inl.h
index 06d20f583..12cf3dbf9 100644
--- a/runtime/gc/collector/semi_space-inl.h
+++ b/runtime/gc/collector/semi_space-inl.h
@@ -74,7 +74,7 @@ inline void SemiSpace::MarkObject(
       MarkStackPush(forward_address);
     }
     obj_ptr->Assign(forward_address);
-  } else if (!collect_from_space_only_ && !immune_region_.ContainsObject(obj)) {
+  } else if (!collect_from_space_only_ && !immune_spaces_.IsInImmuneRegion(obj)) {
     BitmapSetSlowPathVisitor visitor(this);
     if (!mark_bitmap_->Set(obj, visitor)) {
       // This object was not previously marked.
diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc
index 7f57f30b2..e9497a222 100644
--- a/runtime/gc/collector/semi_space.cc
+++ b/runtime/gc/collector/semi_space.cc
@@ -66,8 +66,9 @@ void SemiSpace::BindBitmaps() {
   for (const auto& space : GetHeap()->GetContinuousSpaces()) {
     if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect ||
         space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
-      CHECK(immune_region_.AddContinuousSpace(space)) << "Failed to add space " << *space;
+      immune_spaces_.AddSpace(space);
     } else if (space->GetLiveBitmap() != nullptr) {
+      // TODO: We can probably also add this space to the immune region.
       if (space == to_space_ || collect_from_space_only_) {
         if (collect_from_space_only_) {
           // Bind the bitmaps of the main free list space and the non-moving space we are doing a
@@ -144,7 +145,7 @@ void SemiSpace::InitializePhase() {
   TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
   mark_stack_ = heap_->GetMarkStack();
   DCHECK(mark_stack_ != nullptr);
-  immune_region_.Reset();
+  immune_spaces_.Reset();
   is_large_object_space_immune_ = false;
   saved_bytes_ = 0;
   bytes_moved_ = 0;
@@ -376,7 +377,13 @@ void SemiSpace::MarkReachableObjects() {
           << "generational_=" << generational_ << " "
           << "collect_from_space_only_=" << collect_from_space_only_;
       accounting::RememberedSet* rem_set = GetHeap()->FindRememberedSetFromSpace(space);
-      CHECK_EQ(rem_set != nullptr, kUseRememberedSet);
+      if (kUseRememberedSet) {
+        // App images currently do not have remembered sets.
+        DCHECK((space->IsImageSpace() && space != heap_->GetBootImageSpace()) ||
+               rem_set != nullptr);
+      } else {
+        DCHECK(rem_set == nullptr);
+      }
       if (rem_set != nullptr) {
         TimingLogger::ScopedTiming t2("UpdateAndMarkRememberedSet", GetTimings());
         rem_set->UpdateAndMarkReferences(from_space_, this);
@@ -767,7 +774,8 @@ mirror::Object* SemiSpace::IsMarked(mirror::Object* obj) {
   if (from_space_->HasAddress(obj)) {
     // Returns either the forwarding address or null.
     return GetForwardingAddressInFromSpace(obj);
-  } else if (collect_from_space_only_ || immune_region_.ContainsObject(obj) ||
+  } else if (collect_from_space_only_ ||
+             immune_spaces_.IsInImmuneRegion(obj) ||
              to_space_->HasAddress(obj)) {
     return obj;  // Already forwarded, must be marked.
   }
diff --git a/runtime/gc/collector/semi_space.h b/runtime/gc/collector/semi_space.h
index b9246ca2f..a90590411 100644
--- a/runtime/gc/collector/semi_space.h
+++ b/runtime/gc/collector/semi_space.h
@@ -25,7 +25,7 @@
 #include "garbage_collector.h"
 #include "gc_root.h"
 #include "gc/accounting/heap_bitmap.h"
-#include "immune_region.h"
+#include "immune_spaces.h"
 #include "mirror/object_reference.h"
 #include "object_callbacks.h"
 #include "offsets.h"
@@ -201,8 +201,8 @@ class SemiSpace : public GarbageCollector {
   // object.
   accounting::ObjectStack* mark_stack_;
 
-  // Immune region, every object inside the immune region is assumed to be marked.
-  ImmuneRegion immune_region_;
+  // Every object inside the immune spaces is assumed to be marked.
+  ImmuneSpaces immune_spaces_;
 
   // If true, the large object space is immune.
   bool is_large_object_space_immune_;
diff --git a/runtime/gc/space/image_space.h b/runtime/gc/space/image_space.h
index 99207426a..babd672ce 100644
--- a/runtime/gc/space/image_space.h
+++ b/runtime/gc/space/image_space.h
@@ -119,7 +119,22 @@ class ImageSpace : public MemMapSpace {
                                bool* has_data,
                                bool *is_global_cache);
 
-  private:
+  // Return the end of the image which includes non-heap objects such as ArtMethods and ArtFields.
+  uint8_t* GetImageEnd() const {
+    return Begin() + GetImageHeader().GetImageSize();
+  }
+
+  // Return the start of the associated oat file.
+  uint8_t* GetOatFileBegin() const {
+    return GetImageHeader().GetOatFileBegin();
+  }
+
+  // Return the end of the associated oat file.
+  uint8_t* GetOatFileEnd() const {
+    return GetImageHeader().GetOatFileEnd();
+  }
+
+ protected:
   // Tries to initialize an ImageSpace from the given image path,
   // returning null on error.
   //
@@ -157,6 +172,7 @@ class ImageSpace : public MemMapSpace {
 
   const std::string image_location_;
 
+ private:
   DISALLOW_COPY_AND_ASSIGN(ImageSpace);
 };
 
diff --git a/runtime/image.h b/runtime/image.h
index 20e4159b0..555cf5ddb 100644
--- a/runtime/image.h
+++ b/runtime/image.h
@@ -84,7 +84,7 @@ class PACKED(4) ImageHeader {
         image_roots_(0U), pointer_size_(0U), compile_pic_(0) {}
 
   ImageHeader(uint32_t image_begin,
-              uint32_t image_size_,
+              uint32_t image_size,
               ImageSection* sections,
              uint32_t image_roots,
               uint32_t oat_checksum,
@@ -93,7 +93,7 @@ class PACKED(4) ImageHeader {
               uint32_t oat_data_end,
               uint32_t oat_file_end,
               uint32_t pointer_size,
-              bool compile_pic_);
+              bool compile_pic);
 
   bool IsValid() const;
   const char* GetMagic() const;
-- 
2.11.0
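
Reviewer note, not part of the patch: the region-coalescing pass in CreateLargestImmuneRegion is easy to exercise in isolation. Below is a minimal standalone C++ sketch of the same greedy scan over begin-sorted address ranges; the names Interval and LargestContiguousRun are invented for this example, a plain pair stands in for a ContinuousSpace's Begin()/Limit(), and the oat-file extension applied to image spaces is elided.

#include <cstdint>
#include <iostream>
#include <set>
#include <utility>

// [begin, end) address range, standing in for a space's Begin()/Limit().
using Interval = std::pair<uintptr_t, uintptr_t>;

// Single left-to-right scan over intervals sorted by begin address (std::set
// orders pairs by first element, mirroring ImmuneSpaces::CompareByBegin).
// Each interval either extends the current contiguous run or starts a new one;
// the largest run seen so far is kept. Like the patch, 0 is used as the
// "no current run" sentinel, so ranges starting at address 0 are not handled.
Interval LargestContiguousRun(const std::set<Interval>& spaces) {
  uintptr_t best_begin = 0, best_end = 0;
  uintptr_t cur_begin = 0, cur_end = 0;
  for (const Interval& space : spaces) {
    if (cur_begin == 0) {
      // First interval starts the candidate run.
      cur_begin = space.first;
      cur_end = space.second;
    } else if (cur_end == space.first) {
      // Adjacent: extend the current run.
      cur_end = space.second;
    } else {
      // Gap: start a new candidate run at this interval.
      cur_begin = space.first;
      cur_end = space.second;
    }
    if (cur_end - cur_begin > best_end - best_begin) {
      // New largest run.
      best_begin = cur_begin;
      best_end = cur_end;
    }
  }
  return {best_begin, best_end};
}

int main() {
  // Boot image and zygote space adjacent at 0x1000; lone app image at 0x9000.
  std::set<Interval> spaces = {{0x1000, 0x3000}, {0x3000, 0x4000}, {0x9000, 0xa000}};
  Interval r = LargestContiguousRun(spaces);
  std::cout << std::hex << r.first << "-" << r.second << "\n";  // Prints 1000-4000.
  return 0;
}

Because the set keeps the ranges sorted by begin address, one pass suffices and adding a space stays cheap even with several non-adjacent app images, which is the layout the commit message is designed around.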