From aa5168291c46f9b418d989bccf2d8e09338a83e6 Mon Sep 17 00:00:00 2001 From: Mathieu Chartier Date: Fri, 2 Oct 2015 15:53:37 -0700 Subject: [PATCH] Add exclusion between instrumentation and GC Instrumentation calls VisitClasses while exclusively holding the mutator lock. This can cause deadlocks since VisitClasses needs to decode JNI weak globals. If the system weak slow path flag is set, then we wait holding locks while exclusively holding the mutator lock. This causes a deadlock since the GC cannot acquire the mutator lock to sweep system weaks. This fixes a deadlock seen in one of the tracing tests. Change-Id: I580152118e068a70f309dcc19df4144afec835dd --- runtime/Android.mk | 1 + runtime/debugger.cc | 14 ++++++++-- runtime/debugger.h | 2 +- runtime/gc/collector_type.h | 2 ++ runtime/gc/gc_cause.cc | 1 + runtime/gc/gc_cause.h | 2 ++ runtime/gc/heap.cc | 12 +++++--- runtime/gc/heap.h | 3 ++ runtime/gc/scoped_gc_critical_section.cc | 41 ++++++++++++++++++++++++++++ runtime/gc/scoped_gc_critical_section.h | 47 ++++++++++++++++++++++++++++++++ runtime/instrumentation.h | 31 +++++++++++++++------ runtime/instrumentation_test.cc | 22 +++++++++++++++ runtime/trace.cc | 20 +++++++++++--- 13 files changed, 179 insertions(+), 19 deletions(-) create mode 100644 runtime/gc/scoped_gc_critical_section.cc create mode 100644 runtime/gc/scoped_gc_critical_section.h diff --git a/runtime/Android.mk b/runtime/Android.mk index 059c4cdd5..8d81f2a7f 100644 --- a/runtime/Android.mk +++ b/runtime/Android.mk @@ -67,6 +67,7 @@ LIBART_COMMON_SRC_FILES := \ gc/heap.cc \ gc/reference_processor.cc \ gc/reference_queue.cc \ + gc/scoped_gc_critical_section.cc \ gc/space/bump_pointer_space.cc \ gc/space/dlmalloc_space.cc \ gc/space/image_space.cc \ diff --git a/runtime/debugger.cc b/runtime/debugger.cc index d24b4fb54..b19381d87 100644 --- a/runtime/debugger.cc +++ b/runtime/debugger.cc @@ -30,6 +30,7 @@ #include "dex_instruction.h" #include "gc/accounting/card_table-inl.h" #include 
"gc/allocation_record.h" +#include "gc/scoped_gc_critical_section.h" #include "gc/space/large_object_space.h" #include "gc/space/space-inl.h" #include "handle_scope.h" @@ -559,14 +560,15 @@ void Dbg::GoActive() { return; } + Thread* const self = Thread::Current(); { // TODO: dalvik only warned if there were breakpoints left over. clear in Dbg::Disconnected? - ReaderMutexLock mu(Thread::Current(), *Locks::breakpoint_lock_); + ReaderMutexLock mu(self, *Locks::breakpoint_lock_); CHECK_EQ(gBreakpoints.size(), 0U); } { - MutexLock mu(Thread::Current(), *Locks::deoptimization_lock_); + MutexLock mu(self, *Locks::deoptimization_lock_); CHECK_EQ(deoptimization_requests_.size(), 0U); CHECK_EQ(full_deoptimization_event_count_, 0U); CHECK_EQ(dex_pc_change_event_ref_count_, 0U); @@ -598,6 +600,10 @@ void Dbg::Disconnected() { Runtime* runtime = Runtime::Current(); Thread* self = Thread::Current(); { + // Required for DisableDeoptimization. + gc::ScopedGCCriticalSection gcs(self, + gc::kGcCauseInstrumentation, + gc::kCollectorTypeInstrumentation); ScopedSuspendAll ssa(__FUNCTION__); ThreadState old_state = self->SetStateUnsafe(kRunnable); // Debugger may not be active at this point. @@ -3162,6 +3168,10 @@ void Dbg::ManageDeoptimization() { } CHECK_EQ(self->GetState(), kRunnable); ScopedThreadSuspension sts(self, kWaitingForDeoptimization); + // Required for ProcessDeoptimizationRequest. + gc::ScopedGCCriticalSection gcs(self, + gc::kGcCauseInstrumentation, + gc::kCollectorTypeInstrumentation); // We need to suspend mutator threads first. 
ScopedSuspendAll ssa(__FUNCTION__); const ThreadState old_state = self->SetStateUnsafe(kRunnable); diff --git a/runtime/debugger.h b/runtime/debugger.h index b4d42de2b..b3617e4bb 100644 --- a/runtime/debugger.h +++ b/runtime/debugger.h @@ -731,7 +731,7 @@ class Dbg { SHARED_REQUIRES(Locks::mutator_lock_); static void ProcessDeoptimizationRequest(const DeoptimizationRequest& request) - REQUIRES(Locks::mutator_lock_); + REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_); static void RequestDeoptimizationLocked(const DeoptimizationRequest& req) REQUIRES(Locks::deoptimization_lock_) SHARED_REQUIRES(Locks::mutator_lock_); diff --git a/runtime/gc/collector_type.h b/runtime/gc/collector_type.h index 95ba380a0..416510d73 100644 --- a/runtime/gc/collector_type.h +++ b/runtime/gc/collector_type.h @@ -40,6 +40,8 @@ enum CollectorType { kCollectorTypeHeapTrim, // A (mostly) concurrent copying collector. kCollectorTypeCC, + // Instrumentation critical section fake collector. + kCollectorTypeInstrumentation, // A homogeneous space compaction collector used in background transition // when both foreground and background collector are CMS. kCollectorTypeHomogeneousSpaceCompact, diff --git a/runtime/gc/gc_cause.cc b/runtime/gc/gc_cause.cc index 6be683df4..84243dfe1 100644 --- a/runtime/gc/gc_cause.cc +++ b/runtime/gc/gc_cause.cc @@ -33,6 +33,7 @@ const char* PrettyCause(GcCause cause) { case kGcCauseDisableMovingGc: return "DisableMovingGc"; case kGcCauseHomogeneousSpaceCompact: return "HomogeneousSpaceCompact"; case kGcCauseTrim: return "HeapTrim"; + case kGcCauseInstrumentation: return "Instrumentation"; default: LOG(FATAL) << "Unreachable"; UNREACHABLE(); diff --git a/runtime/gc/gc_cause.h b/runtime/gc/gc_cause.h index 0536f32df..34c776622 100644 --- a/runtime/gc/gc_cause.h +++ b/runtime/gc/gc_cause.h @@ -39,6 +39,8 @@ enum GcCause { kGcCauseDisableMovingGc, // Not a real GC cause, used when we trim the heap. 
kGcCauseTrim, + // Not a real GC cause, used to implement exclusion between GC and instrumentation. + kGcCauseInstrumentation, // GC triggered for background transition when both foreground and background collector are CMS. kGcCauseHomogeneousSpaceCompact, }; diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc index 7d664faa4..657fcb5f0 100644 --- a/runtime/gc/heap.cc +++ b/runtime/gc/heap.cc @@ -1312,6 +1312,13 @@ void Heap::TrimIndirectReferenceTables(Thread* self) { ATRACE_END(); } +void Heap::StartGC(Thread* self, GcCause cause, CollectorType collector_type) { + MutexLock mu(self, *gc_complete_lock_); + // Ensure there is only one GC at a time. + WaitForGcToCompleteLocked(cause, self); + collector_type_running_ = collector_type; +} + void Heap::TrimSpaces(Thread* self) { { // Need to do this before acquiring the locks since we don't want to get suspended while @@ -1319,10 +1326,7 @@ void Heap::TrimSpaces(Thread* self) { ScopedThreadStateChange tsc(self, kWaitingForGcToComplete); // Pretend we are doing a GC to prevent background compaction from deleting the space we are // trimming. - MutexLock mu(self, *gc_complete_lock_); - // Ensure there is only one GC at a time. - WaitForGcToCompleteLocked(kGcCauseTrim, self); - collector_type_running_ = kCollectorTypeHeapTrim; + StartGC(self, kGcCauseTrim, kCollectorTypeHeapTrim); } ATRACE_BEGIN(__FUNCTION__); const uint64_t start_ns = NanoTime(); diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h index d0d0be382..cc48172f7 100644 --- a/runtime/gc/heap.h +++ b/runtime/gc/heap.h @@ -775,6 +775,8 @@ class Heap { REQUIRES(Locks::mutator_lock_); void LogGC(GcCause gc_cause, collector::GarbageCollector* collector); + void StartGC(Thread* self, GcCause cause, CollectorType collector_type) + REQUIRES(!*gc_complete_lock_); void FinishGC(Thread* self, collector::GcType gc_type) REQUIRES(!*gc_complete_lock_); // Create a mem map with a preferred base address. 
@@ -1325,6 +1327,7 @@ class Heap { friend class collector::MarkSweep; friend class collector::SemiSpace; friend class ReferenceQueue; + friend class ScopedGCCriticalSection; friend class VerifyReferenceCardVisitor; friend class VerifyReferenceVisitor; friend class VerifyObjectVisitor; diff --git a/runtime/gc/scoped_gc_critical_section.cc b/runtime/gc/scoped_gc_critical_section.cc new file mode 100644 index 000000000..e7786a154 --- /dev/null +++ b/runtime/gc/scoped_gc_critical_section.cc @@ -0,0 +1,41 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "scoped_gc_critical_section.h" + +#include "gc/collector_type.h" +#include "gc/heap.h" +#include "runtime.h" +#include "thread-inl.h" + +namespace art { +namespace gc { + +ScopedGCCriticalSection::ScopedGCCriticalSection(Thread* self, + GcCause cause, + CollectorType collector_type) + : self_(self) { + Runtime::Current()->GetHeap()->StartGC(self, cause, collector_type); + old_cause_ = self->StartAssertNoThreadSuspension("ScopedGCCriticalSection"); +} +ScopedGCCriticalSection::~ScopedGCCriticalSection() { + self_->EndAssertNoThreadSuspension(old_cause_); + Runtime::Current()->GetHeap()->FinishGC(self_, collector::kGcTypeNone); +} + +} // namespace gc +} // namespace art + diff --git a/runtime/gc/scoped_gc_critical_section.h b/runtime/gc/scoped_gc_critical_section.h new file mode 100644 index 000000000..ec93bca80 --- /dev/null +++ b/runtime/gc/scoped_gc_critical_section.h @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ART_RUNTIME_GC_SCOPED_GC_CRITICAL_SECTION_H_ +#define ART_RUNTIME_GC_SCOPED_GC_CRITICAL_SECTION_H_ + +#include "base/mutex.h" +#include "collector_type.h" +#include "gc_cause.h" + +namespace art { + +class Thread; + +namespace gc { + +// Wait until the GC is finished and then prevent GC from starting until the destructor. Used +// to prevent deadlocks in places where we call ClassLinker::VisitClass with all the threads +// suspended. 
+class ScopedGCCriticalSection { + public: + ScopedGCCriticalSection(Thread* self, GcCause cause, CollectorType collector_type) + ACQUIRE(Roles::uninterruptible_); + ~ScopedGCCriticalSection() RELEASE(Roles::uninterruptible_); + + private: + Thread* const self_; + const char* old_cause_; +}; + +} // namespace gc +} // namespace art + +#endif // ART_RUNTIME_GC_SCOPED_GC_CRITICAL_SECTION_H_ diff --git a/runtime/instrumentation.h b/runtime/instrumentation.h index 80460562e..612ca14cf 100644 --- a/runtime/instrumentation.h +++ b/runtime/instrumentation.h @@ -146,9 +146,13 @@ class Instrumentation { // Deoptimization. void EnableDeoptimization() - REQUIRES(Locks::mutator_lock_, !deoptimized_methods_lock_); + REQUIRES(Locks::mutator_lock_) + REQUIRES(!deoptimized_methods_lock_); + // Calls UndeoptimizeEverything which may visit class linker classes through ConfigureStubs. void DisableDeoptimization(const char* key) - REQUIRES(Locks::mutator_lock_, !deoptimized_methods_lock_); + REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_) + REQUIRES(!deoptimized_methods_lock_); + bool AreAllMethodsDeoptimized() const { return interpreter_stubs_installed_; } @@ -156,12 +160,17 @@ class Instrumentation { // Executes everything with interpreter. void DeoptimizeEverything(const char* key) - REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_, + REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_) + REQUIRES(!Locks::thread_list_lock_, + !Locks::classlinker_classes_lock_, !deoptimized_methods_lock_); - // Executes everything with compiled code (or interpreter if there is no code). + // Executes everything with compiled code (or interpreter if there is no code). May visit class + // linker classes through ConfigureStubs. 
void UndeoptimizeEverything(const char* key) - REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_, + REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_) + REQUIRES(!Locks::thread_list_lock_, + !Locks::classlinker_classes_lock_, !deoptimized_methods_lock_); // Deoptimize a method by forcing its execution with the interpreter. Nevertheless, a static @@ -183,12 +192,16 @@ class Instrumentation { // Enable method tracing by installing instrumentation entry/exit stubs or interpreter. void EnableMethodTracing(const char* key, bool needs_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners) - REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_, + REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_) + REQUIRES(!Locks::thread_list_lock_, + !Locks::classlinker_classes_lock_, !deoptimized_methods_lock_); // Disable method tracing by uninstalling instrumentation entry/exit stubs or interpreter. void DisableMethodTracing(const char* key) - REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_, + REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_) + REQUIRES(!Locks::thread_list_lock_, + !Locks::classlinker_classes_lock_, !deoptimized_methods_lock_); InterpreterHandlerTable GetInterpreterHandlerTable() const @@ -393,7 +406,9 @@ class Instrumentation { // instrumentation level it needs. Therefore the current instrumentation level // becomes the highest instrumentation level required by a client. 
void ConfigureStubs(const char* key, InstrumentationLevel desired_instrumentation_level) - REQUIRES(Locks::mutator_lock_, !deoptimized_methods_lock_, !Locks::thread_list_lock_, + REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_) + REQUIRES(!deoptimized_methods_lock_, + !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_); void UpdateInterpreterHandlerTable() REQUIRES(Locks::mutator_lock_) { diff --git a/runtime/instrumentation_test.cc b/runtime/instrumentation_test.cc index d98d24691..e4688a21d 100644 --- a/runtime/instrumentation_test.cc +++ b/runtime/instrumentation_test.cc @@ -20,6 +20,7 @@ #include "common_throws.h" #include "class_linker-inl.h" #include "dex_file.h" +#include "gc/scoped_gc_critical_section.h" #include "handle_scope-inl.h" #include "jvalue.h" #include "runtime.h" @@ -151,6 +152,9 @@ class InstrumentationTest : public CommonRuntimeTest { ScopedObjectAccess soa(Thread::Current()); instrumentation::Instrumentation* instr = Runtime::Current()->GetInstrumentation(); ScopedThreadSuspension sts(soa.Self(), kSuspended); + gc::ScopedGCCriticalSection gcs(soa.Self(), + gc::kGcCauseInstrumentation, + gc::kCollectorTypeInstrumentation); ScopedSuspendAll ssa("Instrumentation::ConfigureStubs"); instr->ConfigureStubs(key, level); } @@ -203,6 +207,9 @@ class InstrumentationTest : public CommonRuntimeTest { Runtime* runtime = Runtime::Current(); instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation(); ScopedThreadSuspension sts(self, kSuspended); + gc::ScopedGCCriticalSection gcs(self, + gc::kGcCauseInstrumentation, + gc::kCollectorTypeInstrumentation); ScopedSuspendAll ssa("Single method deoptimization"); if (enable_deoptimization) { instrumentation->EnableDeoptimization(); @@ -216,6 +223,9 @@ class InstrumentationTest : public CommonRuntimeTest { Runtime* runtime = Runtime::Current(); instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation(); ScopedThreadSuspension sts(self, kSuspended); + 
gc::ScopedGCCriticalSection gcs(self, + gc::kGcCauseInstrumentation, + gc::kCollectorTypeInstrumentation); ScopedSuspendAll ssa("Single method undeoptimization"); instrumentation->Undeoptimize(method); if (disable_deoptimization) { @@ -228,6 +238,9 @@ class InstrumentationTest : public CommonRuntimeTest { Runtime* runtime = Runtime::Current(); instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation(); ScopedThreadSuspension sts(self, kSuspended); + gc::ScopedGCCriticalSection gcs(self, + gc::kGcCauseInstrumentation, + gc::kCollectorTypeInstrumentation); ScopedSuspendAll ssa("Full deoptimization"); if (enable_deoptimization) { instrumentation->EnableDeoptimization(); @@ -240,6 +253,9 @@ class InstrumentationTest : public CommonRuntimeTest { Runtime* runtime = Runtime::Current(); instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation(); ScopedThreadSuspension sts(self, kSuspended); + gc::ScopedGCCriticalSection gcs(self, + gc::kGcCauseInstrumentation, + gc::kCollectorTypeInstrumentation); ScopedSuspendAll ssa("Full undeoptimization"); instrumentation->UndeoptimizeEverything(key); if (disable_deoptimization) { @@ -252,6 +268,9 @@ class InstrumentationTest : public CommonRuntimeTest { Runtime* runtime = Runtime::Current(); instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation(); ScopedThreadSuspension sts(self, kSuspended); + gc::ScopedGCCriticalSection gcs(self, + gc::kGcCauseInstrumentation, + gc::kCollectorTypeInstrumentation); ScopedSuspendAll ssa("EnableMethodTracing"); instrumentation->EnableMethodTracing(key, needs_interpreter); } @@ -261,6 +280,9 @@ class InstrumentationTest : public CommonRuntimeTest { Runtime* runtime = Runtime::Current(); instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation(); ScopedThreadSuspension sts(self, kSuspended); + gc::ScopedGCCriticalSection gcs(self, + gc::kGcCauseInstrumentation, + gc::kCollectorTypeInstrumentation); 
ScopedSuspendAll ssa("EnableMethodTracing"); instrumentation->DisableMethodTracing(key); } diff --git a/runtime/trace.cc b/runtime/trace.cc index e2743ceb1..eb0ea6843 100644 --- a/runtime/trace.cc +++ b/runtime/trace.cc @@ -31,6 +31,7 @@ #include "common_throws.h" #include "debugger.h" #include "dex_file-inl.h" +#include "gc/scoped_gc_critical_section.h" #include "instrumentation.h" #include "mirror/class-inl.h" #include "mirror/dex_cache-inl.h" @@ -350,6 +351,10 @@ void Trace::Start(const char* trace_filename, int trace_fd, size_t buffer_size, // Create Trace object. { + // Required since EnableMethodTracing calls ConfigureStubs which visits class linker classes. + gc::ScopedGCCriticalSection gcs(self, + gc::kGcCauseInstrumentation, + gc::kCollectorTypeInstrumentation); ScopedSuspendAll ssa(__FUNCTION__); MutexLock mu(self, *Locks::trace_lock_); if (the_trace_ != nullptr) { @@ -464,9 +469,10 @@ void Trace::Pause() { Runtime* runtime = Runtime::Current(); Trace* the_trace = nullptr; + Thread* const self = Thread::Current(); pthread_t sampling_pthread = 0U; { - MutexLock mu(Thread::Current(), *Locks::trace_lock_); + MutexLock mu(self, *Locks::trace_lock_); if (the_trace_ == nullptr) { LOG(ERROR) << "Trace pause requested, but no trace currently running"; return; @@ -478,23 +484,26 @@ void Trace::Pause() { if (sampling_pthread != 0U) { { - MutexLock mu(Thread::Current(), *Locks::trace_lock_); + MutexLock mu(self, *Locks::trace_lock_); the_trace_ = nullptr; } CHECK_PTHREAD_CALL(pthread_join, (sampling_pthread, nullptr), "sampling thread shutdown"); sampling_pthread_ = 0U; { - MutexLock mu(Thread::Current(), *Locks::trace_lock_); + MutexLock mu(self, *Locks::trace_lock_); the_trace_ = the_trace; } } if (the_trace != nullptr) { + gc::ScopedGCCriticalSection gcs(self, + gc::kGcCauseInstrumentation, + gc::kCollectorTypeInstrumentation); ScopedSuspendAll ssa(__FUNCTION__); stop_alloc_counting = (the_trace->flags_ & Trace::kTraceCountAllocs) != 0; if (the_trace->trace_mode_ 
== TraceMode::kSampling) { - MutexLock mu(Thread::Current(), *Locks::thread_list_lock_); + MutexLock mu(self, *Locks::thread_list_lock_); runtime->GetThreadList()->ForEach(ClearThreadStackTraceAndClockBase, nullptr); } else { runtime->GetInstrumentation()->DisableMethodTracing(kTracerInstrumentationKey); @@ -530,6 +539,9 @@ void Trace::Resume() { bool enable_stats = (the_trace->flags_ && kTraceCountAllocs) != 0; { + gc::ScopedGCCriticalSection gcs(self, + gc::kGcCauseInstrumentation, + gc::kCollectorTypeInstrumentation); ScopedSuspendAll ssa(__FUNCTION__); // Reenable. -- 2.11.0