OSDN Git Service

Use thread-local is_gc_marking flags for the CC collector.
authorHiroshi Yamauchi <yamauchi@google.com>
Tue, 18 Aug 2015 21:47:25 +0000 (14:47 -0700)
committerHiroshi Yamauchi <yamauchi@google.com>
Wed, 19 Aug 2015 19:15:17 +0000 (12:15 -0700)
The currently global is_marking flag is used to check if the read
barrier slow path needs to be taken for GC roots access. Changing it
to a thread-local flag simplifies the fast path check and makes it
easier to do it in assembly code. It also solves the issue that we
need to avoid accessing the global flag during startup, before the heap
and the collector object have been allocated and initialized.

Bug: 12687968
Change-Id: Ibf0dca12f400bf3490188b12dfe96c7de30583e0

runtime/art_method-inl.h
runtime/art_method.cc
runtime/gc/collector/concurrent_copying.cc
runtime/gc/collector/concurrent_copying.h
runtime/read_barrier-inl.h
runtime/thread.h
runtime/thread_list.cc

index 40bb9e1..d38cc56 100644 (file)
@@ -21,6 +21,7 @@
 
 #include "art_field.h"
 #include "base/logging.h"
+#include "class_linker-inl.h"
 #include "dex_file.h"
 #include "dex_file-inl.h"
 #include "gc_root-inl.h"
index b9e13a4..f4c6473 100644 (file)
@@ -20,6 +20,7 @@
 #include "art_field-inl.h"
 #include "art_method-inl.h"
 #include "base/stringpiece.h"
+#include "class_linker-inl.h"
 #include "debugger.h"
 #include "dex_file-inl.h"
 #include "dex_instruction.h"
index 263e678..65e946f 100644 (file)
@@ -190,6 +190,7 @@ class ThreadFlipVisitor : public Closure {
     Thread* self = Thread::Current();
     CHECK(thread == self || thread->IsSuspended() || thread->GetState() == kWaitingPerformingGc)
         << thread->GetState() << " thread " << thread << " self " << self;
+    thread->SetIsGcMarking(true);
     if (use_tlab_ && thread->HasTlab()) {
       if (ConcurrentCopying::kEnableFromSpaceAccountingCheck) {
         // This must come before the revoke.
@@ -454,17 +455,8 @@ void ConcurrentCopying::MarkingPhase() {
     CheckEmptyMarkStack();
     // Re-enable weak ref accesses.
     ReenableWeakRefAccess(self);
-    // Issue an empty checkpoint to ensure no threads are still in the middle of a read barrier
-    // which may have a from-space ref cached in a local variable.
-    IssueEmptyCheckpoint();
     // Marking is done. Disable marking.
-    if (kUseTableLookupReadBarrier) {
-      heap_->rb_table_->ClearAll();
-      DCHECK(heap_->rb_table_->IsAllCleared());
-    }
-    is_mark_stack_push_disallowed_.StoreSequentiallyConsistent(1);
-    is_marking_ = false;  // This disables the read barrier/marking of weak roots.
-    mark_stack_mode_.StoreSequentiallyConsistent(kMarkStackModeOff);
+    DisableMarking();
     CheckEmptyMarkStack();
   }
 
@@ -493,6 +485,68 @@ void ConcurrentCopying::ReenableWeakRefAccess(Thread* self) {
   Runtime::Current()->BroadcastForNewSystemWeaks();
 }
 
+// Checkpoint closure run on every thread once the CC collector's marking
+// phase is over: it clears the thread's thread-local is_gc_marking flag so
+// that the read barrier fast path for GC roots stops taking the slow path.
+class DisableMarkingCheckpoint : public Closure {
+ public:
+  explicit DisableMarkingCheckpoint(ConcurrentCopying* concurrent_copying)
+      : concurrent_copying_(concurrent_copying) {
+  }
+
+  void Run(Thread* thread) OVERRIDE NO_THREAD_SAFETY_ANALYSIS {
+    // Note: self is not necessarily equal to thread since thread may be suspended.
+    Thread* self = Thread::Current();
+    DCHECK(thread == self || thread->IsSuspended() || thread->GetState() == kWaitingPerformingGc)
+        << thread->GetState() << " thread " << thread << " self " << self;
+    // Disable the thread-local is_gc_marking flag.
+    DCHECK(thread->GetIsGcMarking());
+    thread->SetIsGcMarking(false);
+    // If thread is a running mutator, then act on behalf of the garbage collector.
+    // See the code in ThreadList::RunCheckpoint.
+    if (thread->GetState() == kRunnable) {
+      concurrent_copying_->GetBarrier().Pass(self);
+    }
+  }
+
+ private:
+  // The collector whose barrier a runnable mutator passes on its behalf.
+  ConcurrentCopying* const concurrent_copying_;
+};
+
+// Runs DisableMarkingCheckpoint on all threads via the thread list, then
+// waits on the GC barrier until every mutator has passed its checkpoint.
+// The shared mutator lock is released while waiting to avoid blocking
+// suspended threads, then reacquired before returning.
+void ConcurrentCopying::IssueDisableMarkingCheckpoint() {
+  Thread* self = Thread::Current();
+  DisableMarkingCheckpoint check_point(this);
+  ThreadList* thread_list = Runtime::Current()->GetThreadList();
+  gc_barrier_->Init(self, 0);
+  size_t barrier_count = thread_list->RunCheckpoint(&check_point);
+  // If there are no threads to wait which implies that all the checkpoint functions are finished,
+  // then no need to release the mutator lock.
+  if (barrier_count == 0) {
+    return;
+  }
+  // Release locks then wait for all mutator threads to pass the barrier.
+  Locks::mutator_lock_->SharedUnlock(self);
+  {
+    ScopedThreadStateChange tsc(self, kWaitingForCheckPointsToRun);
+    gc_barrier_->Increment(self, barrier_count);
+  }
+  Locks::mutator_lock_->SharedLock(self);
+}
+
+// Turns marking off at the end of the marking phase: clears the global
+// is_marking flag, propagates the change to every thread's thread-local
+// flag via a checkpoint, clears the read barrier table (if in use), and
+// shuts down the mark stack.
+void ConcurrentCopying::DisableMarking() {
+  // Change the global is_marking flag to false. Do a fence before doing a checkpoint to update the
+  // thread-local flags so that a new thread starting up will get the correct is_marking flag.
+  is_marking_ = false;
+  QuasiAtomic::ThreadFenceForConstructor();
+  // Use a checkpoint to turn off the thread-local is_gc_marking flags and to ensure no threads are
+  // still in the middle of a read barrier which may have a from-space ref cached in a local
+  // variable.
+  IssueDisableMarkingCheckpoint();
+  if (kUseTableLookupReadBarrier) {
+    heap_->rb_table_->ClearAll();
+    DCHECK(heap_->rb_table_->IsAllCleared());
+  }
+  // Disallow further mark stack pushes and switch the mark stack mode off.
+  is_mark_stack_push_disallowed_.StoreSequentiallyConsistent(1);
+  mark_stack_mode_.StoreSequentiallyConsistent(kMarkStackModeOff);
+}
+
 void ConcurrentCopying::IssueEmptyCheckpoint() {
   Thread* self = Thread::Current();
   EmptyCheckpoint check_point(this);
@@ -708,6 +762,14 @@ class ConcurrentCopyingVerifyNoFromSpaceRefsObjectVisitor {
 void ConcurrentCopying::VerifyNoFromSpaceReferences() {
   Thread* self = Thread::Current();
   DCHECK(Locks::mutator_lock_->IsExclusiveHeld(self));
+  // Verify that every thread's thread-local is_gc_marking flag is false.
+  {
+    MutexLock mu(self, *Locks::thread_list_lock_);
+    std::list<Thread*> thread_list = Runtime::Current()->GetThreadList()->GetList();
+    for (Thread* thread : thread_list) {
+      CHECK(!thread->GetIsGcMarking());
+    }
+  }
   ConcurrentCopyingVerifyNoFromSpaceRefsObjectVisitor visitor(this);
   // Roots.
   {
index f382448..8efad73 100644 (file)
@@ -180,6 +180,8 @@ class ConcurrentCopying : public GarbageCollector {
   void AssertToSpaceInvariantInNonMovingSpace(mirror::Object* obj, mirror::Object* ref)
       SHARED_REQUIRES(Locks::mutator_lock_);
   void ReenableWeakRefAccess(Thread* self) SHARED_REQUIRES(Locks::mutator_lock_);
+  void DisableMarking() SHARED_REQUIRES(Locks::mutator_lock_);
+  void IssueDisableMarkingCheckpoint() SHARED_REQUIRES(Locks::mutator_lock_);
 
   space::RegionSpace* region_space_;      // The underlying region space.
   std::unique_ptr<Barrier> gc_barrier_;
index 7014813..daae401 100644 (file)
@@ -79,13 +79,9 @@ inline MirrorType* ReadBarrier::BarrierForRoot(MirrorType** root,
   MirrorType* ref = *root;
   const bool with_read_barrier = kReadBarrierOption == kWithReadBarrier;
   if (with_read_barrier && kUseBakerReadBarrier) {
-    if (kMaybeDuringStartup && IsDuringStartup()) {
-      // During startup, the heap may not be initialized yet. Just
-      // return the given ref.
-      return ref;
-    }
     // TODO: separate the read barrier code from the collector code more.
-    if (Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->IsMarking()) {
+    Thread* self = Thread::Current();
+    if (self != nullptr && self->GetIsGcMarking()) {
       ref = reinterpret_cast<MirrorType*>(Mark(ref));
     }
     AssertToSpaceInvariant(gc_root_source, ref);
@@ -120,13 +116,9 @@ inline MirrorType* ReadBarrier::BarrierForRoot(mirror::CompressedReference<Mirro
   MirrorType* ref = root->AsMirrorPtr();
   const bool with_read_barrier = kReadBarrierOption == kWithReadBarrier;
   if (with_read_barrier && kUseBakerReadBarrier) {
-    if (kMaybeDuringStartup && IsDuringStartup()) {
-      // During startup, the heap may not be initialized yet. Just
-      // return the given ref.
-      return ref;
-    }
     // TODO: separate the read barrier code from the collector code more.
-    if (Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->IsMarking()) {
+    Thread* self = Thread::Current();
+    if (self != nullptr && self->GetIsGcMarking()) {
       ref = reinterpret_cast<MirrorType*>(Mark(ref));
     }
     AssertToSpaceInvariant(gc_root_source, ref);
index e4ad7f3..959af95 100644 (file)
@@ -782,6 +782,16 @@ class Thread {
     tls32_.debug_method_entry_ = false;
   }
 
+  // Returns this thread's thread-local copy of the CC collector's
+  // is_marking flag. Only valid when the read barrier is in use.
+  bool GetIsGcMarking() const {
+    CHECK(kUseReadBarrier);
+    return tls32_.is_gc_marking;
+  }
+
+  // Sets this thread's thread-local copy of the CC collector's is_marking
+  // flag (updated by the GC via checkpoints). Only valid when the read
+  // barrier is in use.
+  void SetIsGcMarking(bool is_marking) {
+    CHECK(kUseReadBarrier);
+    tls32_.is_gc_marking = is_marking;
+  }
+
   bool GetWeakRefAccessEnabled() const {
     CHECK(kUseReadBarrier);
     return tls32_.weak_ref_access_enabled;
@@ -1093,7 +1103,8 @@ class Thread {
       daemon(is_daemon), throwing_OutOfMemoryError(false), no_thread_suspension(0),
       thread_exit_check_count(0), handling_signal_(false),
       deoptimization_return_value_is_reference(false), suspended_at_suspend_check(false),
-      ready_for_debug_invoke(false), debug_method_entry_(false), weak_ref_access_enabled(true) {
+      ready_for_debug_invoke(false), debug_method_entry_(false), is_gc_marking(false),
+      weak_ref_access_enabled(true) {
     }
 
     union StateAndFlags state_and_flags;
@@ -1151,6 +1162,11 @@ class Thread {
     // event for the debugger.
     bool32_t debug_method_entry_;
 
+    // True if the GC is in the marking phase. This is used for the CC collector only. This is
+    // thread local so that we can simplify the logic to check for the fast path of read barriers of
+    // GC roots.
+    bool32_t is_gc_marking;
+
     // True if the thread is allowed to access a weak ref (Reference::GetReferent() and system
     // weaks) and to potentially mark an object alive/gray. This is used for concurrent reference
     // processing of the CC collector only. This is thread local so that we can enable/disable weak
index d449f42..d63781b 100644 (file)
@@ -1175,7 +1175,10 @@ void ThreadList::Register(Thread* self) {
   CHECK(!Contains(self));
   list_.push_back(self);
   if (kUseReadBarrier) {
-    // Initialize this according to the state of the CC collector.
+    // Initialize according to the state of the CC collector.
+    bool is_gc_marking =
+        Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->IsMarking();
+    self->SetIsGcMarking(is_gc_marking);
     bool weak_ref_access_enabled =
         Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->IsWeakRefAccessEnabled();
     self->SetWeakRefAccessEnabled(weak_ref_access_enabled);