&MarkObjectCallback, &ProcessMarkStackPausedCallback, this);
}
+// Runs the soft-reference preservation half of reference processing early,
+// so the later (paused) ProcessReferences phase has less work to do.
+// Delegates to Heap::ProcessSoftReferences with this collector's mark /
+// is-marked callbacks.
+// NOTE(review): |self| is currently unused in the body shown -- presumably
+// kept for signature symmetry with the other phase methods; confirm.
+void MarkSweep::PreProcessReferences(Thread* self) {
+ timings_.NewSplit("PreProcessReferences");
+ GetHeap()->ProcessSoftReferences(timings_, clear_soft_references_, &IsMarkedCallback,
+ &MarkObjectCallback, &ProcessMarkStackPausedCallback, this);
+}
+
bool MarkSweep::HandleDirtyObjectsPhase() {
TimingLogger::ScopedSplit split("(Paused)HandleDirtyObjectsPhase", &timings_);
Thread* self = Thread::Current();
MarkReachableObjects();
// Pre-clean dirtied cards to reduce pauses.
PreCleanCards();
+ if (IsConcurrent()) {
+ // No reason to do this for non-concurrent GC since pre processing soft references only helps
+ // pauses.
+ // NOTE(review): this runs inside the "(Paused)" split above, so it still
+ // executes with mutators suspended here -- confirm the intended win is
+ // shrinking the later ProcessReferences work rather than this pause.
+ PreProcessReferences(self);
+ }
}
void MarkSweep::UpdateAndMarkModUnion() {
return args->mark_callback_(obj, args->arg_);
}
-// Process reference class instances and schedule finalizations.
-void Heap::ProcessReferences(TimingLogger& timings, bool clear_soft,
- IsMarkedCallback* is_marked_callback,
- MarkObjectCallback* mark_object_callback,
- ProcessMarkStackCallback* process_mark_stack_callback, void* arg) {
- // Unless we are in the zygote or required to clear soft references with white references,
- // preserve some white referents.
- if (!clear_soft && !Runtime::Current()->IsZygote()) {
+// Preserves some white soft referents unless |clear_soft| is set; split out of
+// ProcessReferences so collectors can also run this pass on its own (see
+// MarkSweep::PreProcessReferences).  Does nothing when |clear_soft| is true --
+// the white references are cleared later by ClearWhiteReferences.
+// NOTE(review): the old IsZygote() special case was removed from this
+// condition; the zygote case is now folded into |clear_soft| by the caller
+// (CollectGarbageInternal passes clear_soft_references || IsZygote()) --
+// confirm every caller was updated.
+// NOTE(review): |timings| is unused in the body shown; the callers create
+// their own timing splits -- confirm it is kept for interface consistency.
+void Heap::ProcessSoftReferences(TimingLogger& timings, bool clear_soft,
+ IsMarkedCallback* is_marked_callback,
+ MarkObjectCallback* mark_object_callback,
+ ProcessMarkStackCallback* process_mark_stack_callback, void* arg) {
+ // Unless required to clear soft references with white references, preserve some white referents.
+ if (!clear_soft) {
+ // Don't clear for sticky GC.
SoftReferenceArgs soft_reference_args;
soft_reference_args.is_marked_callback_ = is_marked_callback;
soft_reference_args.mark_callback_ = mark_object_callback;
soft_reference_args.arg_ = arg;
+ // References with a marked referent are removed from the list.
soft_reference_queue_.PreserveSomeSoftReferences(&PreserveSoftReferenceCallback,
&soft_reference_args);
+ // Marking the preserved referents may have queued work; drain it now.
process_mark_stack_callback(arg);
}
+}
+
+// Process reference class instances and schedule finalizations.
+void Heap::ProcessReferences(TimingLogger& timings, bool clear_soft,
+ IsMarkedCallback* is_marked_callback,
+ MarkObjectCallback* mark_object_callback,
+ ProcessMarkStackCallback* process_mark_stack_callback, void* arg) {
+ ProcessSoftReferences(timings, clear_soft, is_marked_callback, mark_object_callback,
+ process_mark_stack_callback, arg);
timings.StartSplit("(Paused)ProcessReferences");
// Clear all remaining soft and weak references with white referents.
soft_reference_queue_.ClearWhiteReferences(cleared_references_, is_marked_callback, arg);
<< "Could not find garbage collector with concurrent=" << concurrent_gc_
<< " and type=" << gc_type;
ATRACE_BEGIN(StringPrintf("%s %s GC", PrettyCause(gc_cause), collector->GetName()).c_str());
- collector->Run(gc_cause, clear_soft_references);
+ if (!clear_soft_references) {
+ clear_soft_references = gc_type != collector::kGcTypeSticky; // TODO: GSS?
+ }
+ collector->Run(gc_cause, clear_soft_references || Runtime::Current()->IsZygote());
total_objects_freed_ever_ += collector->GetFreedObjects();
total_bytes_freed_ever_ += collector->GetFreedBytes();
RequestHeapTrim();
}
static mirror::Object* PreserveSoftReferenceCallback(mirror::Object* obj, void* arg);
+ // Preserves some white soft referents when |clear_soft| is false; no-op
+ // otherwise (white references are cleared later in ProcessReferences).
+ // Exposed separately so collectors can pre-process soft references before
+ // the reference-processing pause.  Requires the mutator lock shared and the
+ // heap bitmap lock exclusively (see the thread-safety annotations below).
+ void ProcessSoftReferences(TimingLogger& timings, bool clear_soft,
+ IsMarkedCallback* is_marked_callback,
+ MarkObjectCallback* mark_object_callback,
+ ProcessMarkStackCallback* process_mark_stack_callback, void* arg)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+ EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
void ProcessReferences(TimingLogger& timings, bool clear_soft,
IsMarkedCallback* is_marked_callback,
MarkObjectCallback* mark_object_callback,