2 * Copyright (C) 2008 The Android Open Source Project
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
8 * http://www.apache.org/licenses/LICENSE-2.0
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
18 #include "alloc/HeapBitmap.h"
19 #include "alloc/HeapInternal.h"
20 #include "alloc/HeapSource.h"
21 #include "alloc/MarkSweep.h"
22 #include <limits.h> // for ULONG_MAX
23 #include <sys/mman.h> // for madvise(), mmap()
24 #include <cutils/ashmem.h>
/* GC debug levels: PARANOID adds extra validity assertions, BASIC is
 * lighter-weight, OFF disables checks.
 * NOTE(review): this extract is elided and has the original file's line
 * numbers fused into each line; the #if/#else/#endif lines that select
 * between the duplicate definitions below are missing — confirm against
 * upstream dalvik/vm/alloc/MarkSweep.c before editing.
 */
27 #define GC_DEBUG_PARANOID 2
28 #define GC_DEBUG_BASIC 1
29 #define GC_DEBUG_OFF 0
/* True when the compile-time debug level is at least l. */
30 #define GC_DEBUG(l) (GC_DEBUG_LEVEL >= (l))
/* Two alternative level settings; the surrounding #if/#else is elided here. */
33 #define GC_DEBUG_LEVEL GC_DEBUG_PARANOID
35 #define GC_DEBUG_LEVEL GC_DEBUG_OFF
/* Tag used by all GC logging macros below. */
40 #define GC_LOG_TAG LOG_TAG "-gc"
/* Disabled-logging branch (conditional lines elided): compile to no-ops. */
43 #define LOGV_GC(...) ((void)0)
44 #define LOGD_GC(...) ((void)0)
/* Enabled-logging branch: forward to the platform LOG macro. */
46 #define LOGV_GC(...) LOG(LOG_VERBOSE, GC_LOG_TAG, __VA_ARGS__)
47 #define LOGD_GC(...) LOG(LOG_DEBUG, GC_LOG_TAG, __VA_ARGS__)
/* Very-verbose logging: either forwards to LOGV_GC or compiles away. */
51 #define LOGVV_GC(...) LOGV_GC(__VA_ARGS__)
53 #define LOGVV_GC(...) ((void)0)
56 #define LOGI_GC(...) LOG(LOG_INFO, GC_LOG_TAG, __VA_ARGS__)
57 #define LOGW_GC(...) LOG(LOG_WARN, GC_LOG_TAG, __VA_ARGS__)
58 #define LOGE_GC(...) LOG(LOG_ERROR, GC_LOG_TAG, __VA_ARGS__)
/* Per-phase verbose log helpers (mark / scan / sweep / reference handling). */
60 #define LOG_SCAN(...) LOGV_GC("SCAN: " __VA_ARGS__)
61 #define LOG_MARK(...) LOGV_GC("MARK: " __VA_ARGS__)
62 #define LOG_SWEEP(...) LOGV_GC("SWEEP: " __VA_ARGS__)
63 #define LOG_REF(...) LOGV_GC("REF: " __VA_ARGS__)
/* Very-verbose per-phase variants. */
65 #define LOGV_SCAN(...) LOGVV_GC("SCAN: " __VA_ARGS__)
66 #define LOGV_MARK(...) LOGVV_GC("MARK: " __VA_ARGS__)
67 #define LOGV_SWEEP(...) LOGVV_GC("SWEEP: " __VA_ARGS__)
68 #define LOGV_REF(...) LOGVV_GC("REF: " __VA_ARGS__)
/* Debug-only: tracks the object whose scan caused the current mark,
 * recorded into per-chunk headers below. (#endif elided.) */
70 #if WITH_OBJECT_HEADERS
72 static const Object *gMarkParent = NULL;
/* NOTE(review): assumes 4 KiB pages — TODO confirm for the target ABI. */
76 #define PAGE_SIZE 4096
/* Round p up to the next PAGE_SIZE boundary (PAGE_SIZE must be a power of 2). */
78 #define ALIGN_UP_TO_PAGE_SIZE(p) \
79 (((size_t)(p) + (PAGE_SIZE - 1)) & ~(PAGE_SIZE - 1))
/* Test whether the chunk's object bit is set in any of the mark bitmaps
 * held by ctx.  Returns a long whose set bit may be anywhere in the word
 * (per the original comment below) — do not cast the result to bool.
 * NOTE(review): opening/closing braces are elided in this extract.
 */
81 /* Do not cast the result of this to a boolean; the only set bit
84 static inline long isMarked(const DvmHeapChunk *hc, const GcMarkContext *ctx)
85 __attribute__((always_inline));
86 static inline long isMarked(const DvmHeapChunk *hc, const GcMarkContext *ctx)
88 return dvmHeapBitmapIsObjectBitSetInList(ctx->bitmaps, ctx->numBitmaps, hc);
/* Allocate the mark stack in an ashmem region sized for the worst case
 * (heap full of minimum-size objects), mmap it, and initialize *stack so
 * that base/top point one past the high end and limit at the low end
 * (the stack grows downward; see MARK_STACK_PUSH).
 * NOTE(review): return type, locals, error-path returns, mmap flags and
 * the fd close are elided in this extract — confirm upstream.
 */
92 createMarkStack(GcMarkStack *stack)
98 /* Create a stack big enough for the worst possible case,
99 * where the heap is perfectly full of the smallest object.
100 * TODO: be better about memory usage; use a smaller stack with
101 * overflow detection and recovery.
103 size = dvmHeapSourceGetIdealFootprint() * sizeof(Object*) /
104 (sizeof(Object) + HEAP_SOURCE_CHUNK_OVERHEAD);
105 size = ALIGN_UP_TO_PAGE_SIZE(size);
/* Back the stack with an anonymous shared-memory region. */
106 fd = ashmem_create_region("dalvik-heap-markstack", size);
108 LOGE_GC("Could not create %d-byte ashmem mark stack: %s\n",
109 size, strerror(errno));
112 limit = (const Object **)mmap(NULL, size, PROT_READ | PROT_WRITE,
116 if (limit == MAP_FAILED) {
117 LOGE_GC("Could not mmap %d-byte ashmem mark stack: %s\n",
118 size, strerror(err));
/* Success: record the mapping bounds; top == base means empty. */
122 memset(stack, 0, sizeof(*stack));
123 stack->limit = limit;
124 stack->base = (const Object **)((uintptr_t)limit + size);
125 stack->top = stack->base;
/* Unmap the mark-stack region and clear the bookkeeping struct. */
131 destroyMarkStack(GcMarkStack *stack)
133 munmap((char *)stack->limit,
134 (uintptr_t)stack->base - (uintptr_t)stack->limit);
135 memset(stack, 0, sizeof(*stack));
/* Push obj by pre-decrementing top (stack grows downward toward limit).
 * NOTE(review): the do/while wrapper and any overflow check are elided. */
138 #define MARK_STACK_PUSH(stack, obj) \
140 *--(stack).top = (obj); \
/* Prepare for marking: create the mark stack, fetch the HeapSource's
 * object bitmaps, and build empty mark bitmaps covering the same address
 * ranges.  NOTE(review): return statements and the numBitmaps declaration
 * are elided in this extract — confirm upstream.
 */
144 dvmHeapBeginMarkStep()
146 GcMarkContext *mc = &gDvm.gcHeap->markContext;
147 HeapBitmap objectBitmaps[HEAP_SOURCE_MAX_HEAP_COUNT];
150 if (!createMarkStack(&mc->stack)) {
154 numBitmaps = dvmHeapSourceGetObjectBitmaps(objectBitmaps,
155 HEAP_SOURCE_MAX_HEAP_COUNT);
156 if (numBitmaps <= 0) {
160 /* Create mark bitmaps that cover the same ranges as the
161 * current object bitmaps.
163 if (!dvmHeapBitmapInitListFromTemplates(mc->bitmaps, objectBitmaps,
169 mc->numBitmaps = numBitmaps;
/* Debug-only generation bump lives here when WITH_OBJECT_HEADERS. */
172 #if WITH_OBJECT_HEADERS
/* Atomically set the chunk's mark bit and return its previous value
 * (zero if the object was not already marked). */
179 static long setAndReturnMarkBit(GcMarkContext *ctx, const DvmHeapChunk *hc)
180 __attribute__((always_inline));
182 setAndReturnMarkBit(GcMarkContext *ctx, const DvmHeapChunk *hc)
184 return dvmHeapBitmapSetAndReturnObjectBitInList(ctx->bitmaps,
185 ctx->numBitmaps, hc);
/* Core marking routine.  Sets the object's mark bit; if it was newly
 * marked and either forceStack is set or (when checkFinger) the object
 * lies below the scan finger, pushes it on the mark stack so it still
 * gets scanned.  Also feeds hprof and debug-header bookkeeping.
 * NOTE(review): the hc = ptr2chunk(obj) line and several closing braces
 * are elided in this extract — confirm upstream.
 */
188 static void _markObjectNonNullCommon(const Object *obj, GcMarkContext *ctx,
189 bool checkFinger, bool forceStack)
190 __attribute__((always_inline));
192 _markObjectNonNullCommon(const Object *obj, GcMarkContext *ctx,
193 bool checkFinger, bool forceStack)
/* Paranoid sanity checks: never mark the fake unlinked-class sentinel,
 * and the pointer must look like a live heap object. */
199 #if GC_DEBUG(GC_DEBUG_PARANOID)
200 //TODO: make sure we're locked
201 assert(obj != (Object *)gDvm.unlinkedJavaLangClass);
202 assert(dvmIsValidObject(obj));
203 #endif
206 if (!setAndReturnMarkBit(ctx, hc)) {
207 /* This object was not previously marked.
209 if (forceStack || (checkFinger && (void *)hc < ctx->finger)) {
210 /* This object will need to go on the mark stack.
212 MARK_STACK_PUSH(ctx->stack, obj);
/* Debug-only per-object header bookkeeping: record mark generation,
 * finger position and marking parent for post-mortem analysis. */
215 #if WITH_OBJECT_HEADERS
216 if (hc->scanGeneration != hc->markGeneration) {
217 LOGE("markObject(0x%08x): wasn't scanned last time\n", (uint)obj);
220 if (hc->markGeneration == gGeneration) {
221 LOGE("markObject(0x%08x): already marked this generation\n",
225 hc->oldMarkGeneration = hc->markGeneration;
226 hc->markGeneration = gGeneration;
227 hc->markFingerOld = hc->markFinger;
228 hc->markFinger = ctx->finger;
229 if (gMarkParent != NULL) {
230 hc->parentOld = hc->parent;
231 hc->parent = gMarkParent;
/* Low bit tags the parent pointer; meaning not visible in this extract. */
233 hc->parent = (const Object *)((uintptr_t)hc->parent | 1);
/* If an hprof dump is in progress, report this object as reached. */
238 if (gDvm.gcHeap->hprofContext != NULL) {
239 hprofMarkRootObject(gDvm.gcHeap->hprofContext, obj, 0);
/* Optional mark statistics: count and total chunk bytes marked. */
242 #if DVM_TRACK_HEAP_MARKING
243 gDvm.gcHeap->markCount++;
244 gDvm.gcHeap->markSize += dvmHeapSourceChunkSize((void *)hc) +
245 HEAP_SOURCE_CHUNK_OVERHEAD;
248 /* obj->clazz can be NULL if we catch an object between
249 * dvmMalloc() and DVM_OBJECT_INIT(). This is ok.
251 LOGV_MARK("0x%08x %s\n", (uint)obj,
252 obj->clazz == NULL ? "<null class>" : obj->clazz->name);
256 /* Used to mark objects when recursing. Recursion is done by moving
257 * the finger across the bitmaps in address order and marking child
258 * objects. Any newly-marked objects whose addresses are lower than
259 * the finger won't be visited by the bitmap scan, so those objects
260 * need to be added to the mark stack.
 */
/* Recursion-phase marker: checkFinger=true so below-finger objects are
 * stacked; obj must be non-NULL. */
263 markObjectNonNull(const Object *obj, GcMarkContext *ctx)
265 _markObjectNonNullCommon(obj, ctx, true, false);
/* NULL-tolerant wrapper around markObjectNonNull().
 * NOTE(review): do/while wrapper lines are elided in this extract. */
268 #define markObject(obj, ctx) \
270 Object *MO_obj_ = (Object *)(obj); \
271 if (MO_obj_ != NULL) { \
272 markObjectNonNull(MO_obj_, (ctx)); \
276 /* If the object hasn't already been marked, mark it and
277 * schedule it to be scanned for references.
279 * obj may not be NULL. The macro dvmMarkObject() should
280 * be used in situations where a reference may be NULL.
282 * This function may only be called when marking the root
283 * set. When recursing, use the internal markObject[NonNull]().
 */
/* Root-set marker: checkFinger=false because no finger exists yet, so
 * every newly-marked root is found by the later bitmap walk. */
286 dvmMarkObjectNonNull(const Object *obj)
288 _markObjectNonNullCommon(obj, &gDvm.gcHeap->markContext, false, false);
291 /* Mark the set of root objects.
293 * Things we need to scan:
294 * - System classes defined by root classloader
296 * - Interpreted stack, from top to "curFrame"
297 * - Dalvik registers (args + local vars)
298 * - JNI local references
299 * - Automatic VM local references (TrackedAlloc)
300 * - Associated Thread/VMThread object
301 * - ThreadGroups (could track & start with these instead of working
302 * upward from Threads)
303 * - Exception currently being thrown, if present
304 * - JNI global references
305 * - Interned string table
306 * - Primitive classes
308 * - gDvm.outOfMemoryObj
309 * - Objects allocated with ALLOC_NO_GC
310 * - Objects pending finalization (but not yet finalized)
311 * - Objects in debugger object registry
314 * - Native stack (for in-progress stuff in the VM)
315 * - The TrackedAlloc stuff watches all native VM references.
 */
/* Walks every root category listed above, calling the various
 * dvmGcScan*/dvmGcMark* helpers and tagging each group for hprof.
 * NOTE(review): local declarations (gcHeap, refs, op) and some braces
 * are elided in this extract — confirm upstream. */
317 void dvmHeapMarkRootSet()
323 gcHeap = gDvm.gcHeap;
325 HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_STICKY_CLASS, 0);
327 LOG_SCAN("root class loader\n");
328 dvmGcScanRootClassLoader();
329 LOG_SCAN("primitive classes\n");
330 dvmGcScanPrimitiveClasses();
332 /* dvmGcScanRootThreadGroups() sets a bunch of
333 * different scan states internally.
335 HPROF_CLEAR_GC_SCAN_STATE();
337 LOG_SCAN("root thread groups\n");
338 dvmGcScanRootThreadGroups();
340 HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_INTERNED_STRING, 0);
342 LOG_SCAN("interned strings\n");
343 dvmGcScanInternedStrings();
345 HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_JNI_GLOBAL, 0);
347 LOG_SCAN("JNI global refs\n");
348 dvmGcMarkJniGlobalRefs();
350 HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_REFERENCE_CLEANUP, 0);
352 LOG_SCAN("pending reference operations\n");
/* true: second arg meaning not visible here — presumably "strip the
 * low work bits stuffed into these pointers"; TODO confirm upstream. */
353 dvmHeapMarkLargeTableRefs(gcHeap->referenceOperations, true);
355 HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_FINALIZING, 0);
357 LOG_SCAN("pending finalizations\n");
358 dvmHeapMarkLargeTableRefs(gcHeap->pendingFinalizationRefs, false);
360 HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_DEBUGGER, 0);
362 LOG_SCAN("debugger refs\n");
363 dvmGcMarkDebuggerRefs();
365 HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_VM_INTERNAL, 0);
367 /* Mark all ALLOC_NO_GC objects.
369 LOG_SCAN("ALLOC_NO_GC objects\n");
/* Iterate the non-collectable reference table entry by entry. */
370 refs = &gcHeap->nonCollectableRefs;
372 while ((uintptr_t)op < (uintptr_t)refs->nextEntry) {
373 dvmMarkObjectNonNull(*(op++));
376 /* Mark any special objects we have sitting around.
378 LOG_SCAN("special objects\n");
379 dvmMarkObjectNonNull(gDvm.outOfMemoryObj);
380 dvmMarkObjectNonNull(gDvm.internalErrorObj);
381 //TODO: scan object references sitting in gDvm; use pointer begin & end
383 HPROF_CLEAR_GC_SCAN_STATE();
387 * Nothing past this point is allowed to use dvmMarkObject*().
388 * Scanning/recursion must use markObject*(), which takes the
389 * finger into account.
 */
/* Poison the name so any later use of the root-only API fails to link. */
391 #define dvmMarkObjectNonNull __dont_use_dvmMarkObjectNonNull__
394 /* Mark all of a ClassObject's interfaces.
 */
/* NOTE(review): loop-variable declarations and the interfaces++ advance
 * are elided in this extract — confirm upstream. */
396 static void markInterfaces(const ClassObject *clazz, GcMarkContext *ctx)
398 ClassObject **interfaces;
402 /* Mark all interfaces.
404 interfaces = clazz->interfaces;
405 interfaceCount = clazz->interfaceCount;
406 for (i = 0; i < interfaceCount; i++) {
407 markObjectNonNull((Object *)*interfaces, ctx);
412 /* Mark all objects referred to by a ClassObject's static fields.
 */
/* Walks sfields; only array ('[') and object ('L') signatures hold
 * references.  NOTE(review): the StaticField pointer f's declaration
 * and advance are elided. */
414 static void scanStaticFields(const ClassObject *clazz, GcMarkContext *ctx)
419 //TODO: Optimize this with a bit vector or something
421 for (i = 0; i < clazz->sfieldCount; i++) {
422 char c = f->field.signature[0];
423 if (c == '[' || c == 'L') {
424 /* It's an array or class reference.
426 markObject((Object *)f->value.l, ctx);
432 /* Mark all objects referred to by a DataObject's instance fields.
 */
/* Walks the superclass chain; reference-holding ifields are grouped at
 * the front of each class's list (ifieldRefCount of them). */
434 static void scanInstanceFields(const DataObject *obj, ClassObject *clazz,
437 //TODO: Optimize this by avoiding walking the superclass chain
438 while (clazz != NULL) {
442 /* All of the fields that contain object references
443 * are guaranteed to be at the beginning of the ifields list.
446 for (i = 0; i < clazz->ifieldRefCount; i++) {
447 /* Mark the array or object reference.
450 * Note that, per the comment on struct InstField,
451 * f->byteOffset is the offset from the beginning of
452 * obj, not the offset into obj->instanceData.
454 markObject(dvmGetFieldObject((Object*)obj, f->byteOffset), ctx);
458 /* This will be NULL when we hit java.lang.Object
460 clazz = clazz->super;
464 /* Mark all objects referred to by the array's contents.
 */
/* Marks each element of an Object[]; elements may legitimately be NULL.
 * NOTE(review): the contents++ advance is elided in this extract. */
466 static void scanObjectArray(const ArrayObject *array, GcMarkContext *ctx)
472 contents = (Object **)array->contents;
473 length = array->length;
475 for (i = 0; i < length; i++) {
476 markObject(*contents, ctx); // may be NULL
481 /* Mark all objects referred to by the ClassObject.
 */
/* Marks the references a class object itself holds: element class (for
 * arrays), superclass, classloader, static fields, and interfaces. */
483 static void scanClassObject(const ClassObject *clazz, GcMarkContext *ctx)
485 LOGV_SCAN("---------> %s\n", clazz->name);
487 if (IS_CLASS_FLAG_SET(clazz, CLASS_ISARRAY)) {
488 /* We're an array; mark the class object of the contents
491 * Note that we won't necessarily reach the array's element
492 * class by scanning the array contents; the array may be
493 * zero-length, or may only contain null objects.
495 markObjectNonNull((Object *)clazz->elementClass, ctx);
498 /* We scan these explicitly in case the only remaining
499 * reference to a particular class object is via a data
500 * object; we may not be guaranteed to reach all
501 * live class objects via a classloader.
503 markObject((Object *)clazz->super, ctx); // may be NULL (java.lang.Object)
504 markObject(clazz->classLoader, ctx); // may be NULL
506 scanStaticFields(clazz, ctx);
507 markInterfaces(clazz, ctx);
510 /* Mark all objects that obj refers to.
512 * Called on every object in markList.
 */
/* Central scan routine: marks obj's class, then its outgoing references
 * (array elements or instance fields), with special handling for
 * java.lang.ref.Reference subclasses and for class objects.
 * NOTE(review): heavily elided in this extract — the clazz declaration,
 * many braces/returns and several #if/#endif lines are missing; confirm
 * all control flow against upstream before editing. */
514 static void scanObject(const Object *obj, GcMarkContext *ctx)
518 assert(dvmIsValidObject(obj));
519 LOGV_SCAN("0x%08x %s\n", (uint)obj, obj->clazz->name);
/* Report to a concurrent hprof dump, if one is running. */
522 if (gDvm.gcHeap->hprofContext != NULL) {
523 hprofDumpHeapObject(gDvm.gcHeap->hprofContext, obj);
527 /* Get and mark the class object for this particular instance.
531 /* This can happen if we catch an object between
532 * dvmMalloc() and DVM_OBJECT_INIT(). The object
533 * won't contain any references yet, so we can
537 } else if (clazz == gDvm.unlinkedJavaLangClass) {
538 /* This class hasn't been linked yet. We're guaranteed
539 * that the object doesn't contain any references that
540 * aren't already tracked, so we can skip scanning it.
542 * NOTE: unlinkedJavaLangClass is not on the heap, so
543 * it's very important that we don't try marking it.
 */
/* Debug-only scan-generation bookkeeping in the chunk header. */
547 #if WITH_OBJECT_HEADERS
549 if (ptr2chunk(obj)->scanGeneration == gGeneration) {
550 LOGE("object 0x%08x was already scanned this generation\n",
554 ptr2chunk(obj)->oldScanGeneration = ptr2chunk(obj)->scanGeneration;
555 ptr2chunk(obj)->scanGeneration = gGeneration;
556 ptr2chunk(obj)->scanCount++;
559 assert(dvmIsValidObject((Object *)clazz));
560 markObjectNonNull((Object *)clazz, ctx);
562 /* Mark any references in this object.
564 if (IS_CLASS_FLAG_SET(clazz, CLASS_ISARRAY)) {
565 /* It's an array object.
567 if (IS_CLASS_FLAG_SET(clazz, CLASS_ISOBJECTARRAY)) {
568 /* It's an array of object references.
570 scanObjectArray((ArrayObject *)obj, ctx);
572 // else there's nothing else to scan
574 /* It's a DataObject-compatible object.
576 scanInstanceFields((DataObject *)obj, clazz, ctx);
/* Reference-object special case: the referent field was deliberately
 * skipped by scanInstanceFields and is handled by reference semantics. */
578 if (IS_CLASS_FLAG_SET(clazz, CLASS_ISREFERENCE)) {
579 GcHeap *gcHeap = gDvm.gcHeap;
582 /* It's a subclass of java/lang/ref/Reference.
583 * The fields in this class have been arranged
584 * such that scanInstanceFields() did not actually
585 * mark the "referent" field; we need to handle
588 * If the referent already has a strong mark (isMarked(referent)),
589 * we don't care about its reference status.
591 referent = dvmGetFieldObject(obj,
592 gDvm.offJavaLangRefReference_referent);
593 if (referent != NULL &&
594 !isMarked(ptr2chunk(referent), &gcHeap->markContext))
598 if (gcHeap->markAllReferents) {
599 LOG_REF("Hard-marking a reference\n");
601 /* Don't bother with normal reference-following
602 * behavior, just mark the referent. This should
603 * only be used when following objects that just
604 * became scheduled for finalization.
606 markObjectNonNull(referent, ctx);
610 /* See if this reference was handled by a previous GC.
612 if (dvmGetFieldObject(obj,
613 gDvm.offJavaLangRefReference_vmData) ==
614 SCHEDULED_REFERENCE_MAGIC)
616 LOG_REF("Skipping scheduled reference\n");
618 /* Don't reschedule it, but make sure that its
619 * referent doesn't get collected (in case it's
620 * a PhantomReference and wasn't cleared automatically).
622 //TODO: Mark these after handling all new refs of
623 // this strength, in case the new refs refer
624 // to the same referent. Not a very common
626 markObjectNonNull(referent, ctx);
630 /* Find out what kind of reference is pointing
633 refFlags = GET_CLASS_FLAG_GROUP(clazz,
635 CLASS_ISWEAKREFERENCE |
636 CLASS_ISPHANTOMREFERENCE);
638 /* We use the vmData field of Reference objects
639 * as a next pointer in a singly-linked list.
640 * That way, we don't need to allocate any memory
641 * while we're doing a GC.
 */
643 #define ADD_REF_TO_LIST(list, ref) \
645 Object *ARTL_ref_ = (/*de-const*/Object *)(ref); \
646 dvmSetFieldObject(ARTL_ref_, \
647 gDvm.offJavaLangRefReference_vmData, list); \
651 /* At this stage, we just keep track of all of
652 * the live references that we've seen. Later,
653 * we'll walk through each of these lists and
654 * deal with the referents.
656 if (refFlags == CLASS_ISREFERENCE) {
657 /* It's a soft reference. Depending on the state,
658 * we'll attempt to collect all of them, some of
659 * them, or none of them.
661 if (gcHeap->softReferenceCollectionState ==
665 markObjectNonNull(referent, ctx);
666 } else if (gcHeap->softReferenceCollectionState ==
670 ADD_REF_TO_LIST(gcHeap->softReferences, obj);
672 /* We'll only try to collect half of the
675 if (gcHeap->softReferenceColor++ & 1) {
676 goto sr_collect_none;
681 /* It's a weak or phantom reference.
682 * Clearing CLASS_ISREFERENCE will reveal which.
684 refFlags &= ~CLASS_ISREFERENCE;
685 if (refFlags == CLASS_ISWEAKREFERENCE) {
686 ADD_REF_TO_LIST(gcHeap->weakReferences, obj);
687 } else if (refFlags == CLASS_ISPHANTOMREFERENCE) {
688 ADD_REF_TO_LIST(gcHeap->phantomReferences, obj);
690 assert(!"Unknown reference type");
693 #undef ADD_REF_TO_LIST
698 /* If this is a class object, mark various other things that
699 * its internals point to.
701 * All class objects are instances of java.lang.Class,
702 * including the java.lang.Class class object.
704 if (clazz == gDvm.classJavaLangClass) {
705 scanClassObject((ClassObject *)obj, ctx);
/* Debug-only: restore the previous gMarkParent (body elided). */
709 #if WITH_OBJECT_HEADERS
/* Drain the mark stack, scanning each popped object.  scanObject() may
 * push more entries, so loop until top returns to base.  The finger is
 * parked at ULONG_MAX so nothing new gets re-pushed for a bitmap pass. */
715 processMarkStack(GcMarkContext *ctx)
717 const Object **const base = ctx->stack.base;
719 /* Scan anything that's on the mark stack.
720 * We can't use the bitmaps anymore, so use
721 * a finger that points past the end of them.
723 ctx->finger = (void *)ULONG_MAX;
724 while (ctx->stack.top != base) {
725 scanObject(*ctx->stack.top++, ctx);
/* Debug-only: asserts the bitmap walk visits addresses monotonically. */
730 static uintptr_t gLastFinger = 0;
/* dvmHeapBitmapWalkList callback: advance the finger, then scan every
 * chunk in ptrs (converted to object pointers via chunk2ptr). */
734 scanBitmapCallback(size_t numPtrs, void **ptrs, const void *finger, void *arg)
736 GcMarkContext *ctx = (GcMarkContext *)arg;
740 assert((uintptr_t)finger >= gLastFinger);
741 gLastFinger = (uintptr_t)finger;
744 ctx->finger = finger;
745 for (i = 0; i < numPtrs; i++) {
746 /* The pointers we're getting back are DvmHeapChunks,
749 scanObject(chunk2ptr(*ptrs++), ctx);
755 /* Given bitmaps with the root set marked, find and mark all
756 * reachable objects. When this returns, the entire set of
757 * live objects will be marked and the mark stack will be empty.
 */
759 void dvmHeapScanMarkedObjects()
761 GcMarkContext *ctx = &gDvm.gcHeap->markContext;
763 assert(ctx->finger == NULL);
765 /* The bitmaps currently have bits set for the root set.
766 * Walk across the bitmaps and scan each object.
771 dvmHeapBitmapWalkList(ctx->bitmaps, ctx->numBitmaps,
772 scanBitmapCallback, ctx);
774 /* We've walked the mark bitmaps. Scan anything that's
775 * left on the mark stack.
777 processMarkStack(ctx);
779 LOG_SCAN("done with marked objects\n");
782 /** @return true if we need to schedule a call to clear().
 */
/* Clears the referent field directly (atomic w.r.t. the GC), so that
 * even an overriding clear() cannot resurrect the referent.  Returns
 * true only when FANCY_REFERENCE_SUBCLASS detects an overridden clear()
 * that must still be invoked via the heap worker.
 * NOTE(review): the plain return path is elided in this extract. */
784 static bool clearReference(Object *reference)
786 /* This is what the default implementation of Reference.clear()
787 * does. We're required to clear all references to a given
788 * referent atomically, so we can't pop in and out of interp
791 * Also, someone may have subclassed one of the basic Reference
792 * types, overriding clear(). We can't trust the clear()
793 * implementation to call super.clear(); we cannot let clear()
794 * resurrect the referent. If we clear it here, we can safely
795 * call any overriding implementations.
797 dvmSetFieldObject(reference,
798 gDvm.offJavaLangRefReference_referent, NULL);
800 #if FANCY_REFERENCE_SUBCLASS
801 /* See if clear() has actually been overridden. If so,
802 * we need to schedule a call to it before calling enqueue().
804 if (reference->clazz->vtable[gDvm.voffJavaLangRefReference_clear]->clazz !=
805 gDvm.classJavaLangRefReference)
807 /* clear() has been overridden; return true to indicate
808 * that we need to schedule a call to the real clear()
 */
818 /** @return true if we need to schedule a call to enqueue().
 */
/* Decides whether Reference.enqueue() needs to run: false when there is
 * no queue or the reference is already enqueued; otherwise the call must
 * be scheduled on the heap worker to avoid deadlocking here. */
820 static bool enqueueReference(Object *reference)
822 #if FANCY_REFERENCE_SUBCLASS
823 /* See if this reference class has overridden enqueue();
824 * if not, we can take a shortcut.
826 if (reference->clazz->vtable[gDvm.voffJavaLangRefReference_enqueue]->clazz
827 == gDvm.classJavaLangRefReference)
830 Object *queue = dvmGetFieldObject(reference,
831 gDvm.offJavaLangRefReference_queue);
832 Object *queueNext = dvmGetFieldObject(reference,
833 gDvm.offJavaLangRefReference_queueNext);
834 if (queue == NULL || queueNext != NULL) {
835 /* There is no queue, or the reference has already
836 * been enqueued. The Reference.enqueue() method
837 * will do nothing even if we call it.
843 /* We need to call enqueue(), but if we called it from
844 * here we'd probably deadlock. Schedule a call.
 */
849 /* All objects for stronger reference levels have been
850 * marked before this is called.
 */
/* Processes one strength-level list (soft/weak/phantom) built during
 * scanObject(): for each reference whose referent is unmarked, schedule
 * clear()/enqueue() work bits on gcHeap->referenceOperations, then (for
 * phantoms) re-walk the list marking surviving referents.
 * NOTE(review): several local declarations, braces and the switch on
 * refType are elided in this extract — confirm upstream. */
852 void dvmHeapHandleReferences(Object *refListHead, enum RefType refType)
855 GcMarkContext *markContext = &gDvm.gcHeap->markContext;
856 const int offVmData = gDvm.offJavaLangRefReference_vmData;
857 const int offReferent = gDvm.offJavaLangRefReference_referent;
858 bool workRequired = false;
860 size_t numCleared = 0;
861 size_t numEnqueued = 0;
/* Walk the singly-linked list threaded through vmData. */
862 reference = refListHead;
863 while (reference != NULL) {
867 /* Pull the interesting fields out of the Reference object.
869 next = dvmGetFieldObject(reference, offVmData);
870 referent = dvmGetFieldObject(reference, offReferent);
872 //TODO: when handling REF_PHANTOM, unlink any references
873 // that fail this initial if(). We need to re-walk
874 // the list, and it would be nice to avoid the extra
876 if (referent != NULL && !isMarked(ptr2chunk(referent), markContext)) {
877 bool schedClear, schedEnqueue;
879 /* This is the strongest reference that refers to referent.
880 * Do the right thing.
 */
/* Soft/weak case: clear now, maybe enqueue (switch lines elided). */
885 schedClear = clearReference(reference);
886 schedEnqueue = enqueueReference(reference);
889 /* PhantomReferences are not cleared automatically.
890 * Until someone clears it (or the reference itself
891 * is collected), the referent must remain alive.
893 * It's necessary to fully mark the referent because
894 * it will still be present during the next GC, and
895 * all objects that it points to must be valid.
896 * (The referent will be marked outside of this loop,
897 * after handing all references of this strength, in
898 * case multiple references point to the same object.)
902 /* A PhantomReference is only useful with a
903 * queue, but since it's possible to create one
904 * without a queue, we need to check.
906 schedEnqueue = enqueueReference(reference);
909 assert(!"Bad reference type");
911 schedEnqueue = false;
914 numCleared += schedClear ? 1 : 0;
915 numEnqueued += schedEnqueue ? 1 : 0;
917 if (schedClear || schedEnqueue) {
920 /* Stuff the clear/enqueue bits in the bottom of
921 * the pointer. Assumes that objects are 8-byte
924 * Note that we are adding the *Reference* (which
925 * is by definition already marked at this point) to
926 * this list; we're not adding the referent (which
927 * has already been cleared).
929 assert(((intptr_t)reference & 3) == 0);
930 assert(((WORKER_CLEAR | WORKER_ENQUEUE) & ~3) == 0);
931 workBits = (schedClear ? WORKER_CLEAR : 0) |
932 (schedEnqueue ? WORKER_ENQUEUE : 0);
933 if (!dvmHeapAddRefToLargeTable(
934 &gDvm.gcHeap->referenceOperations,
935 (Object *)((uintptr_t)reference | workBits)))
937 LOGE_HEAP("dvmMalloc(): no room for any more "
938 "reference operations\n");
944 if (refType != REF_PHANTOM) {
945 /* Let later GCs know not to reschedule this reference.
947 dvmSetFieldObject(reference, offVmData,
948 SCHEDULED_REFERENCE_MAGIC);
949 } // else this is handled later for REF_PHANTOM
951 } // else there was a stronger reference to the referent.
/* Local pretty-printer for the summary log below. */
955 #define refType2str(r) \
956 ((r) == REF_SOFT ? "soft" : ( \
957 (r) == REF_WEAK ? "weak" : ( \
958 (r) == REF_PHANTOM ? "phantom" : "UNKNOWN" )))
959 LOGD_HEAP("dvmHeapHandleReferences(): cleared %zd, enqueued %zd %s references\n", numCleared, numEnqueued, refType2str(refType));
961 /* Walk though the reference list again, and mark any non-clear/marked
962 * referents. Only PhantomReferences can have non-clear referents
 */
965 if (refType == REF_PHANTOM) {
966 bool scanRequired = false;
968 HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_REFERENCE_CLEANUP, 0);
969 reference = refListHead;
970 while (reference != NULL) {
974 /* Pull the interesting fields out of the Reference object.
976 next = dvmGetFieldObject(reference, offVmData);
977 referent = dvmGetFieldObject(reference, offReferent);
979 if (referent != NULL && !isMarked(ptr2chunk(referent), markContext)) {
980 markObjectNonNull(referent, markContext);
983 /* Let later GCs know not to reschedule this reference.
985 dvmSetFieldObject(reference, offVmData,
986 SCHEDULED_REFERENCE_MAGIC);
991 HPROF_CLEAR_GC_SCAN_STATE();
/* Recursively mark everything the surviving referents point to. */
994 processMarkStack(markContext);
/* Wake the heap worker so scheduled clear/enqueue calls actually run. */
999 dvmSignalHeapWorker(false);
1004 /* Find unreachable objects that need to be finalized,
1005 * and schedule them for finalization.
 */
/* Moves unmarked entries from finalizableRefs into a new pending table
 * (compacting the source tables in place), appends that table to
 * pendingFinalizationRefs, then marks the moved objects and everything
 * they reach so the sweep keeps them alive until finalization runs.
 * NOTE(review): several declarations (ref, lastRef, gapRef, hc), the
 * compaction copy and some braces are elided — confirm upstream. */
1007 void dvmHeapScheduleFinalizations()
1009 HeapRefTable newPendingRefs;
1010 LargeHeapRefTable *finRefs = gDvm.gcHeap->finalizableRefs;
1013 size_t totalPendCount;
1014 GcMarkContext *markContext = &gDvm.gcHeap->markContext;
1017 * All reachable objects have been marked.
1018 * Any unmarked finalizable objects need to be finalized.
 */
1021 /* Create a table that the new pending refs will
 */
1024 if (!dvmHeapInitHeapRefTable(&newPendingRefs, 128)) {
1025 //TODO: mark all finalizable refs and hope that
1026 // we can schedule them next time. Watch out,
1027 // because we may be expecting to free up space
1028 // by calling finalizers.
1029 LOGE_GC("dvmHeapScheduleFinalizations(): no room for "
1030 "pending finalizations\n");
1034 /* Walk through finalizableRefs and move any unmarked references
1035 * to the list of new pending refs.
 */
1038 while (finRefs != NULL) {
1040 size_t newPendCount = 0;
/* gapRef trails ref, compacting marked entries over moved-out holes. */
1042 gapRef = ref = finRefs->refs.table;
1043 lastRef = finRefs->refs.nextEntry;
1044 while (ref < lastRef) {
1047 hc = ptr2chunk(*ref);
1048 if (!isMarked(hc, markContext)) {
1049 if (!dvmHeapAddToHeapRefTable(&newPendingRefs, *ref)) {
1050 //TODO: add the current table and allocate
1051 // a new, smaller one.
1052 LOGE_GC("dvmHeapScheduleFinalizations(): "
1053 "no room for any more pending finalizations: %zd\n",
1054 dvmHeapNumHeapRefTableEntries(&newPendingRefs));
1059 /* This ref is marked, so will remain on finalizableRefs.
1061 if (newPendCount > 0) {
1062 /* Copy it up to fill the holes.
1066 /* No holes yet; don't bother copying.
 */
/* Truncate this table to just the compacted marked entries. */
1073 finRefs->refs.nextEntry = gapRef;
1074 //TODO: if the table is empty when we're done, free it.
1075 totalPendCount += newPendCount;
1076 finRefs = finRefs->next;
1078 LOGD_GC("dvmHeapScheduleFinalizations(): %zd finalizers triggered.\n",
1080 if (totalPendCount == 0) {
1081 /* No objects required finalization.
1082 * Free the empty temporary table.
1084 dvmClearReferenceTable(&newPendingRefs);
1088 /* Add the new pending refs to the main list.
1090 if (!dvmHeapAddTableToLargeTable(&gDvm.gcHeap->pendingFinalizationRefs,
1093 LOGE_GC("dvmHeapScheduleFinalizations(): can't insert new "
1094 "pending finalizations\n");
1098 //TODO: try compacting the main list with a memcpy loop
1100 /* Mark the refs we just moved; we don't want them or their
1101 * children to get swept yet.
1103 ref = newPendingRefs.table;
1104 lastRef = newPendingRefs.nextEntry;
1105 assert(ref < lastRef);
1106 HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_FINALIZING, 0);
1107 while (ref < lastRef) {
1108 markObjectNonNull(*ref, markContext);
1111 HPROF_CLEAR_GC_SCAN_STATE();
1113 /* Set markAllReferents so that we don't collect referents whose
1114 * only references are in final-reachable objects.
1115 * TODO: eventually provide normal reference behavior by properly
1116 * marking these references.
1118 gDvm.gcHeap->markAllReferents = true;
1119 processMarkStack(markContext);
1120 gDvm.gcHeap->markAllReferents = false;
/* Wake the heap worker so the finalizers actually get run. */
1122 dvmSignalHeapWorker(false);
/* Tear down the mark phase: install the mark bitmaps as the HeapSource's
 * new object bitmaps (the old ones come back for cleanup), delete the
 * old bitmaps, destroy the mark stack, and zero the mark context. */
1125 void dvmHeapFinishMarkStep()
1127 HeapBitmap *markBitmap;
1128 HeapBitmap objectBitmap;
1129 GcMarkContext *markContext;
1131 markContext = &gDvm.gcHeap->markContext;
1133 /* The sweep step freed every object that appeared in the
1134 * HeapSource bitmaps that didn't appear in the mark bitmaps.
1135 * The new state of the HeapSource is exactly the final
1136 * mark bitmaps, so swap them in.
1138 * The old bitmaps will be swapped into the context so that
1139 * we can clean them up.
1141 dvmHeapSourceReplaceObjectBitmaps(markContext->bitmaps,
1142 markContext->numBitmaps);
1144 /* Clean up the old HeapSource bitmaps and anything else associated
1145 * with the marking process.
1147 dvmHeapBitmapDeleteList(markContext->bitmaps, markContext->numBitmaps);
1148 destroyMarkStack(&markContext->stack);
1150 memset(markContext, 0, sizeof(*markContext));
/* Optional hprof support: dump objects that were NOT marked (i.e. about
 * to be swept) as "unreachable" records. */
1153 #if WITH_HPROF && WITH_HPROF_UNREACHABLE
/* XOR-walk callback: each ptr is an unmarked chunk; report it to hprof. */
1155 hprofUnreachableBitmapCallback(size_t numPtrs, void **ptrs,
1156 const void *finger, void *arg)
1158 hprof_context_t *hctx = (hprof_context_t *)arg;
1161 for (i = 0; i < numPtrs; i++) {
1164 /* The pointers we're getting back are DvmHeapChunks, not
1167 obj = (Object *)chunk2ptr(*ptrs++);
1169 hprofMarkRootObject(hctx, obj, 0);
1170 hprofDumpHeapObject(hctx, obj);
/* Walk mark-vs-object bitmap differences, dumping every unmarked object. */
1177 hprofDumpUnmarkedObjects(const HeapBitmap markBitmaps[],
1178 const HeapBitmap objectBitmaps[], size_t numBitmaps)
1180 hprof_context_t *hctx = gDvm.gcHeap->hprofContext;
1185 LOGI("hprof: dumping unreachable objects\n");
1187 HPROF_SET_GC_SCAN_STATE(HPROF_UNREACHABLE, 0);
1189 dvmHeapBitmapXorWalkLists(markBitmaps, objectBitmaps, numBitmaps,
1190 hprofUnreachableBitmapCallback, hctx);
1192 HPROF_CLEAR_GC_SCAN_STATE();
/* XOR-walk callback for the sweep: every ptr is an unmarked (dead)
 * chunk.  Frees the monitor, optionally scribbles over the memory for
 * stale-pointer debugging, and returns the chunk to the HeapSource.
 * NOTE(review): declarations (i, hc, obj, chunklen) and several braces /
 * conditional lines are elided in this extract — confirm upstream. */
1197 sweepBitmapCallback(size_t numPtrs, void **ptrs, const void *finger, void *arg)
1199 const ClassObject *const classJavaLangClass = gDvm.classJavaLangClass;
1202 for (i = 0; i < numPtrs; i++) {
1206 /* The pointers we're getting back are DvmHeapChunks, not
1209 hc = (DvmHeapChunk *)*ptrs++;
1210 obj = (Object *)chunk2ptr(hc);
/* Debug-only: a chunk marked this generation must never be swept. */
1212 #if WITH_OBJECT_HEADERS
1213 if (hc->markGeneration == gGeneration) {
1214 LOGE("sweeping marked object: 0x%08x\n", (uint)obj);
1219 /* Free the monitor associated with the object.
1221 dvmFreeObjectMonitor(obj);
1223 /* NOTE: Dereferencing clazz is dangerous. If obj was the last
1224 * one to reference its class object, the class object could be
1225 * on the sweep list, and could already have been swept, leaving
1226 * us with a stale pointer.
1228 LOGV_SWEEP("FREE: 0x%08x %s\n", (uint)obj, obj->clazz->name);
1230 /* This assumes that java.lang.Class will never go away.
1231 * If it can, and we were the last reference to it, it
1232 * could have already been swept. However, even in that case,
1233 * gDvm.classJavaLangClass should still have a useful
1236 if (obj->clazz == classJavaLangClass) {
1237 LOGV_SWEEP("---------------> %s\n", ((ClassObject *)obj)->name);
1238 /* dvmFreeClassInnards() may have already been called,
1239 * but it's safe to call on the same ClassObject twice.
1241 dvmFreeClassInnards((ClassObject *)obj);
1245 /* Overwrite the to-be-freed object to make stale references
 */
1250 ClassObject *clazz = obj->clazz;
1251 #if WITH_OBJECT_HEADERS
1252 DvmHeapChunk chunk = *hc;
1253 chunk.header = ~OBJECT_HEADER | 1;
/* Fill the dead chunk with a poison byte; flip the clazz bits so any
 * stale use is obviously bogus. */
1255 chunklen = dvmHeapSourceChunkSize(hc);
1256 memset(hc, 0xa5, chunklen);
1257 obj->clazz = (ClassObject *)((uintptr_t)clazz ^ 0xffffffff);
1258 #if WITH_OBJECT_HEADERS
1264 //TODO: provide a heapsource function that takes a list of pointers to free
1265 // and call it outside of this loop.
1266 dvmHeapSourceFree(hc);
1272 /* A function suitable for passing to dvmHashForeachRemove()
1273 * to clear out any unmarked objects. Clears the low bits
1274 * of the pointer because the intern table may set them.
 */
/* Returns nonzero (remove) when the object's mark bit is NOT set.
 * HB_OBJECT_ALIGNMENT masking strips intern-table tag bits first. */
1276 static int isUnmarkedObject(void *object)
1278 return !isMarked(ptr2chunk((uintptr_t)object & ~(HB_OBJECT_ALIGNMENT-1)),
1279 &gDvm.gcHeap->markContext);
1282 /* Walk through the list of objects that haven't been
1283 * marked and free them.
1286 dvmHeapSweepUnmarkedObjects(int *numFreed, size_t *sizeFreed)
1288 const HeapBitmap *markBitmaps;
1289 const GcMarkContext *markContext;
1290 HeapBitmap objectBitmaps[HEAP_SOURCE_MAX_HEAP_COUNT];
1291 size_t origObjectsAllocated;
1292 size_t origBytesAllocated;
1295 /* All reachable objects have been marked.
1296 * Detach any unreachable interned strings before
1299 dvmGcDetachDeadInternedStrings(isUnmarkedObject);
1301 /* Free any known objects that are not marked.
1303 origObjectsAllocated = dvmHeapSourceGetValue(HS_OBJECTS_ALLOCATED, NULL, 0);
1304 origBytesAllocated = dvmHeapSourceGetValue(HS_BYTES_ALLOCATED, NULL, 0);
1306 markContext = &gDvm.gcHeap->markContext;
1307 markBitmaps = markContext->bitmaps;
1308 numBitmaps = dvmHeapSourceGetObjectBitmaps(objectBitmaps,
1309 HEAP_SOURCE_MAX_HEAP_COUNT);
1311 if (numBitmaps != markContext->numBitmaps) {
1312 LOGE("heap bitmap count mismatch: %zd != %zd\n",
1313 numBitmaps, markContext->numBitmaps);
1318 #if WITH_HPROF && WITH_HPROF_UNREACHABLE
1319 hprofDumpUnmarkedObjects(markBitmaps, objectBitmaps, numBitmaps);
1322 dvmHeapBitmapXorWalkLists(markBitmaps, objectBitmaps, numBitmaps,
1323 sweepBitmapCallback, NULL);
1325 *numFreed = origObjectsAllocated -
1326 dvmHeapSourceGetValue(HS_OBJECTS_ALLOCATED, NULL, 0);
1327 *sizeFreed = origBytesAllocated -
1328 dvmHeapSourceGetValue(HS_BYTES_ALLOCATED, NULL, 0);
1330 #ifdef WITH_PROFILER
1331 if (gDvm.allocProf.enabled) {
1332 gDvm.allocProf.freeCount += *numFreed;
1333 gDvm.allocProf.freeSize += *sizeFreed;