BaseHandleScope* const handle_scope_;
};
+// Number of references allocated in JNI ShadowFrames on the given thread.
+// Delegates to the managed stack; annotated because the managed stack is
+// only stable while the mutator lock is held.
+static size_t NumJniShadowFrameReferences(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_) {
+ return self->GetManagedStack()->NumJniShadowFrameReferences();
+}
+
+// Number of references in handle scope on the given thread.
+// Walks the singly-linked chain of handle scopes from the thread's top
+// scope and sums each scope's reference count.
+static size_t NumHandleReferences(Thread* self) {
+ size_t count = 0;
+ for (BaseHandleScope* cur = self->GetTopHandleScope(); cur != nullptr; cur = cur->GetLink()) {
+ count += cur->NumberOfReferences();
+ }
+ return count;
+}
+
+// Number of references allocated in handle scopes & JNI shadow frames on this thread.
+// Carries REQUIRES_SHARED because NumJniShadowFrameReferences() needs it;
+// NumHandleReferences() itself has no lock requirement.
+static size_t NumStackReferences(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_) {
+ return NumHandleReferences(self) + NumJniShadowFrameReferences(self);
+}
+
// Expects the current thread's stack-reference count to equal val1 or val2.
// Two accepted values because null references are sometimes placed and
// sometimes not (see note below).
static void expectNumStackReferences(size_t val1, size_t val2) {
// In rare cases when JNI functions call themselves recursively,
// disable this test because it will have a false negative.
/* @CriticalNative doesn't build a HandleScope, so this test is meaningless then. */
ScopedObjectAccess soa(Thread::Current());
- size_t actual_num = Thread::Current()->NumStackReferences();
+ size_t actual_num = NumStackReferences(Thread::Current());
// XX: Not too sure what's going on.
// Sometimes null references get placed and sometimes they don't?
// NOTE(review): the original EXPECT_TRUE line lacked a terminating ';'
// (its '<<' failure-message continuation was evidently lost); restored the
// semicolon so the statement is well-formed.
EXPECT_TRUE(val1 == actual_num || val2 == actual_num);
}
}
-size_t Thread::NumHandleReferences() {
- size_t count = 0;
- for (BaseHandleScope* cur = tlsPtr_.top_handle_scope; cur != nullptr; cur = cur->GetLink()) {
- count += cur->NumberOfReferences();
- }
- return count;
-}
-
bool Thread::HandleScopeContains(jobject obj) const {
StackReference<mirror::Object>* hs_entry =
reinterpret_cast<StackReference<mirror::Object>*>(obj);
tlsPtr_.stack_end = tlsPtr_.stack_begin + GetStackOverflowReservedBytes(kRuntimeISA);
}
- // Install the protected region for implicit stack checks.
- void InstallImplicitProtection();
-
bool IsHandlingStackOverflow() const {
return tlsPtr_.stack_end == tlsPtr_.stack_begin;
}
ManagedStack::TopShadowFrameOffset());
}
- // Number of references allocated in JNI ShadowFrames on this thread.
- size_t NumJniShadowFrameReferences() const REQUIRES_SHARED(Locks::mutator_lock_) {
- return tlsPtr_.managed_stack.NumJniShadowFrameReferences();
- }
-
- // Number of references in handle scope on this thread.
- size_t NumHandleReferences();
-
- // Number of references allocated in handle scopes & JNI shadow frames on this thread.
- size_t NumStackReferences() REQUIRES_SHARED(Locks::mutator_lock_) {
- return NumHandleReferences() + NumJniShadowFrameReferences();
- }
-
// Is the given obj in this thread's stack indirect reference table?
bool HandleScopeContains(jobject obj) const;
tlsPtr_.held_mutexes[level] = mutex;
}
- void RunCheckpointFunction();
-
- bool PassActiveSuspendBarriers(Thread* self)
- REQUIRES(!Locks::thread_suspend_count_lock_);
-
void ClearSuspendBarrier(AtomicInteger* target)
REQUIRES(Locks::thread_suspend_count_lock_);
bool for_debugger)
REQUIRES(Locks::thread_suspend_count_lock_);
+ void RunCheckpointFunction();
+
+ bool PassActiveSuspendBarriers(Thread* self)
+ REQUIRES(!Locks::thread_suspend_count_lock_);
+
+ // Install the protected region for implicit stack checks.
+ void InstallImplicitProtection();
+
// 32 bits of atomically changed state and flags. Keeping as 32 bits allows and atomic CAS to
// change from being Suspended to Runnable without a suspend request occurring.
union PACKED(4) StateAndFlags {