ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
ScopedObjectAccess soa(self);
StackHandleScope<1> hs(self);
- Handle<mirror::ClassLoader> h_loader(hs.NewHandle(
- reinterpret_cast<mirror::ClassLoader*>(self->DecodeJObject(class_loader))));
+ Handle<mirror::ClassLoader> h_loader(
+ hs.NewHandle(soa.Decode<mirror::ClassLoader>(class_loader)));
mirror::Class* klass = class_linker->FindClass(self, "LStaticLeafMethods;", h_loader);
ASSERT_NE(klass, nullptr);
Thread* self = Thread::Current();
ScopedObjectAccess soa(self);
StackHandleScope<1> hs(self);
- Handle<mirror::ClassLoader> h_loader(hs.NewHandle(
- reinterpret_cast<mirror::ClassLoader*>(self->DecodeJObject(class_loader))));
+ Handle<mirror::ClassLoader> h_loader(
+ hs.NewHandle(soa.Decode<mirror::ClassLoader>(class_loader)));
mirror::Class* klass = class_linker->FindClass(self, clazz.c_str(), h_loader);
ASSERT_NE(klass, nullptr);
Thread* const self = Thread::Current();
ReaderMutexLock mu(self, *class_linker->DexLock());
for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
- mirror::DexCache* dex_cache =
- down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
- if (dex_cache == nullptr || IsInBootImage(dex_cache)) {
+ ObjPtr<mirror::DexCache> dex_cache =
+ ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
+ if (dex_cache == nullptr || IsInBootImage(dex_cache.Ptr())) {
continue;
}
const DexFile* dex_file = dex_cache->GetDexFile();
}
}
-void ImageWriter::AddDexCacheArrayRelocation(void* array, size_t offset, DexCache* dex_cache) {
+void ImageWriter::AddDexCacheArrayRelocation(void* array,
+ size_t offset,
+ ObjPtr<mirror::DexCache> dex_cache) {
if (array != nullptr) {
DCHECK(!IsInBootImage(array));
size_t oat_index = GetOatIndexForDexCache(dex_cache);
if (self->IsJWeakCleared(data.weak_root)) {
continue;
}
- mirror::DexCache* dex_cache = self->DecodeJObject(data.weak_root)->AsDexCache();
+ ObjPtr<mirror::DexCache> dex_cache = self->DecodeJObject(data.weak_root)->AsDexCache();
for (size_t i = 0; i < dex_cache->NumResolvedTypes(); i++) {
Class* klass = dex_cache->GetResolvedType(i);
if (klass != nullptr && !KeepClass(klass)) {
ReaderMutexLock mu(self, *class_linker->DexLock());
// Count number of dex caches not in the boot image.
for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
- mirror::DexCache* dex_cache =
- down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
+ ObjPtr<mirror::DexCache> dex_cache =
+ ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
if (dex_cache == nullptr) {
continue;
}
const DexFile* dex_file = dex_cache->GetDexFile();
- if (!IsInBootImage(dex_cache)) {
+ if (!IsInBootImage(dex_cache.Ptr())) {
dex_cache_count += image_dex_files.find(dex_file) != image_dex_files.end() ? 1u : 0u;
}
}
size_t non_image_dex_caches = 0;
// Re-count number of non image dex caches.
for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
- mirror::DexCache* dex_cache =
- down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
+ ObjPtr<mirror::DexCache> dex_cache =
+ ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
if (dex_cache == nullptr) {
continue;
}
const DexFile* dex_file = dex_cache->GetDexFile();
- if (!IsInBootImage(dex_cache)) {
+ if (!IsInBootImage(dex_cache.Ptr())) {
non_image_dex_caches += image_dex_files.find(dex_file) != image_dex_files.end() ? 1u : 0u;
}
}
<< "The number of non-image dex caches changed.";
size_t i = 0;
for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
- mirror::DexCache* dex_cache =
- down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
+ ObjPtr<mirror::DexCache> dex_cache =
+ ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
if (dex_cache == nullptr) {
continue;
}
const DexFile* dex_file = dex_cache->GetDexFile();
- if (!IsInBootImage(dex_cache) && image_dex_files.find(dex_file) != image_dex_files.end()) {
- dex_caches->Set<false>(i, dex_cache);
+ if (!IsInBootImage(dex_cache.Ptr()) &&
+ image_dex_files.find(dex_file) != image_dex_files.end()) {
+ dex_caches->Set<false>(i, dex_cache.Ptr());
++i;
}
}
return it->second;
}
-size_t ImageWriter::GetOatIndexForDexCache(mirror::DexCache* dex_cache) const {
- if (dex_cache == nullptr) {
- return GetDefaultOatIndex();
- } else {
- return GetOatIndexForDexFile(dex_cache->GetDexFile());
- }
+size_t ImageWriter::GetOatIndexForDexCache(ObjPtr<mirror::DexCache> dex_cache) const {
+ return (dex_cache == nullptr)
+ ? GetDefaultOatIndex()
+ : GetOatIndexForDexFile(dex_cache->GetDexFile());
}
void ImageWriter::UpdateOatFileLayout(size_t oat_index,
size_t GetOatIndexForDexFile(const DexFile* dex_file) const;
// Get the index of the oat file containing the dex file served by the dex cache.
- size_t GetOatIndexForDexCache(mirror::DexCache* dex_cache) const
+ size_t GetOatIndexForDexCache(ObjPtr<mirror::DexCache> dex_cache) const
REQUIRES_SHARED(Locks::mutator_lock_);
// Update the oat layout for the given oat file.
REQUIRES_SHARED(Locks::mutator_lock_);
BinSlot GetImageBinSlot(mirror::Object* object) const REQUIRES_SHARED(Locks::mutator_lock_);
- void AddDexCacheArrayRelocation(void* array, size_t offset, mirror::DexCache* dex_cache)
+ void AddDexCacheArrayRelocation(void* array, size_t offset, ObjPtr<mirror::DexCache> dex_cache)
REQUIRES_SHARED(Locks::mutator_lock_);
void AddMethodPointerArray(mirror::PointerArray* arr) REQUIRES_SHARED(Locks::mutator_lock_);
{
ReaderMutexLock mu(self, *class_linker->DexLock());
for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
- mirror::DexCache* dex_cache =
- down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
+ ObjPtr<mirror::DexCache> dex_cache =
+ ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
if (dex_cache != nullptr) {
- dex_caches_.insert(dex_cache);
+ dex_caches_.insert(dex_cache.Ptr());
}
}
}
okay = false;
} else {
obj = soa.Vm()->DecodeWeakGlobal(soa.Self(), ref);
- okay = Runtime::Current()->IsClearedJniWeakGlobal(obj.Ptr());
+ okay = Runtime::Current()->IsClearedJniWeakGlobal(obj);
}
if (!okay) {
AbortF("%s is an invalid %s: %p (%p)",
#include "mirror/iftable.h"
#include "mirror/object_array.h"
#include "handle_scope-inl.h"
+#include "scoped_thread_state_change-inl.h"
#include <atomic>
if (!self->IsJWeakCleared(data.weak_root) &&
proxy_method->HasSameDexCacheResolvedTypes(data.resolved_types,
image_pointer_size_)) {
- mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(
- self->DecodeJObject(data.weak_root));
+ ObjPtr<mirror::DexCache> dex_cache =
+ ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
if (dex_cache != nullptr) {
ArtMethod* resolved_method = dex_cache->GetResolvedMethod(
proxy_method->GetDexMethodIndex(), image_pointer_size_);
for (const DexCacheData& data : dex_caches_) {
// Avoid decoding (and read barriers) other unrelated dex caches.
if (data.dex_file == &dex_file) {
- mirror::DexCache* dex_cache =
- down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
+ ObjPtr<mirror::DexCache> dex_cache =
+ ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
if (dex_cache != nullptr) {
- return dex_cache;
- } else {
- break;
+ return dex_cache.Ptr();
}
+ break;
}
}
if (allow_failure) {
std::string location(dex_file.GetLocation());
// Failure, dump diagnostic and abort.
for (const DexCacheData& data : dex_caches_) {
- mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
+ ObjPtr<mirror::DexCache> dex_cache =
+ ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
if (dex_cache != nullptr) {
LOG(ERROR) << "Registered dex file " << dex_cache->GetDexFile()->GetLocation();
}
ReaderMutexLock mu(self, dex_lock_);
for (const DexCacheData& data : dex_caches_) {
if (!self->IsJWeakCleared(data.weak_root)) {
- mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(
+ ObjPtr<mirror::DexCache> dex_cache = ObjPtr<mirror::DexCache>::DownCast(
self->DecodeJObject(data.weak_root));
if (dex_cache != nullptr) {
dex_cache->Fixup(resolution_method, image_pointer_size_);
Thread* const self = Thread::Current();
for (const ClassLoaderData& data : class_loaders_) {
// Need to use DecodeJObject so that we get null for cleared JNI weak globals.
- auto* const class_loader = down_cast<mirror::ClassLoader*>(self->DecodeJObject(data.weak_root));
+ ObjPtr<mirror::ClassLoader> class_loader = ObjPtr<mirror::ClassLoader>::DownCast(
+ self->DecodeJObject(data.weak_root));
if (class_loader != nullptr) {
- visitor->Visit(class_loader);
+ visitor->Visit(class_loader.Ptr());
}
}
}
for (auto it = class_loaders_.begin(); it != class_loaders_.end(); ) {
const ClassLoaderData& data = *it;
// Need to use DecodeJObject so that we get null for cleared JNI weak globals.
- auto* const class_loader =
- down_cast<mirror::ClassLoader*>(self->DecodeJObject(data.weak_root));
+ ObjPtr<mirror::ClassLoader> class_loader =
+ ObjPtr<mirror::ClassLoader>::DownCast(self->DecodeJObject(data.weak_root));
if (class_loader != nullptr) {
++it;
} else {
if (soa.Self()->IsJWeakCleared(data.weak_root)) {
continue;
}
- mirror::DexCache* dex_cache =
- down_cast<mirror::DexCache*>(soa.Self()->DecodeJObject(data.weak_root));
+ ObjPtr<mirror::DexCache> dex_cache = soa.Decode<mirror::DexCache>(data.weak_root);
if (dex_cache == nullptr) {
continue;
}
ReaderMutexLock mu(self, *DexLock());
for (const ClassLinker::DexCacheData& data : GetDexCachesData()) {
if (!self->IsJWeakCleared(data.weak_root)) {
- mirror::DexCache* dex_cache =
- down_cast<mirror::DexCache*>(soa.Self()->DecodeJObject(data.weak_root));
+ ObjPtr<mirror::DexCache> dex_cache = soa.Decode<mirror::DexCache>(data.weak_root);
if (dex_cache != nullptr) {
const DexFile* dex_file = dex_cache->GetDexFile();
// There could be duplicates if two dex files with the same location are mapped.
{
ReaderMutexLock mu(soa.Self(), *class_linker->DexLock());
for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
- dex_cache.Assign(down_cast<mirror::DexCache*>(soa.Self()->DecodeJObject(data.weak_root)));
+ dex_cache.Assign(soa.Self()->DecodeJObject(data.weak_root)->AsDexCache());
if (dex_cache.Get() != nullptr) {
break;
}
error_msg = "Could not create stack trace.";
}
// Throw the exception.
- self->SetException(reinterpret_cast<mirror::Throwable*>(self->DecodeJObject(exc.get())));
+ self->SetException(self->DecodeJObject(exc.get())->AsThrowable());
} else {
// Could not allocate a string object.
error_msg = "Couldn't throw new StackOverflowError because JNI NewStringUTF failed.";
}
Handle<mirror::Object> value_object(hs.NewHandle(annotation_value.value_.GetL()));
- mirror::Class* annotation_member_class =
+ ObjPtr<mirror::Class> annotation_member_class =
WellKnownClasses::ToClass(WellKnownClasses::libcore_reflect_AnnotationMember);
Handle<mirror::Object> new_member(hs.NewHandle(annotation_member_class->AllocObject(self)));
mirror::Method* method_obj_ptr;
Thread* self)
NO_THREAD_SAFETY_ANALYSIS {
// Must decode before pop. The 'result' may not be valid in case of an exception, though.
- mirror::Object* o = self->IsExceptionPending() ? nullptr : self->DecodeJObject(result);
+ ObjPtr<mirror::Object> o;
+ if (!self->IsExceptionPending()) {
+ o = self->DecodeJObject(result);
+ }
PopLocalReferences(saved_local_ref_cookie, self);
// Process result.
if (UNLIKELY(self->GetJniEnv()->check_jni)) {
// CheckReferenceResult can resolve types.
StackHandleScope<1> hs(self);
- HandleWrapper<mirror::Object> h_obj(hs.NewHandleWrapper(&o));
+ HandleWrapperObjPtr<mirror::Object> h_obj(hs.NewHandleWrapper(&o));
CheckReferenceResult(h_obj, self);
}
- VerifyObject(o);
- return o;
+ VerifyObject(o.Ptr());
+ return o.Ptr();
}
extern mirror::Object* JniMethodEndWithReference(jobject result,
size_t data_offset = mirror::Array::DataOffset(component_size).SizeValue();
if (data_offset > byte_size) {
// An int array is too big. Use java.lang.Object.
- mirror::Class* java_lang_Object = WellKnownClasses::ToClass(WellKnownClasses::java_lang_Object);
- AssertToSpaceInvariant(nullptr, MemberOffset(0), java_lang_Object);
+ ObjPtr<mirror::Class> java_lang_Object =
+ WellKnownClasses::ToClass(WellKnownClasses::java_lang_Object);
+ AssertToSpaceInvariant(nullptr, MemberOffset(0), java_lang_Object.Ptr());
CHECK_EQ(byte_size, (java_lang_Object->GetObjectSize<kVerifyNone, kWithoutReadBarrier>()));
- dummy_obj->SetClass(java_lang_Object);
+ dummy_obj->SetClass(java_lang_Object.Ptr());
CHECK_EQ(byte_size, (dummy_obj->SizeOf<kVerifyNone, kWithoutReadBarrier>()));
} else {
// Use an int array.
byte_array_class_ = self->GetJniEnv()->NewLocalRef(byte_array_class);
EXPECT_TRUE(byte_array_class_ != nullptr);
}
- return reinterpret_cast<mirror::Class*>(self->DecodeJObject(byte_array_class_));
+ return self->DecodeJObject(byte_array_class_)->AsClass();
}
mirror::Object* Alloc(space::MallocSpace* alloc_space,
result->SetJ(bit_cast<int64_t, double>(in));
}
-static mirror::Object* GetDexFromDexCache(Thread* self, mirror::DexCache* dex_cache)
+static ObjPtr<mirror::Object> GetDexFromDexCache(Thread* self, mirror::DexCache* dex_cache)
REQUIRES_SHARED(Locks::mutator_lock_) {
const DexFile* dex_file = dex_cache->GetDexFile();
if (dex_file == nullptr) {
mirror::Object* src = shadow_frame->GetVRegReference(arg_offset);
bool have_dex = false;
if (src != nullptr) {
- mirror::Object* dex = GetDexFromDexCache(self, reinterpret_cast<mirror::DexCache*>(src));
+ ObjPtr<mirror::Object> dex = GetDexFromDexCache(self, src->AsDexCache());
if (dex != nullptr) {
have_dex = true;
- result->SetL(dex);
+ result->SetL(dex.Ptr());
}
}
if (!have_dex) {
ScopedLocalRef<jobject> result_jobj(env,
InvokeMethod(soa, java_method.get(), java_receiver.get(), java_args.get()));
- result->SetL(self->DecodeJObject(result_jobj.get()));
+ result->SetL(self->DecodeJObject(result_jobj.get()).Ptr());
// Conservatively flag all exceptions as transaction aborts. This way we don't need to unwrap
// InvocationTargetExceptions.
weak_globals_add_condition_.Broadcast(self);
}
-mirror::Object* JavaVMExt::DecodeGlobal(IndirectRef ref) {
- return globals_.SynchronizedGet(ref).Ptr();
+ObjPtr<mirror::Object> JavaVMExt::DecodeGlobal(IndirectRef ref) {
+ return globals_.SynchronizedGet(ref);
}
-void JavaVMExt::UpdateGlobal(Thread* self, IndirectRef ref, mirror::Object* result) {
+void JavaVMExt::UpdateGlobal(Thread* self, IndirectRef ref, ObjPtr<mirror::Object> result) {
WriterMutexLock mu(self, globals_lock_);
globals_.Update(ref, result);
}
allow_accessing_weak_globals_.LoadSequentiallyConsistent();
}
-mirror::Object* JavaVMExt::DecodeWeakGlobal(Thread* self, IndirectRef ref) {
+ObjPtr<mirror::Object> JavaVMExt::DecodeWeakGlobal(Thread* self, IndirectRef ref) {
// It is safe to access GetWeakRefAccessEnabled without the lock since CC uses checkpoints to call
// SetWeakRefAccessEnabled, and the other collectors only modify allow_accessing_weak_globals_
// when the mutators are paused.
// if MayAccessWeakGlobals is false.
DCHECK_EQ(GetIndirectRefKind(ref), kWeakGlobal);
if (LIKELY(MayAccessWeakGlobalsUnlocked(self))) {
- return weak_globals_.SynchronizedGet(ref).Ptr();
+ return weak_globals_.SynchronizedGet(ref);
}
MutexLock mu(self, weak_globals_lock_);
return DecodeWeakGlobalLocked(self, ref);
}
-mirror::Object* JavaVMExt::DecodeWeakGlobalLocked(Thread* self, IndirectRef ref) {
+ObjPtr<mirror::Object> JavaVMExt::DecodeWeakGlobalLocked(Thread* self, IndirectRef ref) {
if (kDebugLocking) {
weak_globals_lock_.AssertHeld(self);
}
while (UNLIKELY(!MayAccessWeakGlobals(self))) {
weak_globals_add_condition_.WaitHoldingLocks(self);
}
- return weak_globals_.Get(ref).Ptr();
+ return weak_globals_.Get(ref);
}
-mirror::Object* JavaVMExt::DecodeWeakGlobalDuringShutdown(Thread* self, IndirectRef ref) {
+ObjPtr<mirror::Object> JavaVMExt::DecodeWeakGlobalDuringShutdown(Thread* self, IndirectRef ref) {
DCHECK_EQ(GetIndirectRefKind(ref), kWeakGlobal);
DCHECK(Runtime::Current()->IsShuttingDown(self));
if (self != nullptr) {
if (!kUseReadBarrier) {
DCHECK(allow_accessing_weak_globals_.LoadSequentiallyConsistent());
}
- return weak_globals_.SynchronizedGet(ref).Ptr();
+ return weak_globals_.SynchronizedGet(ref);
}
// Returns true if the weak global has been cleared by the GC. Reads the slot
// without a read barrier so the check does not accidentally mark the referent
// alive. NOTE(review): this hunk may elide locking/wait lines present in the
// full function — confirm against the complete source before relying on it.
bool JavaVMExt::IsWeakGlobalCleared(Thread* self, IndirectRef ref) {
  return Runtime::Current()->IsClearedJniWeakGlobal(weak_globals_.Get<kWithoutReadBarrier>(ref));
}
-void JavaVMExt::UpdateWeakGlobal(Thread* self, IndirectRef ref, mirror::Object* result) {
+void JavaVMExt::UpdateWeakGlobal(Thread* self, IndirectRef ref, ObjPtr<mirror::Object> result) {
MutexLock mu(self, weak_globals_lock_);
weak_globals_.Update(ref, result);
}
void SweepJniWeakGlobals(IsMarkedVisitor* visitor)
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!weak_globals_lock_);
- mirror::Object* DecodeGlobal(IndirectRef ref)
+ ObjPtr<mirror::Object> DecodeGlobal(IndirectRef ref)
REQUIRES_SHARED(Locks::mutator_lock_);
- void UpdateGlobal(Thread* self, IndirectRef ref, mirror::Object* result)
+ void UpdateGlobal(Thread* self, IndirectRef ref, ObjPtr<mirror::Object> result)
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!globals_lock_);
- mirror::Object* DecodeWeakGlobal(Thread* self, IndirectRef ref)
+ ObjPtr<mirror::Object> DecodeWeakGlobal(Thread* self, IndirectRef ref)
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(!weak_globals_lock_);
- mirror::Object* DecodeWeakGlobalLocked(Thread* self, IndirectRef ref)
+ ObjPtr<mirror::Object> DecodeWeakGlobalLocked(Thread* self, IndirectRef ref)
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(weak_globals_lock_);
// Like DecodeWeakGlobal() but to be used only during a runtime shutdown where self may be
// null.
- mirror::Object* DecodeWeakGlobalDuringShutdown(Thread* self, IndirectRef ref)
+ ObjPtr<mirror::Object> DecodeWeakGlobalDuringShutdown(Thread* self, IndirectRef ref)
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(!weak_globals_lock_);
return weak_globals_lock_;
}
- void UpdateWeakGlobal(Thread* self, IndirectRef ref, mirror::Object* result)
+ void UpdateWeakGlobal(Thread* self, IndirectRef ref, ObjPtr<mirror::Object> result)
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!weak_globals_lock_);
const JNIInvokeInterface* GetUncheckedFunctions() const {
}
ObjectRegistryEntry& entry = *it->second;
*error = JDWP::ERR_NONE;
- return self->DecodeJObject(entry.jni_reference);
+ return self->DecodeJObject(entry.jni_reference).Ptr();
}
jobject ObjectRegistry::GetJObject(JDWP::ObjectId id) {
Thread* self = Thread::Current();
ScopedObjectAccess soa(self);
StackHandleScope<1> hs(self);
- Handle<mirror::ClassLoader> h_loader(hs.NewHandle(
- reinterpret_cast<mirror::ClassLoader*>(self->DecodeJObject(class_loader))));
+ Handle<mirror::ClassLoader> h_loader(
+ hs.NewHandle(self->DecodeJObject(class_loader)->AsClassLoader()));
mirror::Class* klass = class_linker->FindClass(self, clazz.c_str(), h_loader);
const auto pointer_size = class_linker->GetImagePointerSize();
static std::string ComputeMonitorDescription(Thread* self,
jobject obj) REQUIRES_SHARED(Locks::mutator_lock_) {
- mirror::Object* o = self->DecodeJObject(obj);
+ ObjPtr<mirror::Object> o = self->DecodeJObject(obj);
if ((o->GetLockWord(false).GetState() == LockWord::kThinLocked) &&
Locks::mutator_lock_->IsExclusiveHeld(self)) {
// Getting the identity hashcode here would result in lock inflation and suspension of the
// current thread, which isn't safe if this is the only runnable thread.
return StringPrintf("<@addr=0x%" PRIxPTR "> (a %s)",
- reinterpret_cast<intptr_t>(o),
+ reinterpret_cast<intptr_t>(o.Ptr()),
PrettyTypeOf(o).c_str());
} else {
// IdentityHashCode can cause thread suspension, which would invalidate o if it moved. So
[self, frame, monitors](const std::pair<uintptr_t, jobject>& pair)
REQUIRES_SHARED(Locks::mutator_lock_) {
if (frame == pair.first) {
- mirror::Object* o = self->DecodeJObject(pair.second);
+ ObjPtr<mirror::Object> o = self->DecodeJObject(pair.second);
monitors->Remove(o);
return true;
}
locked_objects_.erase(it);
} else {
// Check whether this monitor was locked in another JNI "session."
- mirror::Object* mirror_obj = self->DecodeJObject(obj);
+ ObjPtr<mirror::Object> mirror_obj = self->DecodeJObject(obj);
for (std::pair<uintptr_t, jobject>& pair : locked_objects_) {
if (self->DecodeJObject(pair.second) == mirror_obj) {
std::string monitor_descr = ComputeMonitorDescription(self, pair.second);
static jobject NewGlobalRef(JNIEnv* env, jobject obj) {
ScopedObjectAccess soa(env);
ObjPtr<mirror::Object> decoded_obj = soa.Decode<mirror::Object>(obj);
- return soa.Vm()->AddGlobalRef(soa.Self(), decoded_obj.Ptr());
+ return soa.Vm()->AddGlobalRef(soa.Self(), decoded_obj);
}
static void DeleteGlobalRef(JNIEnv* env, jobject obj) {
static jweak NewWeakGlobalRef(JNIEnv* env, jobject obj) {
ScopedObjectAccess soa(env);
ObjPtr<mirror::Object> decoded_obj = soa.Decode<mirror::Object>(obj);
- return soa.Vm()->AddWeakGlobalRef(soa.Self(), decoded_obj.Ptr());
+ return soa.Vm()->AddWeakGlobalRef(soa.Self(), decoded_obj);
}
static void DeleteWeakGlobalRef(JNIEnv* env, jweak obj) {
if (soa.Self()->IsExceptionPending()) {
return JNI_ERR;
}
- soa.Env()->monitors.Add(o.Ptr());
+ soa.Env()->monitors.Add(o);
return JNI_OK;
}
if (soa.Self()->IsExceptionPending()) {
return JNI_ERR;
}
- soa.Env()->monitors.Remove(o.Ptr());
+ soa.Env()->monitors.Remove(o);
return JNI_OK;
}
}
}
+ // Static function to be friendly with null pointers.
+ template <typename SourceType>
+ static ObjPtr<MirrorType> DownCast(ObjPtr<SourceType> ptr) REQUIRES_SHARED(Locks::mutator_lock_) {
+ static_assert(std::is_base_of<SourceType, MirrorType>::value,
+ "Target type must be a subtype of source type");
+ return static_cast<MirrorType*>(ptr.Ptr());
+ }
+
private:
// Trim off high bits of thread local cookie.
ALWAYS_INLINE static uintptr_t TrimCookie(uintptr_t cookie) {
class_linker->FindClass(self, dex_file_name, null_loader)
->FindDeclaredInstanceField("mInternalCookie", "Ljava/lang/Object;");
CHECK(dex_file_cookie_field != nullptr);
- art::Handle<art::mirror::Class> klass(
- hs.NewHandle(art::down_cast<art::mirror::Class*>(self->DecodeJObject(jklass))));
+ art::Handle<art::mirror::Class> klass(hs.NewHandle(self->DecodeJObject(jklass)->AsClass()));
art::mirror::Object* dex_file_ptr = nullptr;
art::mirror::ClassLoader* class_loader_ptr = nullptr;
// Find dalvik.system.DexFile that represents the dex file we are changing.
// Nothing to release explicitly; entries_ cleans itself up.
ReferenceTable::~ReferenceTable() {
}
-void ReferenceTable::Add(mirror::Object* obj) {
+void ReferenceTable::Add(ObjPtr<mirror::Object> obj) {
DCHECK(obj != nullptr);
- VerifyObject(obj);
+ VerifyObject(obj.Ptr());
if (entries_.size() >= max_size_) {
LOG(FATAL) << "ReferenceTable '" << name_ << "' "
<< "overflowed (" << max_size_ << " entries)";
entries_.push_back(GcRoot<mirror::Object>(obj));
}
-void ReferenceTable::Remove(mirror::Object* obj) {
+void ReferenceTable::Remove(ObjPtr<mirror::Object> obj) {
// We iterate backwards on the assumption that references are LIFO.
for (int i = entries_.size() - 1; i >= 0; --i) {
- mirror::Object* entry = entries_[i].Read();
+ ObjPtr<mirror::Object> entry = entries_[i].Read();
if (entry == obj) {
entries_.erase(entries_.begin() + i);
return;
// If "obj" is an array, return the number of elements in the array.
// Otherwise, return zero.
-static size_t GetElementCount(mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
+static size_t GetElementCount(ObjPtr<mirror::Object> obj) REQUIRES_SHARED(Locks::mutator_lock_) {
// We assume the special cleared value isn't an array in the if statement below.
DCHECK(!Runtime::Current()->GetClearedJniWeakGlobal()->IsArrayInstance());
if (obj == nullptr || !obj->IsArrayInstance()) {
// Pass in the number of elements in the array (or 0 if this is not an
// array object), and the number of additional objects that are identical
// or equivalent to the original.
-static void DumpSummaryLine(std::ostream& os, mirror::Object* obj, size_t element_count,
+static void DumpSummaryLine(std::ostream& os, ObjPtr<mirror::Object> obj, size_t element_count,
int identical, int equiv)
REQUIRES_SHARED(Locks::mutator_lock_) {
if (obj == nullptr) {
// are no suspend points which can happen during the sorting process. This works since
// we are guaranteed that the addresses of obj1, obj2, obj1->GetClass, obj2->GetClass wont
// change during the sorting process. The classes are forwarded by ref->GetClass().
- mirror::Object* obj1 = root1.Read<kWithoutReadBarrier>();
- mirror::Object* obj2 = root2.Read<kWithoutReadBarrier>();
+ ObjPtr<mirror::Object> obj1 = root1.Read<kWithoutReadBarrier>();
+ ObjPtr<mirror::Object> obj2 = root2.Read<kWithoutReadBarrier>();
DCHECK(obj1 != nullptr);
DCHECK(obj2 != nullptr);
Runtime* runtime = Runtime::Current();
return size1 < size2;
}
// ...and finally by address.
- return obj1 < obj2;
+ return obj1.Ptr() < obj2.Ptr();
}
};
os << " Last " << (count - first) << " entries (of " << count << "):\n";
Runtime* runtime = Runtime::Current();
for (int idx = count - 1; idx >= first; --idx) {
- mirror::Object* ref = entries[idx].Read();
+ ObjPtr<mirror::Object> ref = entries[idx].Read();
if (ref == nullptr) {
continue;
}
if (ref->GetClass() == nullptr) {
// should only be possible right after a plain dvmMalloc().
size_t size = ref->SizeOf();
- os << StringPrintf(" %5d: %p (raw) (%zd bytes)\n", idx, ref, size);
+ os << StringPrintf(" %5d: %p (raw) (%zd bytes)\n", idx, ref.Ptr(), size);
continue;
}
if (element_count != 0) {
StringAppendF(&extras, " (%zd elements)", element_count);
} else if (ref->GetClass()->IsStringClass()) {
- mirror::String* s = ref->AsString();
+ ObjPtr<mirror::String> s = ref->AsString();
std::string utf8(s->ToModifiedUtf8());
if (s->GetLength() <= 16) {
StringAppendF(&extras, " \"%s\"", utf8.c_str());
StringAppendF(&extras, " \"%.16s... (%d chars)", utf8.c_str(), s->GetLength());
}
} else if (ref->IsReferenceInstance()) {
- mirror::Object* referent = ref->AsReference()->GetReferent();
+ ObjPtr<mirror::Object> referent = ref->AsReference()->GetReferent();
if (referent == nullptr) {
extras = " (referent is null)";
} else {
os << " Summary:\n";
size_t equiv = 0;
size_t identical = 0;
- mirror::Object* prev = nullptr;
+ ObjPtr<mirror::Object> prev = nullptr;
for (GcRoot<mirror::Object>& root : sorted_entries) {
- mirror::Object* current = root.Read<kWithoutReadBarrier>();
+ ObjPtr<mirror::Object> current = root.Read<kWithoutReadBarrier>();
if (prev != nullptr) {
const size_t element_count = GetElementCount(prev);
if (current == prev) {
#include "base/allocator.h"
#include "base/mutex.h"
#include "gc_root.h"
+#include "obj_ptr.h"
#include "object_callbacks.h"
namespace art {
ReferenceTable(const char* name, size_t initial_size, size_t max_size);
~ReferenceTable();
- void Add(mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_);
+ void Add(ObjPtr<mirror::Object> obj) REQUIRES_SHARED(Locks::mutator_lock_);
- void Remove(mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_);
+ void Remove(ObjPtr<mirror::Object> obj) REQUIRES_SHARED(Locks::mutator_lock_);
size_t Size() const;
} else if (kind == kHandleScopeOrInvalid) {
LOG(FATAL) << "Unsupported UpdateReference for kind kHandleScopeOrInvalid";
} else if (kind == kGlobal) {
- self->GetJniEnv()->vm->UpdateGlobal(self, ref, result.Ptr());
+ self->GetJniEnv()->vm->UpdateGlobal(self, ref, result);
} else {
DCHECK_EQ(kind, kWeakGlobal);
- self->GetJniEnv()->vm->UpdateWeakGlobal(self, ref, result.Ptr());
+ self->GetJniEnv()->vm->UpdateWeakGlobal(self, ref, result);
}
}
inline ObjPtr<T, kPoison> ScopedObjectAccessAlreadyRunnable::Decode(jobject obj) const {
Locks::mutator_lock_->AssertSharedHeld(Self());
DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
- return down_cast<T*>(Self()->DecodeJObject(obj));
+ return down_cast<T*>(Self()->DecodeJObject(obj).Ptr());
}
inline ArtField* ScopedObjectAccessAlreadyRunnable::DecodeField(jfieldID fid) const {
}
}
-mirror::Object* Thread::DecodeJObject(jobject obj) const {
+ObjPtr<mirror::Object> Thread::DecodeJObject(jobject obj) const {
if (obj == nullptr) {
return nullptr;
}
tlsPtr_.jni_env->vm->JniAbortF(nullptr, "use of deleted %s %p",
ToStr<IndirectRefKind>(kind).c_str(), obj);
}
- return result.Ptr();
+ return result;
}
bool Thread::IsJWeakCleared(jweak obj) const {
// case in the compiler. We won't be able to invoke the constructor of the exception, so set
// the exception fields directly.
if (msg != nullptr) {
- exception->SetDetailMessage(down_cast<mirror::String*>(DecodeJObject(msg_string.get())));
+ exception->SetDetailMessage(DecodeJObject(msg_string.get())->AsString());
}
if (cause.get() != nullptr) {
- exception->SetCause(down_cast<mirror::Throwable*>(DecodeJObject(cause.get())));
+ exception->SetCause(DecodeJObject(cause.get())->AsThrowable());
}
ScopedLocalRef<jobject> trace(GetJniEnv(),
Runtime::Current()->IsActiveTransaction()
? CreateInternalStackTrace<true>(soa)
: CreateInternalStackTrace<false>(soa));
if (trace.get() != nullptr) {
- exception->SetStackState(down_cast<mirror::Throwable*>(DecodeJObject(trace.get())));
+ exception->SetStackState(DecodeJObject(trace.get()).Ptr());
}
SetException(exception.Get());
} else {
}
// Convert a jobject into a Object*
- mirror::Object* DecodeJObject(jobject obj) const REQUIRES_SHARED(Locks::mutator_lock_);
+ ObjPtr<mirror::Object> DecodeJObject(jobject obj) const REQUIRES_SHARED(Locks::mutator_lock_);
// Checks if the weak global ref has been cleared by the GC without decoding it.
bool IsJWeakCleared(jweak obj) const REQUIRES_SHARED(Locks::mutator_lock_);
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "mirror/class.h"
#include "mirror/throwable.h"
+#include "obj_ptr-inl.h"
#include "ScopedLocalRef.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-inl.h"
"Ljava/lang/String;");
}
-mirror::Class* WellKnownClasses::ToClass(jclass global_jclass) {
- return reinterpret_cast<mirror::Class*>(Thread::Current()->DecodeJObject(global_jclass));
+ObjPtr<mirror::Class> WellKnownClasses::ToClass(jclass global_jclass) {
+ return ObjPtr<mirror::Class>::DownCast(Thread::Current()->DecodeJObject(global_jclass));
}
} // namespace art
#include "base/mutex.h"
#include "jni.h"
+#include "obj_ptr.h"
namespace art {
static ArtMethod* StringInitToStringFactory(ArtMethod* method);
static uint32_t StringInitToEntryPoint(ArtMethod* method);
- static mirror::Class* ToClass(jclass global_jclass)
- REQUIRES_SHARED(Locks::mutator_lock_);
+ static ObjPtr<mirror::Class> ToClass(jclass global_jclass) REQUIRES_SHARED(Locks::mutator_lock_);
static jclass com_android_dex_Dex;
static jclass dalvik_annotation_optimization_CriticalNative;