Fix dangling SingleImplementations left after class unloading
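
When a class loader is unloaded, the ArtMethods of its classes are freed
together with the loader's LinearAlloc. Abstract methods in superclasses
and implemented interfaces that belong to other class loaders may still
point to those freed methods through their single-implementation fields,
leaving dangling pointers behind. Walk the superclass chain and the
interface table of every class owned by the allocation being deleted and
reset every single-implementation entry that points into that allocation.
The walk is intended to run during GC, before the reclaim phase, so the
objects it inspects are accessed without read barriers to avoid reviving
them.

The change was exercised manually with sample code along the following
lines (a minimal sketch of the scenario only; the class names, dex path,
and loader setup are illustrative, not the actual test code):

  // Base is visible to both class loaders; Impl is a concrete subclass in
  // a secondary dex file (hypothetical path below) loaded by an unloadable
  // class loader.
  public abstract class Base {
    public abstract void doWork();  // CHA records Impl.doWork() as the single implementation.
  }

  class Scenario {
    static void run() throws Exception {
      ClassLoader unloadable = new dalvik.system.PathClassLoader(
          "/data/local/tmp/impl.dex", Scenario.class.getClassLoader());
      Base b = (Base) unloadable.loadClass("Impl")
          .getDeclaredConstructor().newInstance();
      b.doWork();
      // Drop all references so the class loader and Impl's methods can be
      // unloaded by the next collection.
      b = null;
      unloadable = null;
      Runtime.getRuntime().gc();
      // Without this change, Base.doWork()'s single-implementation pointer
      // now dangles into the freed LinearAlloc.
    }
  }
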
Test: make test-art-host, manual testing with sample code
Bug: 73143991
Change-Id: I4d56b39c69d4ed60266a8b90b9e9d18fba7b8227
diff --git a/runtime/art_method-inl.h b/runtime/art_method-inl.h
index 8bf91d9..1565644 100644
--- a/runtime/art_method-inl.h
+++ b/runtime/art_method-inl.h
@@ -374,13 +374,14 @@
return ResolveClassFromTypeIndex(GetReturnTypeIndex());
}
+template <ReadBarrierOption kReadBarrierOption>
inline bool ArtMethod::HasSingleImplementation() {
- if (IsFinal() || GetDeclaringClass()->IsFinal()) {
+ if (IsFinal<kReadBarrierOption>() || GetDeclaringClass<kReadBarrierOption>()->IsFinal()) {
// We don't set kAccSingleImplementation for these cases since intrinsic
// can use the flag also.
return true;
}
- return (GetAccessFlags() & kAccSingleImplementation) != 0;
+ return (GetAccessFlags<kReadBarrierOption>() & kAccSingleImplementation) != 0;
}
inline bool ArtMethod::IsHiddenIntrinsic(uint32_t ordinal) {
diff --git a/runtime/art_method.cc b/runtime/art_method.cc
index bbc6007..f3c4959 100644
--- a/runtime/art_method.cc
+++ b/runtime/art_method.cc
@@ -88,13 +88,18 @@
}
}
+template <ReadBarrierOption kReadBarrierOption>
ArtMethod* ArtMethod::GetSingleImplementation(PointerSize pointer_size) {
- if (!IsAbstract()) {
+ if (!IsAbstract<kReadBarrierOption>()) {
// A non-abstract's single implementation is itself.
return this;
}
return reinterpret_cast<ArtMethod*>(GetDataPtrSize(pointer_size));
}
+template ArtMethod* ArtMethod::GetSingleImplementation<ReadBarrierOption::kWithReadBarrier>(
+ PointerSize pointer_size);
+template ArtMethod* ArtMethod::GetSingleImplementation<ReadBarrierOption::kWithoutReadBarrier>(
+ PointerSize pointer_size);
ArtMethod* ArtMethod::FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
jobject jlr_method) {
diff --git a/runtime/art_method.h b/runtime/art_method.h
index 579e554..bd9b64d 100644
--- a/runtime/art_method.h
+++ b/runtime/art_method.h
@@ -172,8 +172,9 @@
return (GetAccessFlags() & synchonized) != 0;
}
+ template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
bool IsFinal() {
- return (GetAccessFlags() & kAccFinal) != 0;
+ return (GetAccessFlags<kReadBarrierOption>() & kAccFinal) != 0;
}
template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
@@ -242,10 +243,11 @@
}
// This is set by the class linker.
+ template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
bool IsDefault() {
static_assert((kAccDefault & (kAccIntrinsic | kAccIntrinsicBits)) == 0,
"kAccDefault conflicts with intrinsic modifier");
- return (GetAccessFlags() & kAccDefault) != 0;
+ return (GetAccessFlags<kReadBarrierOption>() & kAccDefault) != 0;
}
template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
@@ -280,8 +282,9 @@
return (GetAccessFlags() & mask) == mask;
}
+ template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
bool IsAbstract() {
- return (GetAccessFlags() & kAccAbstract) != 0;
+ return (GetAccessFlags<kReadBarrierOption>() & kAccAbstract) != 0;
}
bool IsSynthetic() {
@@ -496,6 +499,7 @@
return DataOffset(kRuntimePointerSize);
}
+ template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
ALWAYS_INLINE bool HasSingleImplementation() REQUIRES_SHARED(Locks::mutator_lock_);
ALWAYS_INLINE void SetHasSingleImplementation(bool single_impl) {
@@ -513,12 +517,15 @@
ArtMethod* GetCanonicalMethod(PointerSize pointer_size = kRuntimePointerSize)
REQUIRES_SHARED(Locks::mutator_lock_);
+ template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
ArtMethod* GetSingleImplementation(PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
+ template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
ALWAYS_INLINE void SetSingleImplementation(ArtMethod* method, PointerSize pointer_size) {
- DCHECK(!IsNative());
- DCHECK(IsAbstract()); // Non-abstract method's single implementation is just itself.
+ DCHECK(!IsNative<kReadBarrierOption>());
+ // A non-abstract method's single implementation is just itself.
+ DCHECK(IsAbstract<kReadBarrierOption>());
SetDataPtrSize(method, pointer_size);
}
diff --git a/runtime/cha.cc b/runtime/cha.cc
index a53d7e5..f2e6a73 100644
--- a/runtime/cha.cc
+++ b/runtime/cha.cc
@@ -21,6 +21,7 @@
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "linear_alloc.h"
+#include "mirror/class_loader.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "stack.h"
@@ -77,6 +78,106 @@
}
}
+void ClassHierarchyAnalysis::ResetSingleImplementationInHierarchy(ObjPtr<mirror::Class> klass,
+ const LinearAlloc* alloc,
+ const PointerSize pointer_size)
+ const {
+ // Expected to be called from a class visitor; null pointers are not expected.
+ DCHECK(klass != nullptr);
+ DCHECK(alloc != nullptr);
+
+ // Skip interfaces since they cannot provide SingleImplementations to work with.
+ if (klass->IsInterface()) {
+ return;
+ }
+
+ // This method is called while visiting the classes in a class loader's class table.
+ // Some of those classes may belong to other class loaders; the 'alloc' argument
+ // identifies the class loader that is about to be deleted.
+ // Filter out classes that do not belong to it.
+ if (!alloc->ContainsUnsafe(klass->GetMethodsPtr())) {
+ return;
+ }
+
+ // CHA analysis is only applied to resolved classes.
+ if (!klass->IsResolved()) {
+ return;
+ }
+
+ ObjPtr<mirror::Class> super = klass->GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>();
+
+ // Skip Object class and primitive classes.
+ if (super == nullptr) {
+ return;
+ }
+
+ // The class is going to be deleted. Iterate over the virtual methods of its superclasses to see
+ // whether their single implementations are methods defined by 'klass'.
+ // Virtual methods that do not override a superclass method are skipped, since they cannot be
+ // the single implementation of anything.
+ int32_t vtbl_size = super->GetVTableLength<kDefaultVerifyFlags, kWithoutReadBarrier>();
+ ObjPtr<mirror::ClassLoader> loader =
+ klass->GetClassLoader<kDefaultVerifyFlags, kWithoutReadBarrier>();
+ for (int vtbl_index = 0; vtbl_index < vtbl_size; ++vtbl_index) {
+ ArtMethod* method =
+ klass->GetVTableEntry<kDefaultVerifyFlags, kWithoutReadBarrier>(vtbl_index, pointer_size);
+ if (!alloc->ContainsUnsafe(method)) {
+ continue;
+ }
+
+ // Find all occurrences of this virtual method in the single-implementation fields of the
+ // superclasses and reset them.
+ // There is no need to reset the field of the method itself (it is about to be deleted anyway),
+ // so start with the superclass and move up, looking at the corresponding vtable slot.
+ for (ObjPtr<mirror::Class> super_it = super;
+ super_it != nullptr &&
+ super_it->GetVTableLength<kDefaultVerifyFlags, kWithoutReadBarrier>() > vtbl_index;
+ super_it = super_it->GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>()) {
+ // Skip superclasses that are also going to be unloaded.
+ ObjPtr<mirror::ClassLoader> super_loader = super_it->
+ GetClassLoader<kDefaultVerifyFlags, kWithoutReadBarrier>();
+ if (super_loader == loader) {
+ continue;
+ }
+
+ ArtMethod* super_method = super_it->
+ GetVTableEntry<kDefaultVerifyFlags, kWithoutReadBarrier>(vtbl_index, pointer_size);
+ if (super_method->IsAbstract<kWithoutReadBarrier>() &&
+ super_method->HasSingleImplementation<kWithoutReadBarrier>() &&
+ super_method->GetSingleImplementation<kWithoutReadBarrier>(pointer_size) == method) {
+ // Act as if no single implementation had ever been defined
+ // for this method of the superclass.
+ super_method->SetSingleImplementation<kWithoutReadBarrier>(nullptr, pointer_size);
+ } else {
+ // No related single implementations can be found any further up the hierarchy.
+ DCHECK(!super_method->HasSingleImplementation<kWithoutReadBarrier>());
+ break;
+ }
+ }
+ }
+
+ // Check all possible interface methods too.
+ ObjPtr<mirror::IfTable> iftable = klass->GetIfTable<kDefaultVerifyFlags, kWithoutReadBarrier>();
+ const size_t ifcount = klass->GetIfTableCount<kDefaultVerifyFlags, kWithoutReadBarrier>();
+ for (size_t i = 0; i < ifcount; ++i) {
+ ObjPtr<mirror::Class> interface =
+ iftable->GetInterface<kDefaultVerifyFlags, kWithoutReadBarrier>(i);
+ for (size_t j = 0,
+ count = iftable->GetMethodArrayCount<kDefaultVerifyFlags, kWithoutReadBarrier>(i);
+ j < count;
+ ++j) {
+ ArtMethod* method = interface->GetVirtualMethod(j, pointer_size);
+ if (method->HasSingleImplementation<kWithoutReadBarrier>() &&
+ alloc->ContainsUnsafe(
+ method->GetSingleImplementation<kWithoutReadBarrier>(pointer_size)) &&
+ !method->IsDefault<kWithoutReadBarrier>()) {
+ // Act as if no single implementation had ever been defined for this method.
+ method->SetSingleImplementation<kWithoutReadBarrier>(nullptr, pointer_size);
+ }
+ }
+ }
+}
+
// This stack visitor walks the stack and for compiled code with certain method
// headers, sets the should_deoptimize flag on stack to 1.
// TODO: also set the register value to 1 when should_deoptimize is allocated in
diff --git a/runtime/cha.h b/runtime/cha.h
index 40999dd..d1a1b7c 100644
--- a/runtime/cha.h
+++ b/runtime/cha.h
@@ -110,6 +110,17 @@
const std::unordered_set<OatQuickMethodHeader*>& method_headers)
REQUIRES(Locks::cha_lock_);
+ // If the given class belongs to a linear allocation that is about to be deleted, reset, in all
+ // of its superclasses and superinterfaces, the single-implementation fields of methods that are
+ // affected by the deletion.
+ // The method is intended to be called during GC, before the reclaim phase, since it reads
+ // information from Java objects that are about to be collected.
+ // For the same reason, objects must be accessed without read barriers so as not to revive them.
+ void ResetSingleImplementationInHierarchy(ObjPtr<mirror::Class> klass,
+ const LinearAlloc* alloc,
+ PointerSize pointer_size)
+ const REQUIRES_SHARED(Locks::mutator_lock_);
+
// Update CHA info for methods that `klass` overrides, after loading `klass`.
void UpdateAfterLoadingOf(Handle<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index bf0d3ad..3c1ffe0 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1162,6 +1162,25 @@
return true;
}
+class CHAOnDeleteUpdateClassVisitor {
+ public:
+ explicit CHAOnDeleteUpdateClassVisitor(LinearAlloc* alloc)
+ : allocator_(alloc), cha_(Runtime::Current()->GetClassLinker()->GetClassHierarchyAnalysis()),
+ pointer_size_(Runtime::Current()->GetClassLinker()->GetImagePointerSize()),
+ self_(Thread::Current()) {}
+
+ bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
+ // This class is going to be unloaded. Tell CHA about it.
+ cha_->ResetSingleImplementationInHierarchy(klass, allocator_, pointer_size_);
+ return true;
+ }
+ private:
+ const LinearAlloc* allocator_;
+ const ClassHierarchyAnalysis* cha_;
+ const PointerSize pointer_size_;
+ const Thread* self_;
+};
+
class VerifyDeclaringClassVisitor : public ArtMethodVisitor {
public:
VerifyDeclaringClassVisitor() REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_)
@@ -2146,12 +2165,14 @@
mirror::EmulatedStackFrame::ResetClass();
Thread* const self = Thread::Current();
for (const ClassLoaderData& data : class_loaders_) {
- DeleteClassLoader(self, data);
+ // CHA unloading analysis is not needed here. No negative consequences are expected,
+ // because all the class loaders are deleted at the same time.
+ DeleteClassLoader(self, data, false /*cleanup_cha*/);
}
class_loaders_.clear();
}
-void ClassLinker::DeleteClassLoader(Thread* self, const ClassLoaderData& data) {
+void ClassLinker::DeleteClassLoader(Thread* self, const ClassLoaderData& data, bool cleanup_cha) {
Runtime* const runtime = Runtime::Current();
JavaVMExt* const vm = runtime->GetJavaVM();
vm->DeleteWeakGlobalRef(self, data.weak_root);
@@ -2166,6 +2187,12 @@
// If we don't have a JIT, we need to manually remove the CHA dependencies manually.
cha_->RemoveDependenciesForLinearAlloc(data.allocator);
}
+ // Clean up references to single-implementation ArtMethods that are about to be deleted.
+ if (cleanup_cha) {
+ CHAOnDeleteUpdateClassVisitor visitor(data.allocator);
+ data.class_table->Visit<CHAOnDeleteUpdateClassVisitor, kWithoutReadBarrier>(visitor);
+ }
+
delete data.allocator;
delete data.class_table;
}
@@ -8891,7 +8918,8 @@
}
}
for (ClassLoaderData& data : to_delete) {
- DeleteClassLoader(self, data);
+ // CHA unloading analysis and single-implementation cleanup are required here.
+ DeleteClassLoader(self, data, true /*cleanup_cha*/);
}
}
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index 8d6b3d2..d05e78f 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -752,7 +752,7 @@
REQUIRES(!Locks::dex_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
- void DeleteClassLoader(Thread* self, const ClassLoaderData& data)
+ void DeleteClassLoader(Thread* self, const ClassLoaderData& data, bool cleanup_cha)
REQUIRES_SHARED(Locks::mutator_lock_);
void VisitClassesInternal(ClassVisitor* visitor)
diff --git a/runtime/class_table-inl.h b/runtime/class_table-inl.h
index 718e93a..c59e2e8 100644
--- a/runtime/class_table-inl.h
+++ b/runtime/class_table-inl.h
@@ -60,12 +60,12 @@
}
}
-template <typename Visitor>
+template <typename Visitor, ReadBarrierOption kReadBarrierOption>
bool ClassTable::Visit(Visitor& visitor) {
ReaderMutexLock mu(Thread::Current(), lock_);
for (ClassSet& class_set : classes_) {
for (TableSlot& table_slot : class_set) {
- if (!visitor(table_slot.Read())) {
+ if (!visitor(table_slot.Read<kReadBarrierOption>())) {
return false;
}
}
@@ -73,12 +73,12 @@
return true;
}
-template <typename Visitor>
+template <typename Visitor, ReadBarrierOption kReadBarrierOption>
bool ClassTable::Visit(const Visitor& visitor) {
ReaderMutexLock mu(Thread::Current(), lock_);
for (ClassSet& class_set : classes_) {
for (TableSlot& table_slot : class_set) {
- if (!visitor(table_slot.Read())) {
+ if (!visitor(table_slot.Read<kReadBarrierOption>())) {
return false;
}
}
diff --git a/runtime/class_table.h b/runtime/class_table.h
index 52e9f82..3e90fe2 100644
--- a/runtime/class_table.h
+++ b/runtime/class_table.h
@@ -190,11 +190,11 @@
REQUIRES_SHARED(Locks::mutator_lock_);
// Stops visit if the visitor returns false.
- template <typename Visitor>
+ template <typename Visitor, ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
bool Visit(Visitor& visitor)
REQUIRES(!lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
- template <typename Visitor>
+ template <typename Visitor, ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
bool Visit(const Visitor& visitor)
REQUIRES(!lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h
index ee7d217..f63f105 100644
--- a/runtime/mirror/class-inl.h
+++ b/runtime/mirror/class-inl.h
@@ -304,20 +304,25 @@
return GetVTable() != nullptr || ShouldHaveEmbeddedVTable();
}
+ template<VerifyObjectFlags kVerifyFlags,
+ ReadBarrierOption kReadBarrierOption>
inline int32_t Class::GetVTableLength() {
- if (ShouldHaveEmbeddedVTable()) {
+ if (ShouldHaveEmbeddedVTable<kVerifyFlags, kReadBarrierOption>()) {
return GetEmbeddedVTableLength();
}
- return GetVTable() != nullptr ? GetVTable()->GetLength() : 0;
+ return GetVTable<kVerifyFlags, kReadBarrierOption>() != nullptr ?
+ GetVTable<kVerifyFlags, kReadBarrierOption>()->GetLength() : 0;
}
+ template<VerifyObjectFlags kVerifyFlags,
+ ReadBarrierOption kReadBarrierOption>
inline ArtMethod* Class::GetVTableEntry(uint32_t i, PointerSize pointer_size) {
- if (ShouldHaveEmbeddedVTable()) {
+ if (ShouldHaveEmbeddedVTable<kVerifyFlags, kReadBarrierOption>()) {
return GetEmbeddedVTableEntry(i, pointer_size);
}
- auto* vtable = GetVTable();
+ auto* vtable = GetVTable<kVerifyFlags, kReadBarrierOption>();
DCHECK(vtable != nullptr);
- return vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size);
+ return vtable->template GetElementPtrSize<ArtMethod*, kVerifyFlags, kReadBarrierOption>(i, pointer_size);
}
inline int32_t Class::GetEmbeddedVTableLength() {
@@ -627,8 +632,10 @@
return ret.Ptr();
}
+template<VerifyObjectFlags kVerifyFlags,
+ ReadBarrierOption kReadBarrierOption>
inline int32_t Class::GetIfTableCount() {
- return GetIfTable()->Count();
+ return GetIfTable<kVerifyFlags, kReadBarrierOption>()->Count();
}
inline void Class::SetIfTable(ObjPtr<IfTable> new_iftable) {
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index ea06567..51d1376 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -808,8 +808,12 @@
static MemberOffset EmbeddedVTableEntryOffset(uint32_t i, PointerSize pointer_size);
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
int32_t GetVTableLength() REQUIRES_SHARED(Locks::mutator_lock_);
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
ArtMethod* GetVTableEntry(uint32_t i, PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -941,6 +945,8 @@
return (GetAccessFlags() & kAccRecursivelyInitialized) != 0;
}
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
ALWAYS_INLINE int32_t GetIfTableCount() REQUIRES_SHARED(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
diff --git a/runtime/mirror/iftable.h b/runtime/mirror/iftable.h
index 296c163..d72c786 100644
--- a/runtime/mirror/iftable.h
+++ b/runtime/mirror/iftable.h
@@ -25,8 +25,11 @@
class MANAGED IfTable FINAL : public ObjectArray<Object> {
public:
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
ALWAYS_INLINE Class* GetInterface(int32_t i) REQUIRES_SHARED(Locks::mutator_lock_) {
- Class* interface = GetWithoutChecks((i * kMax) + kInterface)->AsClass();
+ Class* interface =
+ GetWithoutChecks<kVerifyFlags, kReadBarrierOption>((i * kMax) + kInterface)->AsClass();
DCHECK(interface != nullptr);
return interface;
}