summaryrefslogtreecommitdiff
path: root/runtime/class_linker.cc
diff options
context:
space:
mode:
Diffstat (limited to 'runtime/class_linker.cc')
-rw-r--r--runtime/class_linker.cc1719
1 files changed, 1197 insertions, 522 deletions
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index c8dbc75e61..ecb2fcf472 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -22,6 +22,7 @@
#include <deque>
#include <forward_list>
#include <iostream>
+#include <iterator>
#include <map>
#include <memory>
#include <queue>
@@ -32,17 +33,19 @@
#include <vector>
#include "android-base/stringprintf.h"
-
+#include "android-base/strings.h"
#include "art_field-inl.h"
#include "art_method-inl.h"
#include "barrier.h"
#include "base/arena_allocator.h"
+#include "base/arena_bit_vector.h"
#include "base/casts.h"
#include "base/file_utils.h"
#include "base/hash_map.h"
#include "base/hash_set.h"
#include "base/leb128.h"
#include "base/logging.h"
+#include "base/mem_map_arena_pool.h"
#include "base/metrics/metrics.h"
#include "base/mutex-inl.h"
#include "base/os.h"
@@ -67,6 +70,7 @@
#include "dex/class_accessor-inl.h"
#include "dex/descriptors_names.h"
#include "dex/dex_file-inl.h"
+#include "dex/dex_file_annotations.h"
#include "dex/dex_file_exception_helpers.h"
#include "dex/dex_file_loader.h"
#include "dex/signature-inl.h"
@@ -96,7 +100,7 @@
#include "jit/jit_code_cache.h"
#include "jni/java_vm_ext.h"
#include "jni/jni_internal.h"
-#include "linear_alloc.h"
+#include "linear_alloc-inl.h"
#include "mirror/array-alloc-inl.h"
#include "mirror/array-inl.h"
#include "mirror/call_site.h"
@@ -120,8 +124,8 @@
#include "mirror/object_array-alloc-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/object_array.h"
-#include "mirror/object_reference.h"
#include "mirror/object_reference-inl.h"
+#include "mirror/object_reference.h"
#include "mirror/proxy.h"
#include "mirror/reference-inl.h"
#include "mirror/stack_trace_element.h"
@@ -141,6 +145,7 @@
#include "runtime.h"
#include "runtime_callbacks.h"
#include "scoped_thread_state_change-inl.h"
+#include "startup_completed_task.h"
#include "thread-inl.h"
#include "thread.h"
#include "thread_list.h"
@@ -270,7 +275,6 @@ class ClassLinker::VisiblyInitializedCallback final
}
void Run(Thread* self) override {
- self->ClearMakeVisiblyInitializedCounter();
AdjustThreadVisibilityCounter(self, -1);
}
@@ -298,7 +302,13 @@ class ClassLinker::VisiblyInitializedCallback final
}
}
- static constexpr size_t kMaxClasses = 16;
+ // Making classes initialized in bigger batches helps with app startup for
+ // apps that initialize a lot of classes by running fewer checkpoints.
+ // (On the other hand, bigger batches make class initialization checks more
+ // likely to take a slow path but that is mitigated by making partially
+ // filled buffers visibly initialized if we take the slow path many times.
+ // See `Thread::kMakeVisiblyInitializedCounterTriggerCount`.)
+ static constexpr size_t kMaxClasses = 48;
ClassLinker* const class_linker_;
size_t num_classes_;
@@ -321,6 +331,7 @@ void ClassLinker::MakeInitializedClassesVisiblyInitialized(Thread* self, bool wa
}
std::optional<Barrier> maybe_barrier; // Avoid constructing the Barrier for `wait == false`.
if (wait) {
+ Locks::mutator_lock_->AssertNotHeld(self);
maybe_barrier.emplace(0);
}
int wait_count = 0;
@@ -534,10 +545,9 @@ static void VlogClassInitializationFailure(Handle<mirror::Class> klass)
static void WrapExceptionInInitializer(Handle<mirror::Class> klass)
REQUIRES_SHARED(Locks::mutator_lock_) {
Thread* self = Thread::Current();
- JNIEnv* env = self->GetJniEnv();
- ScopedLocalRef<jthrowable> cause(env, env->ExceptionOccurred());
- CHECK(cause.get() != nullptr);
+ ObjPtr<mirror::Throwable> cause = self->GetException();
+ CHECK(cause != nullptr);
// Boot classpath classes should not fail initialization. This is a consistency debug check.
// This cannot in general be guaranteed, but in all likelihood leads to breakage down the line.
@@ -552,12 +562,8 @@ static void WrapExceptionInInitializer(Handle<mirror::Class> klass)
<< self->GetException()->Dump();
}
- env->ExceptionClear();
- bool is_error = env->IsInstanceOf(cause.get(), WellKnownClasses::java_lang_Error);
- env->Throw(cause.get());
-
// We only wrap non-Error exceptions; an Error can just be used as-is.
- if (!is_error) {
+ if (!cause->IsError()) {
self->ThrowNewWrappedException("Ljava/lang/ExceptionInInitializerError;", nullptr);
}
VlogClassInitializationFailure(klass);
@@ -1084,19 +1090,113 @@ void ClassLinker::FinishInit(Thread* self) {
VLOG(startup) << "ClassLinker::FinishInit exiting";
}
+static void EnsureRootInitialized(ClassLinker* class_linker,
+ Thread* self,
+ ObjPtr<mirror::Class> klass)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ if (!klass->IsVisiblyInitialized()) {
+ DCHECK(!klass->IsArrayClass());
+ DCHECK(!klass->IsPrimitive());
+ StackHandleScope<1> hs(self);
+ Handle<mirror::Class> h_class(hs.NewHandle(klass));
+ if (!class_linker->EnsureInitialized(
+ self, h_class, /*can_init_fields=*/ true, /*can_init_parents=*/ true)) {
+ LOG(FATAL) << "Exception when initializing " << h_class->PrettyClass()
+ << ": " << self->GetException()->Dump();
+ }
+ }
+}
+
+void ClassLinker::RunEarlyRootClinits(Thread* self) {
+ StackHandleScope<1u> hs(self);
+ Handle<mirror::ObjectArray<mirror::Class>> class_roots = hs.NewHandle(GetClassRoots());
+ EnsureRootInitialized(this, self, GetClassRoot<mirror::Class>(class_roots.Get()));
+ EnsureRootInitialized(this, self, GetClassRoot<mirror::String>(class_roots.Get()));
+ // `Field` class is needed for register_java_net_InetAddress in libcore, b/28153851.
+ EnsureRootInitialized(this, self, GetClassRoot<mirror::Field>(class_roots.Get()));
+
+ WellKnownClasses::Init(self->GetJniEnv());
+
+ // `FinalizerReference` class is needed for initialization of `java.net.InetAddress`.
+ // (Indirectly by constructing a `ObjectStreamField` which uses a `StringBuilder`
+ // and, when resizing, initializes the `System` class for `System.arraycopy()`
+ // and `System.<clinit> creates a finalizable object.)
+ EnsureRootInitialized(
+ this, self, WellKnownClasses::java_lang_ref_FinalizerReference_add->GetDeclaringClass());
+}
+
void ClassLinker::RunRootClinits(Thread* self) {
+ StackHandleScope<1u> hs(self);
+ Handle<mirror::ObjectArray<mirror::Class>> class_roots = hs.NewHandle(GetClassRoots());
for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); ++i) {
- ObjPtr<mirror::Class> c = GetClassRoot(ClassRoot(i), this);
- if (!c->IsArrayClass() && !c->IsPrimitive()) {
- StackHandleScope<1> hs(self);
- Handle<mirror::Class> h_class(hs.NewHandle(c));
- if (!EnsureInitialized(self, h_class, true, true)) {
- LOG(FATAL) << "Exception when initializing " << h_class->PrettyClass()
- << ": " << self->GetException()->Dump();
- }
- } else {
- DCHECK(c->IsInitialized());
- }
+ EnsureRootInitialized(this, self, GetClassRoot(ClassRoot(i), class_roots.Get()));
+ }
+
+ // Make sure certain well-known classes are initialized. Note that well-known
+ // classes are always in the boot image, so this code is primarily intended
+ // for running without boot image but may be needed for boot image if the
+ // AOT-initialization fails due to introduction of new code to `<clinit>`.
+ ArtMethod* methods_of_classes_to_initialize[] = {
+ // Initialize primitive boxing classes (avoid check at runtime).
+ WellKnownClasses::java_lang_Boolean_valueOf,
+ WellKnownClasses::java_lang_Byte_valueOf,
+ WellKnownClasses::java_lang_Character_valueOf,
+ WellKnownClasses::java_lang_Double_valueOf,
+ WellKnownClasses::java_lang_Float_valueOf,
+ WellKnownClasses::java_lang_Integer_valueOf,
+ WellKnownClasses::java_lang_Long_valueOf,
+ WellKnownClasses::java_lang_Short_valueOf,
+ // Initialize `StackOverflowError`.
+ WellKnownClasses::java_lang_StackOverflowError_init,
+ // Ensure class loader classes are initialized (avoid check at runtime).
+ // Superclass `ClassLoader` is a class root and already initialized above.
+ // Superclass `BaseDexClassLoader` is initialized implicitly.
+ WellKnownClasses::dalvik_system_DelegateLastClassLoader_init,
+ WellKnownClasses::dalvik_system_DexClassLoader_init,
+ WellKnownClasses::dalvik_system_InMemoryDexClassLoader_init,
+ WellKnownClasses::dalvik_system_PathClassLoader_init,
+ WellKnownClasses::java_lang_BootClassLoader_init,
+ // Ensure `Daemons` class is initialized (avoid check at runtime).
+ WellKnownClasses::java_lang_Daemons_start,
+ // Ensure `Thread` and `ThreadGroup` classes are initialized (avoid check at runtime).
+ WellKnownClasses::java_lang_Thread_init,
+ WellKnownClasses::java_lang_ThreadGroup_add,
+ // Ensure reference classes are initialized (avoid check at runtime).
+ // The `FinalizerReference` class was initialized in `RunEarlyRootClinits()`.
+ WellKnownClasses::java_lang_ref_ReferenceQueue_add,
+ // Ensure `InvocationTargetException` class is initialized (avoid check at runtime).
+ WellKnownClasses::java_lang_reflect_InvocationTargetException_init,
+ // Ensure `Parameter` class is initialized (avoid check at runtime).
+ WellKnownClasses::java_lang_reflect_Parameter_init,
+ // Ensure `MethodHandles` class is initialized (avoid check at runtime).
+ WellKnownClasses::java_lang_invoke_MethodHandles_lookup,
+ // Ensure `DirectByteBuffer` class is initialized (avoid check at runtime).
+ WellKnownClasses::java_nio_DirectByteBuffer_init,
+ // Ensure `FloatingDecimal` class is initialized (avoid check at runtime).
+ WellKnownClasses::jdk_internal_math_FloatingDecimal_getBinaryToASCIIConverter_D,
+ // Ensure reflection annotation classes are initialized (avoid check at runtime).
+ WellKnownClasses::libcore_reflect_AnnotationFactory_createAnnotation,
+ WellKnownClasses::libcore_reflect_AnnotationMember_init,
+ // We're suppressing exceptions from `DdmServer` and we do not want to repeatedly
+ // suppress class initialization error (say, due to OOM), so initialize it early.
+ WellKnownClasses::org_apache_harmony_dalvik_ddmc_DdmServer_dispatch,
+ };
+ for (ArtMethod* method : methods_of_classes_to_initialize) {
+ EnsureRootInitialized(this, self, method->GetDeclaringClass());
+ }
+ ArtField* fields_of_classes_to_initialize[] = {
+ // Ensure classes used by class loaders are initialized (avoid check at runtime).
+ WellKnownClasses::dalvik_system_DexFile_cookie,
+ WellKnownClasses::dalvik_system_DexPathList_dexElements,
+ WellKnownClasses::dalvik_system_DexPathList__Element_dexFile,
+ // Ensure `VMRuntime` is initialized (avoid check at runtime).
+ WellKnownClasses::dalvik_system_VMRuntime_nonSdkApiUsageConsumer,
+ // Initialize empty arrays needed by `StackOverflowError`.
+ WellKnownClasses::java_util_Collections_EMPTY_LIST,
+ WellKnownClasses::libcore_util_EmptyArray_STACK_TRACE_ELEMENT,
+ };
+ for (ArtField* field : fields_of_classes_to_initialize) {
+ EnsureRootInitialized(this, self, field->GetDeclaringClass());
}
}
@@ -1298,22 +1398,13 @@ bool ClassLinker::InitFromBootImage(std::string* error_msg) {
runtime->SetSentinel(boot_image_live_objects->Get(ImageHeader::kClearedJniWeakSentinel));
DCHECK(runtime->GetSentinel().Read()->GetClass() == GetClassRoot<mirror::Object>(this));
- for (size_t i = 0u, size = spaces.size(); i != size; ++i) {
- // Boot class loader, use a null handle.
- std::vector<std::unique_ptr<const DexFile>> dex_files;
- if (!AddImageSpace(spaces[i],
- ScopedNullHandle<mirror::ClassLoader>(),
- /*out*/&dex_files,
- error_msg)) {
- return false;
- }
- // Append opened dex files at the end.
- boot_dex_files_.insert(boot_dex_files_.end(),
- std::make_move_iterator(dex_files.begin()),
- std::make_move_iterator(dex_files.end()));
- }
- for (const std::unique_ptr<const DexFile>& dex_file : boot_dex_files_) {
- OatDexFile::MadviseDexFileAtLoad(*dex_file);
+ // Boot class loader, use a null handle.
+ if (!AddImageSpaces(ArrayRef<gc::space::ImageSpace*>(spaces),
+ ScopedNullHandle<mirror::ClassLoader>(),
+ /*context=*/nullptr,
+ &boot_dex_files_,
+ error_msg)) {
+ return false;
}
InitializeObjectVirtualMethodHashes(GetClassRoot<mirror::Object>(this),
image_pointer_size_,
@@ -1338,11 +1429,9 @@ void ClassLinker::AddExtraBootDexFiles(
}
}
-bool ClassLinker::IsBootClassLoader(ScopedObjectAccessAlreadyRunnable& soa,
- ObjPtr<mirror::ClassLoader> class_loader) {
+bool ClassLinker::IsBootClassLoader(ObjPtr<mirror::Object> class_loader) {
return class_loader == nullptr ||
- soa.Decode<mirror::Class>(WellKnownClasses::java_lang_BootClassLoader) ==
- class_loader->GetClass();
+ WellKnownClasses::java_lang_BootClassLoader == class_loader->GetClass();
}
class CHAOnDeleteUpdateClassVisitor {
@@ -1491,24 +1580,40 @@ static void VisitInternedStringReferences(
uint32_t raw_member_offset = sro_base[offset_index].second;
DCHECK_ALIGNED(base_offset, 2);
- DCHECK_ALIGNED(raw_member_offset, 2);
ObjPtr<mirror::Object> obj_ptr =
reinterpret_cast<mirror::Object*>(space->Begin() + base_offset);
- MemberOffset member_offset(raw_member_offset);
- ObjPtr<mirror::String> referred_string =
- obj_ptr->GetFieldObject<mirror::String,
+ if (obj_ptr->IsDexCache() && raw_member_offset >= sizeof(mirror::DexCache)) {
+ // Special case for strings referenced from dex cache array: the offset is
+ // actually decoded as an index into the dex cache string array.
+ uint32_t index = raw_member_offset - sizeof(mirror::DexCache);
+ mirror::GcRootArray<mirror::String>* array = obj_ptr->AsDexCache()->GetStringsArray();
+ // The array could be concurrently set to null. See `StartupCompletedTask`.
+ if (array != nullptr) {
+ ObjPtr<mirror::String> referred_string = array->Get(index);
+ DCHECK(referred_string != nullptr);
+ ObjPtr<mirror::String> visited = visitor(referred_string);
+ if (visited != referred_string) {
+ array->Set(index, visited.Ptr());
+ }
+ }
+ } else {
+ DCHECK_ALIGNED(raw_member_offset, 2);
+ MemberOffset member_offset(raw_member_offset);
+ ObjPtr<mirror::String> referred_string =
+ obj_ptr->GetFieldObject<mirror::String,
+ kVerifyNone,
+ kWithoutReadBarrier,
+ /* kIsVolatile= */ false>(member_offset);
+ DCHECK(referred_string != nullptr);
+
+ ObjPtr<mirror::String> visited = visitor(referred_string);
+ if (visited != referred_string) {
+ obj_ptr->SetFieldObject</* kTransactionActive= */ false,
+ /* kCheckTransaction= */ false,
kVerifyNone,
- kWithoutReadBarrier,
- /* kIsVolatile= */ false>(member_offset);
- DCHECK(referred_string != nullptr);
-
- ObjPtr<mirror::String> visited = visitor(referred_string);
- if (visited != referred_string) {
- obj_ptr->SetFieldObject</* kTransactionActive= */ false,
- /* kCheckTransaction= */ false,
- kVerifyNone,
- /* kIsVolatile= */ false>(member_offset, visited);
+ /* kIsVolatile= */ false>(member_offset, visited);
+ }
}
}
}
@@ -1574,6 +1679,7 @@ void AppImageLoadingHelper::Update(
Runtime* const runtime = Runtime::Current();
gc::Heap* const heap = runtime->GetHeap();
const ImageHeader& header = space->GetImageHeader();
+ int32_t number_of_dex_cache_arrays_cleared = 0;
{
// Register dex caches with the class loader.
WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
@@ -1582,10 +1688,24 @@ void AppImageLoadingHelper::Update(
{
WriterMutexLock mu2(self, *Locks::dex_lock_);
CHECK(class_linker->FindDexCacheDataLocked(*dex_file) == nullptr);
+ if (runtime->GetStartupCompleted()) {
+ number_of_dex_cache_arrays_cleared++;
+ // Free up dex cache arrays that we would only allocate at startup.
+ // We do this here before registering and within the lock to be
+ // consistent with `StartupCompletedTask`.
+ dex_cache->UnlinkStartupCaches();
+ }
class_linker->RegisterDexFileLocked(*dex_file, dex_cache, class_loader.Get());
}
}
}
+ if (number_of_dex_cache_arrays_cleared == dex_caches->GetLength()) {
+ // Free up dex cache arrays that we would only allocate at startup.
+ // If `number_of_dex_cache_arrays_cleared` isn't the number of dex caches in
+ // the image, then there is a race with the `StartupCompletedTask`, which
+ // will release the space instead.
+ space->ReleaseMetadata();
+ }
if (ClassLinker::kAppImageMayContainStrings) {
HandleAppImageStrings(space);
@@ -1730,114 +1850,159 @@ bool ClassLinker::OpenImageDexFiles(gc::space::ImageSpace* space,
return true;
}
+bool ClassLinker::OpenAndInitImageDexFiles(
+ const gc::space::ImageSpace* space,
+ Handle<mirror::ClassLoader> class_loader,
+ std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
+ std::string* error_msg) {
+ DCHECK(out_dex_files != nullptr);
+ const bool app_image = class_loader != nullptr;
+ const ImageHeader& header = space->GetImageHeader();
+ ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
+ DCHECK(dex_caches_object != nullptr);
+ Thread* const self = Thread::Current();
+ StackHandleScope<3> hs(self);
+ Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(
+ hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>()));
+ const OatFile* oat_file = space->GetOatFile();
+ if (oat_file->GetOatHeader().GetDexFileCount() !=
+ static_cast<uint32_t>(dex_caches->GetLength())) {
+ *error_msg =
+ "Dex cache count and dex file count mismatch while trying to initialize from image";
+ return false;
+ }
+
+ for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
+ std::string dex_file_location = dex_cache->GetLocation()->ToModifiedUtf8();
+ std::unique_ptr<const DexFile> dex_file =
+ OpenOatDexFile(oat_file, dex_file_location.c_str(), error_msg);
+ if (dex_file == nullptr) {
+ return false;
+ }
+
+ {
+ // Native fields are all null. Initialize them.
+ WriterMutexLock mu(self, *Locks::dex_lock_);
+ dex_cache->Initialize(dex_file.get(), class_loader.Get());
+ }
+ if (!app_image) {
+ // Register dex files, keep track of existing ones that are conflicts.
+ AppendToBootClassPath(dex_file.get(), dex_cache);
+ }
+ out_dex_files->push_back(std::move(dex_file));
+ }
+ return true;
+}
+
// Helper class for ArtMethod checks when adding an image. Keeps all required functionality
// together and caches some intermediate results.
+template <PointerSize kPointerSize>
class ImageChecker final {
public:
- static void CheckObjects(gc::Heap* heap, ClassLinker* class_linker)
+ static void CheckObjects(gc::Heap* heap, gc::space::ImageSpace* space)
REQUIRES_SHARED(Locks::mutator_lock_) {
- ImageChecker ic(heap, class_linker);
+ // There can be no GC during boot image initialization, so we do not need read barriers.
+ ScopedDebugDisallowReadBarriers sddrb(Thread::Current());
+
+ CHECK_EQ(kPointerSize, space->GetImageHeader().GetPointerSize());
+ const ImageSection& objects_section = space->GetImageHeader().GetObjectsSection();
+ uintptr_t space_begin = reinterpret_cast<uintptr_t>(space->Begin());
+ uintptr_t objects_begin = space_begin + objects_section.Offset();
+ uintptr_t objects_end = objects_begin + objects_section.Size();
+ ImageChecker ic(heap);
auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
DCHECK(obj != nullptr);
- CHECK(obj->GetClass() != nullptr) << "Null class in object " << obj;
- CHECK(obj->GetClass()->GetClass() != nullptr) << "Null class class " << obj;
- if (obj->IsClass()) {
+ mirror::Class* obj_klass = obj->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>();
+ CHECK(obj_klass != nullptr) << "Null class in object " << obj;
+ mirror::Class* class_class = obj_klass->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>();
+ CHECK(class_class != nullptr) << "Null class class " << obj;
+ if (obj_klass == class_class) {
auto klass = obj->AsClass();
for (ArtField& field : klass->GetIFields()) {
- CHECK_EQ(field.GetDeclaringClass(), klass);
+ CHECK_EQ(field.GetDeclaringClass<kWithoutReadBarrier>(), klass);
}
for (ArtField& field : klass->GetSFields()) {
- CHECK_EQ(field.GetDeclaringClass(), klass);
+ CHECK_EQ(field.GetDeclaringClass<kWithoutReadBarrier>(), klass);
}
- const PointerSize pointer_size = ic.pointer_size_;
- for (ArtMethod& m : klass->GetMethods(pointer_size)) {
+ for (ArtMethod& m : klass->GetMethods(kPointerSize)) {
ic.CheckArtMethod(&m, klass);
}
- ObjPtr<mirror::PointerArray> vtable = klass->GetVTable();
+ ObjPtr<mirror::PointerArray> vtable =
+ klass->GetVTable<kDefaultVerifyFlags, kWithoutReadBarrier>();
if (vtable != nullptr) {
- ic.CheckArtMethodPointerArray(vtable, nullptr);
+ ic.CheckArtMethodPointerArray(vtable);
}
if (klass->ShouldHaveImt()) {
- ImTable* imt = klass->GetImt(pointer_size);
+ ImTable* imt = klass->GetImt(kPointerSize);
for (size_t i = 0; i < ImTable::kSize; ++i) {
- ic.CheckArtMethod(imt->Get(i, pointer_size), nullptr);
+ ic.CheckArtMethod(imt->Get(i, kPointerSize), /*expected_class=*/ nullptr);
}
}
if (klass->ShouldHaveEmbeddedVTable()) {
for (int32_t i = 0; i < klass->GetEmbeddedVTableLength(); ++i) {
- ic.CheckArtMethod(klass->GetEmbeddedVTableEntry(i, pointer_size), nullptr);
+ ic.CheckArtMethod(klass->GetEmbeddedVTableEntry(i, kPointerSize),
+ /*expected_class=*/ nullptr);
}
}
- ObjPtr<mirror::IfTable> iftable = klass->GetIfTable();
- for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
- if (iftable->GetMethodArrayCount(i) > 0) {
- ic.CheckArtMethodPointerArray(iftable->GetMethodArray(i), nullptr);
+ ObjPtr<mirror::IfTable> iftable =
+ klass->GetIfTable<kDefaultVerifyFlags, kWithoutReadBarrier>();
+ int32_t iftable_count = (iftable != nullptr) ? iftable->Count() : 0;
+ for (int32_t i = 0; i < iftable_count; ++i) {
+ ObjPtr<mirror::PointerArray> method_array =
+ iftable->GetMethodArrayOrNull<kDefaultVerifyFlags, kWithoutReadBarrier>(i);
+ if (method_array != nullptr) {
+ ic.CheckArtMethodPointerArray(method_array);
}
}
}
};
- heap->VisitObjects(visitor);
+ space->GetLiveBitmap()->VisitMarkedRange(objects_begin, objects_end, visitor);
}
private:
- ImageChecker(gc::Heap* heap, ClassLinker* class_linker)
- : spaces_(heap->GetBootImageSpaces()),
- pointer_size_(class_linker->GetImagePointerSize()) {
- space_begin_.reserve(spaces_.size());
- method_sections_.reserve(spaces_.size());
- runtime_method_sections_.reserve(spaces_.size());
- for (gc::space::ImageSpace* space : spaces_) {
+ explicit ImageChecker(gc::Heap* heap) {
+ ArrayRef<gc::space::ImageSpace* const> spaces(heap->GetBootImageSpaces());
+ space_begin_.reserve(spaces.size());
+ for (gc::space::ImageSpace* space : spaces) {
+ CHECK_EQ(static_cast<const void*>(space->Begin()), &space->GetImageHeader());
space_begin_.push_back(space->Begin());
- auto& header = space->GetImageHeader();
- method_sections_.push_back(&header.GetMethodsSection());
- runtime_method_sections_.push_back(&header.GetRuntimeMethodsSection());
}
}
void CheckArtMethod(ArtMethod* m, ObjPtr<mirror::Class> expected_class)
REQUIRES_SHARED(Locks::mutator_lock_) {
+ ObjPtr<mirror::Class> declaring_class = m->GetDeclaringClassUnchecked<kWithoutReadBarrier>();
if (m->IsRuntimeMethod()) {
- ObjPtr<mirror::Class> declaring_class = m->GetDeclaringClassUnchecked();
CHECK(declaring_class == nullptr) << declaring_class << " " << m->PrettyMethod();
} else if (m->IsCopied()) {
- CHECK(m->GetDeclaringClass() != nullptr) << m->PrettyMethod();
+ CHECK(declaring_class != nullptr) << m->PrettyMethod();
} else if (expected_class != nullptr) {
- CHECK_EQ(m->GetDeclaringClassUnchecked(), expected_class) << m->PrettyMethod();
- }
- if (!spaces_.empty()) {
- bool contains = false;
- for (size_t i = 0; !contains && i != space_begin_.size(); ++i) {
- const size_t offset = reinterpret_cast<uint8_t*>(m) - space_begin_[i];
- contains = method_sections_[i]->Contains(offset) ||
- runtime_method_sections_[i]->Contains(offset);
+ CHECK_EQ(declaring_class, expected_class) << m->PrettyMethod();
+ }
+ bool contains = false;
+ for (const uint8_t* begin : space_begin_) {
+ const size_t offset = reinterpret_cast<uint8_t*>(m) - begin;
+ const ImageHeader* header = reinterpret_cast<const ImageHeader*>(begin);
+ if (header->GetMethodsSection().Contains(offset) ||
+ header->GetRuntimeMethodsSection().Contains(offset)) {
+ contains = true;
+ break;
}
- CHECK(contains) << m << " not found";
}
+ CHECK(contains) << m << " not found";
}
- void CheckArtMethodPointerArray(ObjPtr<mirror::PointerArray> arr,
- ObjPtr<mirror::Class> expected_class)
+ void CheckArtMethodPointerArray(ObjPtr<mirror::PointerArray> arr)
REQUIRES_SHARED(Locks::mutator_lock_) {
CHECK(arr != nullptr);
for (int32_t j = 0; j < arr->GetLength(); ++j) {
- auto* method = arr->GetElementPtrSize<ArtMethod*>(j, pointer_size_);
- // expected_class == null means we are a dex cache.
- if (expected_class != nullptr) {
- CHECK(method != nullptr);
- }
- if (method != nullptr) {
- CheckArtMethod(method, expected_class);
- }
+ auto* method = arr->GetElementPtrSize<ArtMethod*>(j, kPointerSize);
+ CHECK(method != nullptr);
+ CheckArtMethod(method, /*expected_class=*/ nullptr);
}
}
- const std::vector<gc::space::ImageSpace*>& spaces_;
- const PointerSize pointer_size_;
-
- // Cached sections from the spaces.
std::vector<const uint8_t*> space_begin_;
- std::vector<const ImageSection*> method_sections_;
- std::vector<const ImageSection*> runtime_method_sections_;
};
static void VerifyAppImage(const ImageHeader& header,
@@ -1872,12 +2037,11 @@ static void VerifyAppImage(const ImageHeader& header,
}
}
-bool ClassLinker::AddImageSpace(
- gc::space::ImageSpace* space,
- Handle<mirror::ClassLoader> class_loader,
- std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
- std::string* error_msg) {
- DCHECK(out_dex_files != nullptr);
+bool ClassLinker::AddImageSpace(gc::space::ImageSpace* space,
+ Handle<mirror::ClassLoader> class_loader,
+ ClassLoaderContext* context,
+ const std::vector<std::unique_ptr<const DexFile>>& dex_files,
+ std::string* error_msg) {
DCHECK(error_msg != nullptr);
const uint64_t start_time = NanoTime();
const bool app_image = class_loader != nullptr;
@@ -1906,9 +2070,8 @@ bool ClassLinker::AddImageSpace(
hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>()));
Handle<mirror::ObjectArray<mirror::Class>> class_roots(hs.NewHandle(
header.GetImageRoot(ImageHeader::kClassRoots)->AsObjectArray<mirror::Class>()));
- MutableHandle<mirror::ClassLoader> image_class_loader(hs.NewHandle(
- app_image ? header.GetImageRoot(ImageHeader::kAppImageClassLoader)->AsClassLoader()
- : nullptr));
+ MutableHandle<mirror::Object> special_root(hs.NewHandle(
+ app_image ? header.GetImageRoot(ImageHeader::kSpecialRoots) : nullptr));
DCHECK(class_roots != nullptr);
if (class_roots->GetLength() != static_cast<int32_t>(ClassRoot::kMax)) {
*error_msg = StringPrintf("Expected %d class roots but got %d",
@@ -1925,46 +2088,105 @@ bool ClassLinker::AddImageSpace(
}
}
const OatFile* oat_file = space->GetOatFile();
- if (oat_file->GetOatHeader().GetDexFileCount() !=
- static_cast<uint32_t>(dex_caches->GetLength())) {
- *error_msg = "Dex cache count and dex file count mismatch while trying to initialize from "
- "image";
- return false;
- }
- for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
- std::string dex_file_location = dex_cache->GetLocation()->ToModifiedUtf8();
- std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
- dex_file_location.c_str(),
- error_msg);
- if (dex_file == nullptr) {
+ if (app_image) {
+ ScopedAssertNoThreadSuspension sants("Checking app image");
+ if (special_root == nullptr) {
+ *error_msg = "Unexpected null special root in app image";
return false;
- }
+ } else if (special_root->IsByteArray()) {
+ OatHeader* oat_header = reinterpret_cast<OatHeader*>(special_root->AsByteArray()->GetData());
+ if (!oat_header->IsValid()) {
+ *error_msg = "Invalid oat header in special root";
+ return false;
+ }
+ if (oat_file->GetVdexFile()->GetNumberOfDexFiles() != oat_header->GetDexFileCount()) {
+ *error_msg = "Checksums count does not match";
+ return false;
+ }
+ if (oat_header->IsConcurrentCopying() != gUseReadBarrier) {
+ *error_msg = "GCs do not match";
+ return false;
+ }
- {
- // Native fields are all null. Initialize them.
- WriterMutexLock mu(self, *Locks::dex_lock_);
- dex_cache->Initialize(dex_file.get(), class_loader.Get());
- }
- if (!app_image) {
- // Register dex files, keep track of existing ones that are conflicts.
- AppendToBootClassPath(dex_file.get(), dex_cache);
- }
- out_dex_files->push_back(std::move(dex_file));
- }
+ // Check if the dex checksums match the dex files that we just loaded.
+ uint32_t* checksums = reinterpret_cast<uint32_t*>(
+ reinterpret_cast<uint8_t*>(oat_header) + oat_header->GetHeaderSize());
+ for (uint32_t i = 0; i < oat_header->GetDexFileCount(); ++i) {
+ uint32_t dex_checksum = dex_files.at(i)->GetHeader().checksum_;
+ if (checksums[i] != dex_checksum) {
+ *error_msg = StringPrintf(
+ "Image and dex file checksums did not match for %s: image has %d, dex file has %d",
+ dex_files.at(i)->GetLocation().c_str(),
+ checksums[i],
+ dex_checksum);
+ return false;
+ }
+ }
- if (app_image) {
- ScopedObjectAccessUnchecked soa(Thread::Current());
- ScopedAssertNoThreadSuspension sants("Checking app image", soa.Self());
- if (IsBootClassLoader(soa, image_class_loader.Get())) {
+ // Validate the class loader context.
+ const char* stored_context = oat_header->GetStoreValueByKey(OatHeader::kClassPathKey);
+ if (stored_context == nullptr) {
+ *error_msg = "Missing class loader context in special root";
+ return false;
+ }
+ if (context->VerifyClassLoaderContextMatch(stored_context) ==
+ ClassLoaderContext::VerificationResult::kMismatch) {
+ *error_msg = StringPrintf("Class loader contexts don't match: %s", stored_context);
+ return false;
+ }
+
+ // Validate the apex versions.
+ if (!gc::space::ImageSpace::ValidateApexVersions(*oat_header,
+ runtime->GetApexVersions(),
+ space->GetImageLocation(),
+ error_msg)) {
+ return false;
+ }
+
+ // Validate the boot classpath.
+ const char* bcp = oat_header->GetStoreValueByKey(OatHeader::kBootClassPathKey);
+ if (bcp == nullptr) {
+ *error_msg = "Missing boot classpath in special root";
+ return false;
+ }
+ std::string runtime_bcp = android::base::Join(runtime->GetBootClassPathLocations(), ':');
+ if (strcmp(bcp, runtime_bcp.c_str()) != 0) {
+ *error_msg = StringPrintf("Mismatch boot classpath: image has %s, runtime has %s",
+ bcp,
+ runtime_bcp.c_str());
+ return false;
+ }
+
+ // Validate the dex checksums of the boot classpath.
+ const char* bcp_checksums =
+ oat_header->GetStoreValueByKey(OatHeader::kBootClassPathChecksumsKey);
+ if (bcp_checksums == nullptr) {
+ *error_msg = "Missing boot classpath checksums in special root";
+ return false;
+ }
+ if (strcmp(bcp_checksums, runtime->GetBootClassPathChecksums().c_str()) != 0) {
+ *error_msg = StringPrintf("Mismatch boot classpath checksums: image has %s, runtime has %s",
+ bcp_checksums,
+ runtime->GetBootClassPathChecksums().c_str());
+ return false;
+ }
+ } else if (IsBootClassLoader(special_root.Get())) {
*error_msg = "Unexpected BootClassLoader in app image";
return false;
+ } else if (!special_root->IsClassLoader()) {
+ *error_msg = "Unexpected special root in app image";
+ return false;
}
}
if (kCheckImageObjects) {
if (!app_image) {
- ImageChecker::CheckObjects(heap, this);
+ if (image_pointer_size_ == PointerSize::k64) {
+ ImageChecker<PointerSize::k64>::CheckObjects(heap, space);
+ } else {
+ ImageChecker<PointerSize::k32>::CheckObjects(heap, space);
+ }
}
}
@@ -2012,8 +2234,7 @@ bool ClassLinker::AddImageSpace(
// Set image methods' entry point that point to the nterp trampoline to the
// nterp entry point. This allows taking the fast path when doing a
// nterp->nterp call.
- DCHECK_IMPLIES(NeedsClinitCheckBeforeCall(&method),
- method.GetDeclaringClass()->IsVisiblyInitialized());
+ DCHECK(!method.StillNeedsClinitCheck());
method.SetEntryPointFromQuickCompiledCode(interpreter::GetNterpEntryPoint());
} else {
method.SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
@@ -2024,7 +2245,7 @@ bool ClassLinker::AddImageSpace(
if (runtime->IsVerificationSoftFail()) {
header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
- if (!method.IsNative() && method.IsInvokable()) {
+ if (method.IsManagedAndInvokable()) {
method.ClearSkipAccessChecks();
}
}, space->Begin(), image_pointer_size_);
@@ -2062,9 +2283,11 @@ bool ClassLinker::AddImageSpace(
ObjPtr<mirror::Class> klass(root.Read());
// Do not update class loader for boot image classes where the app image
// class loader is only the initiating loader but not the defining loader.
- // Avoid read barrier since we are comparing against null.
- if (klass->GetClassLoader<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr) {
+ if (space->HasAddress(klass.Ptr())) {
klass->SetClassLoader(loader);
+ } else {
+ DCHECK(klass->IsBootStrapClassLoaded());
+ DCHECK(Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass.Ptr()));
}
}
}
@@ -2109,13 +2332,39 @@ bool ClassLinker::AddImageSpace(
return true;
}
+bool ClassLinker::AddImageSpaces(ArrayRef<gc::space::ImageSpace*> spaces,
+ Handle<mirror::ClassLoader> class_loader,
+ ClassLoaderContext* context,
+ /*out*/ std::vector<std::unique_ptr<const DexFile>>* dex_files,
+ /*out*/ std::string* error_msg) {
+ std::vector<std::vector<std::unique_ptr<const DexFile>>> dex_files_by_space_index;
+ for (const gc::space::ImageSpace* space : spaces) {
+ std::vector<std::unique_ptr<const DexFile>> space_dex_files;
+ if (!OpenAndInitImageDexFiles(space, class_loader, /*out*/ &space_dex_files, error_msg)) {
+ return false;
+ }
+ dex_files_by_space_index.push_back(std::move(space_dex_files));
+ }
+ // This must be done in a separate loop after all dex files are initialized because there can be
+ // references from an image space to another image space that comes after it.
+ for (size_t i = 0u, size = spaces.size(); i != size; ++i) {
+ std::vector<std::unique_ptr<const DexFile>>& space_dex_files = dex_files_by_space_index[i];
+ if (!AddImageSpace(spaces[i], class_loader, context, space_dex_files, error_msg)) {
+ return false;
+ }
+ // Append opened dex files at the end.
+ std::move(space_dex_files.begin(), space_dex_files.end(), std::back_inserter(*dex_files));
+ }
+ return true;
+}
+
void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
// Acquire tracing_enabled before locking class linker lock to prevent lock order violation. Since
// enabling tracing requires the mutator lock, there are no race conditions here.
const bool tracing_enabled = Trace::IsTracingEnabled();
Thread* const self = Thread::Current();
WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
- if (kUseReadBarrier) {
+ if (gUseReadBarrier) {
// We do not track new roots for CC.
DCHECK_EQ(0, flags & (kVisitRootFlagNewRoots |
kVisitRootFlagClearRootLog |
@@ -2146,12 +2395,20 @@ void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
boot_class_table_->VisitRoots(root_visitor);
// If tracing is enabled, then mark all the class loaders to prevent unloading.
if ((flags & kVisitRootFlagClassLoader) != 0 || tracing_enabled) {
- for (const ClassLoaderData& data : class_loaders_) {
- GcRoot<mirror::Object> root(GcRoot<mirror::Object>(self->DecodeJObject(data.weak_root)));
- root.VisitRoot(visitor, RootInfo(kRootVMInternal));
+ gc::Heap* const heap = Runtime::Current()->GetHeap();
+ // Don't visit class-loaders if compacting with userfaultfd GC as these
+ // weaks are updated using Runtime::SweepSystemWeaks() and the GC doesn't
+ // tolerate double updates.
+ if (!heap->IsPerformingUffdCompaction()) {
+ for (const ClassLoaderData& data : class_loaders_) {
+ GcRoot<mirror::Object> root(GcRoot<mirror::Object>(self->DecodeJObject(data.weak_root)));
+ root.VisitRoot(visitor, RootInfo(kRootVMInternal));
+ }
+ } else {
+ DCHECK_EQ(heap->CurrentCollectorType(), gc::CollectorType::kCollectorTypeCMC);
}
}
- } else if (!kUseReadBarrier && (flags & kVisitRootFlagNewRoots) != 0) {
+ } else if (!gUseReadBarrier && (flags & kVisitRootFlagNewRoots) != 0) {
for (auto& root : new_class_roots_) {
ObjPtr<mirror::Class> old_ref = root.Read<kWithoutReadBarrier>();
root.VisitRoot(visitor, RootInfo(kRootStickyClass));
@@ -2172,13 +2429,13 @@ void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
}
}
}
- if (!kUseReadBarrier && (flags & kVisitRootFlagClearRootLog) != 0) {
+ if (!gUseReadBarrier && (flags & kVisitRootFlagClearRootLog) != 0) {
new_class_roots_.clear();
new_bss_roots_boot_oat_files_.clear();
}
- if (!kUseReadBarrier && (flags & kVisitRootFlagStartLoggingNewRoots) != 0) {
+ if (!gUseReadBarrier && (flags & kVisitRootFlagStartLoggingNewRoots) != 0) {
log_new_roots_ = true;
- } else if (!kUseReadBarrier && (flags & kVisitRootFlagStopLoggingNewRoots) != 0) {
+ } else if (!gUseReadBarrier && (flags & kVisitRootFlagStopLoggingNewRoots) != 0) {
log_new_roots_ = false;
}
// We deliberately ignore the class roots in the image since we
@@ -2366,7 +2623,7 @@ void ClassLinker::DeleteClassLoader(Thread* self, const ClassLoaderData& data, b
}
} else if (cha_ != nullptr) {
    // If we don't have a JIT, we need to manually remove the CHA dependencies.
- cha_->RemoveDependenciesForLinearAlloc(data.allocator);
+ cha_->RemoveDependenciesForLinearAlloc(self, data.allocator);
}
// Cleanup references to single implementation ArtMethods that will be deleted.
if (cleanup_cha) {
@@ -2652,19 +2909,16 @@ do { \
} \
} while (0)
-bool ClassLinker::FindClassInSharedLibraries(ScopedObjectAccessAlreadyRunnable& soa,
- Thread* self,
+bool ClassLinker::FindClassInSharedLibraries(Thread* self,
const char* descriptor,
size_t hash,
Handle<mirror::ClassLoader> class_loader,
/*out*/ ObjPtr<mirror::Class>* result) {
- ArtField* field =
- jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders);
- return FindClassInSharedLibrariesHelper(soa, self, descriptor, hash, class_loader, field, result);
+ ArtField* field = WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders;
+ return FindClassInSharedLibrariesHelper(self, descriptor, hash, class_loader, field, result);
}
-bool ClassLinker::FindClassInSharedLibrariesHelper(ScopedObjectAccessAlreadyRunnable& soa,
- Thread* self,
+bool ClassLinker::FindClassInSharedLibrariesHelper(Thread* self,
const char* descriptor,
size_t hash,
Handle<mirror::ClassLoader> class_loader,
@@ -2682,38 +2936,35 @@ bool ClassLinker::FindClassInSharedLibrariesHelper(ScopedObjectAccessAlreadyRunn
for (auto loader : shared_libraries.Iterate<mirror::ClassLoader>()) {
temp_loader.Assign(loader);
RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
- FindClassInBaseDexClassLoader(soa, self, descriptor, hash, temp_loader, result),
+ FindClassInBaseDexClassLoader(self, descriptor, hash, temp_loader, result),
*result,
self);
}
return true;
}
-bool ClassLinker::FindClassInSharedLibrariesAfter(ScopedObjectAccessAlreadyRunnable& soa,
- Thread* self,
+bool ClassLinker::FindClassInSharedLibrariesAfter(Thread* self,
const char* descriptor,
size_t hash,
Handle<mirror::ClassLoader> class_loader,
/*out*/ ObjPtr<mirror::Class>* result) {
- ArtField* field = jni::DecodeArtField(
- WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoadersAfter);
- return FindClassInSharedLibrariesHelper(soa, self, descriptor, hash, class_loader, field, result);
+ ArtField* field = WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoadersAfter;
+ return FindClassInSharedLibrariesHelper(self, descriptor, hash, class_loader, field, result);
}
-bool ClassLinker::FindClassInBaseDexClassLoader(ScopedObjectAccessAlreadyRunnable& soa,
- Thread* self,
+bool ClassLinker::FindClassInBaseDexClassLoader(Thread* self,
const char* descriptor,
size_t hash,
Handle<mirror::ClassLoader> class_loader,
/*out*/ ObjPtr<mirror::Class>* result) {
// Termination case: boot class loader.
- if (IsBootClassLoader(soa, class_loader.Get())) {
+ if (IsBootClassLoader(class_loader.Get())) {
RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
FindClassInBootClassLoaderClassPath(self, descriptor, hash, result), *result, self);
return true;
}
- if (IsPathOrDexClassLoader(soa, class_loader) || IsInMemoryDexClassLoader(soa, class_loader)) {
+ if (IsPathOrDexClassLoader(class_loader) || IsInMemoryDexClassLoader(class_loader)) {
// For regular path or dex class loader the search order is:
// - parent
// - shared libraries
@@ -2723,19 +2974,19 @@ bool ClassLinker::FindClassInBaseDexClassLoader(ScopedObjectAccessAlreadyRunnabl
StackHandleScope<1> hs(self);
Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
- FindClassInBaseDexClassLoader(soa, self, descriptor, hash, h_parent, result),
+ FindClassInBaseDexClassLoader(self, descriptor, hash, h_parent, result),
*result,
self);
RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
- FindClassInSharedLibraries(soa, self, descriptor, hash, class_loader, result),
+ FindClassInSharedLibraries(self, descriptor, hash, class_loader, result),
*result,
self);
RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
- FindClassInBaseDexClassLoaderClassPath(soa, descriptor, hash, class_loader, result),
+ FindClassInBaseDexClassLoaderClassPath(self, descriptor, hash, class_loader, result),
*result,
self);
RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
- FindClassInSharedLibrariesAfter(soa, self, descriptor, hash, class_loader, result),
+ FindClassInSharedLibrariesAfter(self, descriptor, hash, class_loader, result),
*result,
self);
// We did not find a class, but the class loader chain was recognized, so we
@@ -2743,7 +2994,7 @@ bool ClassLinker::FindClassInBaseDexClassLoader(ScopedObjectAccessAlreadyRunnabl
return true;
}
- if (IsDelegateLastClassLoader(soa, class_loader)) {
+ if (IsDelegateLastClassLoader(class_loader)) {
// For delegate last, the search order is:
// - boot class path
// - shared libraries
@@ -2752,15 +3003,15 @@ bool ClassLinker::FindClassInBaseDexClassLoader(ScopedObjectAccessAlreadyRunnabl
RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
FindClassInBootClassLoaderClassPath(self, descriptor, hash, result), *result, self);
RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
- FindClassInSharedLibraries(soa, self, descriptor, hash, class_loader, result),
+ FindClassInSharedLibraries(self, descriptor, hash, class_loader, result),
*result,
self);
RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
- FindClassInBaseDexClassLoaderClassPath(soa, descriptor, hash, class_loader, result),
+ FindClassInBaseDexClassLoaderClassPath(self, descriptor, hash, class_loader, result),
*result,
self);
RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
- FindClassInSharedLibrariesAfter(soa, self, descriptor, hash, class_loader, result),
+ FindClassInSharedLibrariesAfter(self, descriptor, hash, class_loader, result),
*result,
self);
@@ -2768,7 +3019,7 @@ bool ClassLinker::FindClassInBaseDexClassLoader(ScopedObjectAccessAlreadyRunnabl
StackHandleScope<1> hs(self);
Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
- FindClassInBaseDexClassLoader(soa, self, descriptor, hash, h_parent, result),
+ FindClassInBaseDexClassLoader(self, descriptor, hash, h_parent, result),
*result,
self);
// We did not find a class, but the class loader chain was recognized, so we
@@ -2837,14 +3088,14 @@ bool ClassLinker::FindClassInBootClassLoaderClassPath(Thread* self,
}
bool ClassLinker::FindClassInBaseDexClassLoaderClassPath(
- ScopedObjectAccessAlreadyRunnable& soa,
+ Thread* self,
const char* descriptor,
size_t hash,
Handle<mirror::ClassLoader> class_loader,
/*out*/ ObjPtr<mirror::Class>* result) {
- DCHECK(IsPathOrDexClassLoader(soa, class_loader) ||
- IsInMemoryDexClassLoader(soa, class_loader) ||
- IsDelegateLastClassLoader(soa, class_loader))
+ DCHECK(IsPathOrDexClassLoader(class_loader) ||
+ IsInMemoryDexClassLoader(class_loader) ||
+ IsDelegateLastClassLoader(class_loader))
<< "Unexpected class loader for descriptor " << descriptor;
const DexFile* dex_file = nullptr;
@@ -2859,15 +3110,15 @@ bool ClassLinker::FindClassInBaseDexClassLoaderClassPath(
}
return true; // Continue with the next DexFile.
};
- VisitClassLoaderDexFiles(soa, class_loader, find_class_def);
+ VisitClassLoaderDexFiles(self, class_loader, find_class_def);
if (class_def != nullptr) {
- *result = DefineClass(soa.Self(), descriptor, hash, class_loader, *dex_file, *class_def);
+ *result = DefineClass(self, descriptor, hash, class_loader, *dex_file, *class_def);
if (UNLIKELY(*result == nullptr)) {
- CHECK(soa.Self()->IsExceptionPending()) << descriptor;
- FilterDexFileCaughtExceptions(soa.Self(), this);
+ CHECK(self->IsExceptionPending()) << descriptor;
+ FilterDexFileCaughtExceptions(self, this);
} else {
- DCHECK(!soa.Self()->IsExceptionPending());
+ DCHECK(!self->IsExceptionPending());
}
}
// A BaseDexClassLoader is always a known lookup.
@@ -2923,7 +3174,7 @@ ObjPtr<mirror::Class> ClassLinker::FindClass(Thread* self,
} else {
ScopedObjectAccessUnchecked soa(self);
bool known_hierarchy =
- FindClassInBaseDexClassLoader(soa, self, descriptor, hash, class_loader, &result_ptr);
+ FindClassInBaseDexClassLoader(self, descriptor, hash, class_loader, &result_ptr);
if (result_ptr != nullptr) {
// The chain was understood and we found the class. We still need to add the class to
// the class table to protect from racy programs that can try and redefine the path list
@@ -2978,29 +3229,23 @@ ObjPtr<mirror::Class> ClassLinker::FindClass(Thread* self,
"%s",
class_name_string.c_str());
} else {
- ScopedLocalRef<jobject> class_loader_object(
- soa.Env(), soa.AddLocalReference<jobject>(class_loader.Get()));
- ScopedLocalRef<jobject> result(soa.Env(), nullptr);
- {
- ScopedThreadStateChange tsc(self, ThreadState::kNative);
- ScopedLocalRef<jobject> class_name_object(
- soa.Env(), soa.Env()->NewStringUTF(class_name_string.c_str()));
- if (class_name_object.get() == nullptr) {
- DCHECK(self->IsExceptionPending()); // OOME.
- return nullptr;
- }
- CHECK(class_loader_object.get() != nullptr);
- result.reset(soa.Env()->CallObjectMethod(class_loader_object.get(),
- WellKnownClasses::java_lang_ClassLoader_loadClass,
- class_name_object.get()));
+ StackHandleScope<1u> hs(self);
+ Handle<mirror::String> class_name_object = hs.NewHandle(
+ mirror::String::AllocFromModifiedUtf8(self, class_name_string.c_str()));
+ if (class_name_object == nullptr) {
+ DCHECK(self->IsExceptionPending()); // OOME.
+ return nullptr;
}
- if (result.get() == nullptr && !self->IsExceptionPending()) {
+ DCHECK(class_loader != nullptr);
+ result_ptr = ObjPtr<mirror::Class>::DownCast(
+ WellKnownClasses::java_lang_ClassLoader_loadClass->InvokeVirtual<'L', 'L'>(
+ self, class_loader.Get(), class_name_object.Get()));
+ if (result_ptr == nullptr && !self->IsExceptionPending()) {
// broken loader - throw NPE to be compatible with Dalvik
ThrowNullPointerException(StringPrintf("ClassLoader.loadClass returned null for %s",
class_name_string.c_str()).c_str());
return nullptr;
}
- result_ptr = soa.Decode<mirror::Class>(result.get());
// Check the name of the returned class.
descriptor_equals = (result_ptr != nullptr) && result_ptr->DescriptorEquals(descriptor);
}
@@ -3114,6 +3359,7 @@ ObjPtr<mirror::Class> ClassLinker::DefineClass(Thread* self,
ScopedDefiningClass sdc(self);
StackHandleScope<3> hs(self);
metrics::AutoTimer timer{GetMetrics()->ClassLoadingTotalTime()};
+ metrics::AutoTimer timeDelta{GetMetrics()->ClassLoadingTotalTimeDelta()};
auto klass = hs.NewHandle<mirror::Class>(nullptr);
// Load the class from the dex file.
@@ -3271,7 +3517,7 @@ ObjPtr<mirror::Class> ClassLinker::DefineClass(Thread* self,
// classes. However it could not update methods of this class while we
// were loading it. Now the class is resolved, we can update entrypoints
// as required by instrumentation.
- if (Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()) {
+ if (Runtime::Current()->GetInstrumentation()->EntryExitStubsInstalled()) {
// We must be in the kRunnable state to prevent instrumentation from
// suspending all threads to update entrypoints while we are doing it
// for this class.
@@ -3389,14 +3635,11 @@ void ClassLinker::FixupStaticTrampolines(Thread* self, ObjPtr<mirror::Class> kla
}
instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
- // Link the code of methods skipped by LinkCode.
for (size_t method_index = 0; method_index < num_direct_methods; ++method_index) {
ArtMethod* method = klass->GetDirectMethod(method_index, pointer_size);
- if (!method->IsStatic()) {
- // Only update static methods.
- continue;
+ if (method->NeedsClinitCheckBeforeCall()) {
+ instrumentation->UpdateMethodsCode(method, instrumentation->GetCodeForInvoke(method));
}
- instrumentation->UpdateMethodsCode(method, instrumentation->GetCodeForInvoke(method));
}
// Ignore virtual methods on the iterator.
}
@@ -3424,7 +3667,7 @@ static void LinkCode(ClassLinker* class_linker,
}
// Method shouldn't have already been linked.
- DCHECK(method->GetEntryPointFromQuickCompiledCode() == nullptr);
+ DCHECK_EQ(method->GetEntryPointFromQuickCompiledCode(), nullptr);
DCHECK(!method->GetDeclaringClass()->IsVisiblyInitialized()); // Actually ClassStatus::Idx.
if (!method->IsInvokable()) {
@@ -3480,7 +3723,7 @@ LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self,
// If the ArtField alignment changes, review all uses of LengthPrefixedArray<ArtField>.
static_assert(alignof(ArtField) == 4, "ArtField alignment is expected to be 4.");
size_t storage_size = LengthPrefixedArray<ArtField>::ComputeSize(length);
- void* array_storage = allocator->Alloc(self, storage_size);
+ void* array_storage = allocator->Alloc(self, storage_size, LinearAllocKind::kArtFieldArray);
auto* ret = new(array_storage) LengthPrefixedArray<ArtField>(length);
CHECK(ret != nullptr);
std::uninitialized_fill_n(&ret->At(0), length, ArtField());
@@ -3497,7 +3740,7 @@ LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self,
const size_t method_size = ArtMethod::Size(image_pointer_size_);
const size_t storage_size =
LengthPrefixedArray<ArtMethod>::ComputeSize(length, method_size, method_alignment);
- void* array_storage = allocator->Alloc(self, storage_size);
+ void* array_storage = allocator->Alloc(self, storage_size, LinearAllocKind::kArtMethodArray);
auto* ret = new (array_storage) LengthPrefixedArray<ArtMethod>(length);
CHECK(ret != nullptr);
for (size_t i = 0; i < length; ++i) {
@@ -3810,7 +4053,8 @@ void ClassLinker::RegisterDexFileLocked(const DexFile& dex_file,
std::string dex_file_location = dex_file.GetLocation();
// The following paths checks don't work on preopt when using boot dex files, where the dex
// cache location is the one on device, and the dex_file's location is the one on host.
- if (!(Runtime::Current()->IsAotCompiler() && class_loader == nullptr && !kIsTargetBuild)) {
+ Runtime* runtime = Runtime::Current();
+ if (!(runtime->IsAotCompiler() && class_loader == nullptr && !kIsTargetBuild)) {
CHECK_GE(dex_file_location.length(), dex_cache_length)
<< dex_cache_location << " " << dex_file.GetLocation();
const std::string dex_file_suffix = dex_file_location.substr(
@@ -3820,34 +4064,37 @@ void ClassLinker::RegisterDexFileLocked(const DexFile& dex_file,
// dex file location is /system/priv-app/SettingsProvider/SettingsProvider.apk
CHECK_EQ(dex_cache_location, dex_file_suffix);
}
+
+ // Check if we need to initialize OatFile data (.data.bimg.rel.ro and .bss
+ // sections) needed for code execution and register the oat code range.
const OatFile* oat_file =
(dex_file.GetOatDexFile() != nullptr) ? dex_file.GetOatDexFile()->GetOatFile() : nullptr;
- // Clean up pass to remove null dex caches; null dex caches can occur due to class unloading
- // and we are lazily removing null entries. Also check if we need to initialize OatFile data
- // (.data.bimg.rel.ro and .bss sections) needed for code execution.
bool initialize_oat_file_data = (oat_file != nullptr) && oat_file->IsExecutable();
- JavaVMExt* const vm = self->GetJniEnv()->GetVm();
- for (auto it = dex_caches_.begin(); it != dex_caches_.end(); ) {
- const DexCacheData& data = it->second;
- if (self->IsJWeakCleared(data.weak_root)) {
- vm->DeleteWeakGlobalRef(self, data.weak_root);
- it = dex_caches_.erase(it);
- } else {
- if (initialize_oat_file_data &&
- it->first->GetOatDexFile() != nullptr &&
- it->first->GetOatDexFile()->GetOatFile() == oat_file) {
+ if (initialize_oat_file_data) {
+ for (const auto& entry : dex_caches_) {
+ if (!self->IsJWeakCleared(entry.second.weak_root) &&
+ entry.first->GetOatDexFile() != nullptr &&
+ entry.first->GetOatDexFile()->GetOatFile() == oat_file) {
initialize_oat_file_data = false; // Already initialized.
+ break;
}
- ++it;
}
}
if (initialize_oat_file_data) {
oat_file->InitializeRelocations();
+ // Notify the fault handler about the new executable code range if needed.
+ size_t exec_offset = oat_file->GetOatHeader().GetExecutableOffset();
+ DCHECK_LE(exec_offset, oat_file->Size());
+ size_t exec_size = oat_file->Size() - exec_offset;
+ if (exec_size != 0u) {
+ runtime->AddGeneratedCodeRange(oat_file->Begin() + exec_offset, exec_size);
+ }
}
+
// Let hiddenapi assign a domain to the newly registered dex file.
hiddenapi::InitializeDexFileDomain(dex_file, class_loader);
- jweak dex_cache_jweak = vm->AddWeakGlobalRef(self, dex_cache);
+ jweak dex_cache_jweak = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, dex_cache);
DexCacheData data;
data.weak_root = dex_cache_jweak;
data.class_table = ClassTableForClassLoader(class_loader);
@@ -4045,11 +4292,22 @@ ObjPtr<mirror::DexCache> ClassLinker::FindDexCache(Thread* self, const OatDexFil
for (const auto& entry : dex_caches_) {
const DexCacheData& data = entry.second;
if (DecodeDexCacheLocked(self, &data) != nullptr) {
- LOG(FATAL_WITHOUT_ABORT) << "Registered dex file " << entry.first->GetLocation();
- }
- }
- LOG(FATAL) << "Failed to find DexCache for OatDexFile " << oat_dex_file.GetDexFileLocation()
- << " " << &oat_dex_file;
+ const OatDexFile* other_oat_dex_file = entry.first->GetOatDexFile();
+ const OatFile* oat_file =
+ (other_oat_dex_file == nullptr) ? nullptr : other_oat_dex_file->GetOatFile();
+ LOG(FATAL_WITHOUT_ABORT)
+ << "Registered dex file " << entry.first->GetLocation()
+ << " oat_dex_file=" << other_oat_dex_file
+ << " oat_file=" << oat_file
+ << " oat_location=" << (oat_file == nullptr ? "null" : oat_file->GetLocation())
+ << " dex_file=" << &entry.first;
+ }
+ }
+ LOG(FATAL) << "Failed to find DexCache for OatDexFile "
+ << oat_dex_file.GetDexFileLocation()
+ << " oat_dex_file=" << &oat_dex_file
+ << " oat_file=" << oat_dex_file.GetOatFile()
+ << " oat_location=" << oat_dex_file.GetOatFile()->GetLocation();
UNREACHABLE();
}
@@ -4144,10 +4402,11 @@ ObjPtr<mirror::Class> ClassLinker::CreateArrayClass(Thread* self,
class_loader)));
if (component_type == nullptr) {
DCHECK(self->IsExceptionPending());
- // We need to accept erroneous classes as component types.
+ // We need to accept erroneous classes as component types. Under AOT, we
+ // don't accept them as we cannot encode the erroneous class in an image.
const size_t component_hash = ComputeModifiedUtf8Hash(descriptor + 1);
component_type.Assign(LookupClass(self, descriptor + 1, component_hash, class_loader.Get()));
- if (component_type == nullptr) {
+ if (component_type == nullptr || Runtime::Current()->IsAotCompiler()) {
DCHECK(self->IsExceptionPending());
return nullptr;
} else {
@@ -4766,6 +5025,12 @@ void ClassLinker::ResolveMethodExceptionHandlerTypes(ArtMethod* method) {
return; // nothing to process
}
const uint8_t* handlers_ptr = accessor.GetCatchHandlerData(0);
+ CHECK(method->GetDexFile()->IsInDataSection(handlers_ptr))
+ << method->PrettyMethod()
+ << "@" << method->GetDexFile()->GetLocation()
+ << "@" << reinterpret_cast<const void*>(handlers_ptr)
+ << " is_compact_dex=" << method->GetDexFile()->IsCompactDexFile();
+
uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr);
for (uint32_t idx = 0; idx < handlers_size; idx++) {
CatchHandlerIterator iterator(handlers_ptr);
@@ -5022,8 +5287,7 @@ void ClassLinker::CreateProxyConstructor(Handle<mirror::Class> klass, ArtMethod*
// Find the <init>(InvocationHandler)V method. The exact method offset varies depending
// on which front-end compiler was used to build the libcore DEX files.
- ArtMethod* proxy_constructor =
- jni::DecodeArtMethod(WellKnownClasses::java_lang_reflect_Proxy_init);
+ ArtMethod* proxy_constructor = WellKnownClasses::java_lang_reflect_Proxy_init;
DCHECK(proxy_constructor != nullptr)
<< "Could not find <init> method in java.lang.reflect.Proxy";
@@ -5088,11 +5352,19 @@ void ClassLinker::CheckProxyMethod(ArtMethod* method, ArtMethod* prototype) cons
CHECK_EQ(prototype, method->GetInterfaceMethodIfProxy(image_pointer_size_));
}
-bool ClassLinker::CanWeInitializeClass(ObjPtr<mirror::Class> klass, bool can_init_statics,
+bool ClassLinker::CanWeInitializeClass(ObjPtr<mirror::Class> klass,
+ bool can_init_statics,
bool can_init_parents) {
if (can_init_statics && can_init_parents) {
return true;
}
+ DCHECK(Runtime::Current()->IsAotCompiler());
+
+ // We currently don't support initializing at AOT time classes that need access
+ // checks.
+ if (klass->IsVerifiedNeedsAccessChecks()) {
+ return false;
+ }
if (!can_init_statics) {
// Check if there's a class initializer.
ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
@@ -5842,17 +6114,6 @@ ClassTable* ClassLinker::ClassTableForClassLoader(ObjPtr<mirror::ClassLoader> cl
return class_loader == nullptr ? boot_class_table_.get() : class_loader->GetClassTable();
}
-static ImTable* FindSuperImt(ObjPtr<mirror::Class> klass, PointerSize pointer_size)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- while (klass->HasSuperClass()) {
- klass = klass->GetSuperClass();
- if (klass->ShouldHaveImt()) {
- return klass->GetImt(pointer_size);
- }
- }
- return nullptr;
-}
-
bool ClassLinker::LinkClass(Thread* self,
const char* descriptor,
Handle<mirror::Class> klass,
@@ -5888,7 +6149,7 @@ bool ClassLinker::LinkClass(Thread* self,
// will possibly create a table that is incorrect for either of the classes.
// Same IMT with new_conflict does not happen very often.
if (!new_conflict) {
- ImTable* super_imt = FindSuperImt(klass.Get(), image_pointer_size_);
+ ImTable* super_imt = klass->FindSuperImt(image_pointer_size_);
if (super_imt != nullptr) {
bool imt_equals = true;
for (size_t i = 0; i < ImTable::kSize && imt_equals; ++i) {
@@ -5902,7 +6163,9 @@ bool ClassLinker::LinkClass(Thread* self,
if (imt == nullptr) {
LinearAlloc* allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
imt = reinterpret_cast<ImTable*>(
- allocator->Alloc(self, ImTable::SizeInBytes(image_pointer_size_)));
+ allocator->Alloc(self,
+ ImTable::SizeInBytes(image_pointer_size_),
+ LinearAllocKind::kNoGCRoots));
if (imt == nullptr) {
return false;
}
@@ -6093,6 +6356,10 @@ bool ClassLinker::LinkSuperClass(Handle<mirror::Class> klass) {
klass->PrettyDescriptor().c_str());
return false;
}
+ if (!VerifyRecordClass(klass, super)) {
+ DCHECK(Thread::Current()->IsExceptionPending());
+ return false;
+ }
// Inherit kAccClassIsFinalizable from the superclass in case this
// class doesn't override finalize.
@@ -6169,13 +6436,36 @@ class MethodNameAndSignatureComparator final : public ValueObject {
std::string_view name_view_;
};
+static ObjPtr<mirror::Class> GetImtOwner(ObjPtr<mirror::Class> klass)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ ImTable* imt = klass->GetImt(kRuntimePointerSize);
+ DCHECK(imt != nullptr);
+ while (klass->HasSuperClass()) {
+ ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
+ if (super_class->ShouldHaveImt() && imt != super_class->GetImt(kRuntimePointerSize)) {
+ // IMT not shared with the super class, return the current class.
+ return klass;
+ }
+ klass = super_class;
+ }
+ return nullptr;
+}
+
ArtMethod* ClassLinker::AddMethodToConflictTable(ObjPtr<mirror::Class> klass,
ArtMethod* conflict_method,
ArtMethod* interface_method,
ArtMethod* method) {
ImtConflictTable* current_table = conflict_method->GetImtConflictTable(kRuntimePointerSize);
Runtime* const runtime = Runtime::Current();
- LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader());
+
+ // The IMT may be shared with a super class, in which case we need to use that
+ // super class's `LinearAlloc`. The conflict itself should be limited to
+ // methods at or higher up the chain of the IMT owner, otherwise class
+ // linker would have created a different IMT.
+ ObjPtr<mirror::Class> imt_owner = GetImtOwner(klass);
+ DCHECK(imt_owner != nullptr);
+
+ LinearAlloc* linear_alloc = GetAllocatorForClassLoader(imt_owner->GetClassLoader());
// Create a new entry if the existing one is the shared conflict method.
ArtMethod* new_conflict_method = (conflict_method == runtime->GetImtConflictMethod())
@@ -6185,8 +6475,9 @@ ArtMethod* ClassLinker::AddMethodToConflictTable(ObjPtr<mirror::Class> klass,
// Allocate a new table. Note that we will leak this table at the next conflict,
// but that's a tradeoff compared to making the table fixed size.
void* data = linear_alloc->Alloc(
- Thread::Current(), ImtConflictTable::ComputeSizeWithOneMoreEntry(current_table,
- image_pointer_size_));
+ Thread::Current(),
+ ImtConflictTable::ComputeSizeWithOneMoreEntry(current_table, image_pointer_size_),
+ LinearAllocKind::kNoGCRoots);
if (data == nullptr) {
LOG(ERROR) << "Failed to allocate conflict table";
return conflict_method;
@@ -6259,9 +6550,8 @@ void ClassLinker::FillIMTAndConflictTables(ObjPtr<mirror::Class> klass) {
// Compare the IMT with the super class including the conflict methods. If they are equivalent,
// we can just use the same pointer.
ImTable* imt = nullptr;
- ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
- if (super_class != nullptr && super_class->ShouldHaveImt()) {
- ImTable* super_imt = super_class->GetImt(image_pointer_size_);
+ ImTable* super_imt = klass->FindSuperImt(image_pointer_size_);
+ if (super_imt != nullptr) {
bool same = true;
for (size_t i = 0; same && i < ImTable::kSize; ++i) {
ArtMethod* method = imt_data[i];
@@ -6290,6 +6580,7 @@ void ClassLinker::FillIMTAndConflictTables(ObjPtr<mirror::Class> klass) {
if (imt == nullptr) {
imt = klass->GetImt(image_pointer_size_);
DCHECK(imt != nullptr);
+ DCHECK_NE(imt, super_imt);
imt->Populate(imt_data, image_pointer_size_);
} else {
klass->SetImt(imt, image_pointer_size_);
@@ -6300,8 +6591,8 @@ ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count,
LinearAlloc* linear_alloc,
PointerSize image_pointer_size) {
void* data = linear_alloc->Alloc(Thread::Current(),
- ImtConflictTable::ComputeSize(count,
- image_pointer_size));
+ ImtConflictTable::ComputeSize(count, image_pointer_size),
+ LinearAllocKind::kNoGCRoots);
return (data != nullptr) ? new (data) ImtConflictTable(count, image_pointer_size) : nullptr;
}
@@ -6917,7 +7208,7 @@ class ClassLinker::LinkMethodsHelper {
klass_(klass),
self_(self),
runtime_(runtime),
- stack_(runtime->GetLinearAlloc()->GetArenaPool()),
+ stack_(runtime->GetArenaPool()),
allocator_(&stack_),
copied_method_records_(copied_method_records_initial_buffer_,
kCopiedMethodRecordInitialBufferSize,
@@ -6997,6 +7288,10 @@ class ClassLinker::LinkMethodsHelper {
kMethodSize,
kMethodAlignment);
memset(old_methods, 0xFEu, old_size);
+ // Set size to 0 to avoid visiting declaring classes.
+ if (gUseUserfaultfd) {
+ old_methods->SetSize(0);
+ }
}
}
}
@@ -7599,16 +7894,25 @@ void ClassLinker::LinkMethodsHelper<kPointerSize>::ReallocMethods(ObjPtr<mirror:
const size_t old_methods_ptr_size = (old_methods != nullptr) ? old_size : 0;
auto* methods = reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
class_linker_->GetAllocatorForClassLoader(klass->GetClassLoader())->Realloc(
- self_, old_methods, old_methods_ptr_size, new_size));
+ self_, old_methods, old_methods_ptr_size, new_size, LinearAllocKind::kArtMethodArray));
CHECK(methods != nullptr); // Native allocation failure aborts.
if (methods != old_methods) {
- StrideIterator<ArtMethod> out = methods->begin(kMethodSize, kMethodAlignment);
- // Copy over the old methods. The `ArtMethod::CopyFrom()` is only necessary to not miss
- // read barriers since `LinearAlloc::Realloc()` won't do read barriers when it copies.
- for (auto& m : klass->GetMethods(kPointerSize)) {
- out->CopyFrom(&m, kPointerSize);
- ++out;
+ if (gUseReadBarrier) {
+ StrideIterator<ArtMethod> out = methods->begin(kMethodSize, kMethodAlignment);
+ // Copy over the old methods. The `ArtMethod::CopyFrom()` is only necessary to not miss
+ // read barriers since `LinearAlloc::Realloc()` won't do read barriers when it copies.
+ for (auto& m : klass->GetMethods(kPointerSize)) {
+ out->CopyFrom(&m, kPointerSize);
+ ++out;
+ }
+ } else if (gUseUserfaultfd) {
+ // Clear the declaring class of the old dangling method array so that GC doesn't
+ // try to update them, which could cause crashes in userfaultfd GC due to
+ // checks in post-compact address computation.
+ for (auto& m : klass->GetMethods(kPointerSize)) {
+ m.SetDeclaringClass(nullptr);
+ }
}
}
@@ -7756,7 +8060,8 @@ bool ClassLinker::LinkMethodsHelper<kPointerSize>::FinalizeIfTable(
LengthPrefixedArray<ArtMethod>* const new_methods = klass->GetMethodsPtr();
if (new_methods != nullptr) {
DCHECK_NE(new_methods->size(), 0u);
- imt_methods_begin = reinterpret_cast<uintptr_t>(&new_methods->At(0));
+ imt_methods_begin =
+ reinterpret_cast<uintptr_t>(&new_methods->At(0, kMethodSize, kMethodAlignment));
imt_methods_size = new_methods->size() * kMethodSize;
}
}
@@ -7859,20 +8164,6 @@ bool ClassLinker::LinkMethodsHelper<kPointerSize>::FinalizeIfTable(
return true;
}
-NO_INLINE
-static void ThrowIllegalAccessErrorForImplementingMethod(ObjPtr<mirror::Class> klass,
- ArtMethod* vtable_method,
- ArtMethod* interface_method)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- DCHECK(!vtable_method->IsAbstract());
- DCHECK(!vtable_method->IsPublic());
- ThrowIllegalAccessError(
- klass,
- "Method '%s' implementing interface method '%s' is not public",
- vtable_method->PrettyMethod().c_str(),
- interface_method->PrettyMethod().c_str());
-}
-
template <PointerSize kPointerSize>
ObjPtr<mirror::PointerArray> ClassLinker::LinkMethodsHelper<kPointerSize>::AllocPointerArray(
Thread* self, size_t length) {
@@ -7961,55 +8252,17 @@ size_t ClassLinker::LinkMethodsHelper<kPointerSize>::AssignVTableIndexes(
static constexpr double kMinLoadFactor = 0.3;
static constexpr double kMaxLoadFactor = 0.5;
static constexpr size_t kMaxStackBuferSize = 256;
- const size_t super_vtable_buffer_size = super_vtable_length * 3;
const size_t declared_virtuals_buffer_size = num_virtual_methods * 3;
- const size_t total_buffer_size = super_vtable_buffer_size + declared_virtuals_buffer_size;
- uint32_t* super_vtable_buffer_ptr = (total_buffer_size <= kMaxStackBuferSize)
- ? reinterpret_cast<uint32_t*>(alloca(total_buffer_size * sizeof(uint32_t)))
- : allocator_.AllocArray<uint32_t>(total_buffer_size);
- uint32_t* declared_virtuals_buffer_ptr = super_vtable_buffer_ptr + super_vtable_buffer_size;
- VTableSignatureSet super_vtable_signatures(
- kMinLoadFactor,
- kMaxLoadFactor,
- VTableSignatureHash(super_vtable_accessor),
- VTableSignatureEqual(super_vtable_accessor),
- super_vtable_buffer_ptr,
- super_vtable_buffer_size,
- allocator_.Adapter());
- ArrayRef<uint32_t> same_signature_vtable_lists;
- // Insert the first `mirror::Object::kVTableLength` indexes with pre-calculated hashes.
- DCHECK_GE(super_vtable_length, mirror::Object::kVTableLength);
- for (uint32_t i = 0; i != mirror::Object::kVTableLength; ++i) {
- size_t hash = class_linker_->object_virtual_method_hashes_[i];
- // There are no duplicate signatures in `java.lang.Object`, so use `HashSet<>::PutWithHash()`.
- // This avoids equality comparison for the three `java.lang.Object.wait()` overloads.
- super_vtable_signatures.PutWithHash(i, hash);
- }
- // Insert the remaining indexes, check for duplicate signatures.
- if (super_vtable_length > mirror::Object::kVTableLength) {
- for (size_t i = mirror::Object::kVTableLength; i < super_vtable_length; ++i) {
- // Use `super_vtable_accessor` for getting the method for hash calculation.
- // Letting `HashSet<>::insert()` use the internal accessor copy in the hash
- // function prevents the compiler from optimizing this properly because the
- // compiler cannot prove that the accessor copy is immutable.
- size_t hash = ComputeMethodHash(super_vtable_accessor.GetVTableEntry(i));
- auto [it, inserted] = super_vtable_signatures.InsertWithHash(i, hash);
- if (UNLIKELY(!inserted)) {
- if (same_signature_vtable_lists.empty()) {
- same_signature_vtable_lists = ArrayRef<uint32_t>(
- allocator_.AllocArray<uint32_t>(super_vtable_length), super_vtable_length);
- std::fill_n(same_signature_vtable_lists.data(), super_vtable_length, dex::kDexNoIndex);
- same_signature_vtable_lists_ = same_signature_vtable_lists;
- }
- DCHECK_LT(*it, i);
- same_signature_vtable_lists[i] = *it;
- *it = i;
- }
- }
- }
+ const size_t super_vtable_buffer_size = super_vtable_length * 3;
+ const size_t bit_vector_size = BitVector::BitsToWords(num_virtual_methods);
+ const size_t total_size =
+ declared_virtuals_buffer_size + super_vtable_buffer_size + bit_vector_size;
+
+ uint32_t* declared_virtuals_buffer_ptr = (total_size <= kMaxStackBuferSize)
+ ? reinterpret_cast<uint32_t*>(alloca(total_size * sizeof(uint32_t)))
+ : allocator_.AllocArray<uint32_t>(total_size);
+ uint32_t* bit_vector_buffer_ptr = declared_virtuals_buffer_ptr + declared_virtuals_buffer_size;
- // For each declared virtual method, look for a superclass virtual method
- // to override and assign a new vtable index if no method was overridden.
DeclaredVirtualSignatureSet declared_virtual_signatures(
kMinLoadFactor,
kMaxLoadFactor,
@@ -8018,8 +8271,24 @@ size_t ClassLinker::LinkMethodsHelper<kPointerSize>::AssignVTableIndexes(
declared_virtuals_buffer_ptr,
declared_virtuals_buffer_size,
allocator_.Adapter());
+
+ ArrayRef<uint32_t> same_signature_vtable_lists;
const bool is_proxy_class = klass->IsProxyClass();
size_t vtable_length = super_vtable_length;
+
+ // Record which declared methods are overriding a super method.
+ BitVector initialized_methods(/* expandable= */ false,
+ Allocator::GetNoopAllocator(),
+ bit_vector_size,
+ bit_vector_buffer_ptr);
+
+ // Note: our sets hash on the method name, and therefore we pay a high
+ // performance price when a class has many overloads.
+ //
+ // We populate a set of declared signatures instead of signatures from the
+ // super vtable (which is only lazy populated in case of interface overriding,
+ // see below). This makes sure that we pay the performance price only on that
+ // class, and not on its subclasses (except in the case of interface overriding, see below).
for (size_t i = 0; i != num_virtual_methods; ++i) {
ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
DCHECK(!virtual_method->IsStatic()) << virtual_method->PrettyMethod();
@@ -8028,59 +8297,79 @@ size_t ClassLinker::LinkMethodsHelper<kPointerSize>::AssignVTableIndexes(
: virtual_method;
size_t hash = ComputeMethodHash(signature_method);
declared_virtual_signatures.PutWithHash(i, hash);
- auto it = super_vtable_signatures.FindWithHash(signature_method, hash);
- if (it != super_vtable_signatures.end()) {
- size_t super_index = *it;
- DCHECK_LT(super_index, super_vtable_length);
- ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(super_index);
- // Historical note: Before Android 4.1, an inaccessible package-private
- // superclass method would have been incorrectly overridden.
- bool overrides = klass->CanAccessMember(super_method->GetDeclaringClass(),
- super_method->GetAccessFlags());
- if (overrides && super_method->IsFinal()) {
- sants.reset();
- ThrowLinkageError(klass, "Method %s overrides final method in class %s",
- virtual_method->PrettyMethod().c_str(),
- super_method->GetDeclaringClassDescriptor());
- return 0u;
- }
- if (UNLIKELY(!same_signature_vtable_lists.empty())) {
- // We may override more than one method according to JLS, see b/211854716 .
- // We record the highest overridden vtable index here so that we can walk
- // the list to find other overridden methods when constructing the vtable.
- // However, we walk all the methods to check for final method overriding.
- size_t current_index = super_index;
- while (same_signature_vtable_lists[current_index] != dex::kDexNoIndex) {
- DCHECK_LT(same_signature_vtable_lists[current_index], current_index);
- current_index = same_signature_vtable_lists[current_index];
- ArtMethod* current_method = super_vtable_accessor.GetVTableEntry(current_index);
- if (klass->CanAccessMember(current_method->GetDeclaringClass(),
- current_method->GetAccessFlags())) {
- if (current_method->IsFinal()) {
- sants.reset();
- ThrowLinkageError(klass, "Method %s overrides final method in class %s",
- virtual_method->PrettyMethod().c_str(),
- current_method->GetDeclaringClassDescriptor());
- return 0u;
- }
- if (!overrides) {
- overrides = true;
- super_index = current_index;
- super_method = current_method;
- }
- }
- }
- }
- if (overrides) {
- virtual_method->SetMethodIndex(super_index);
- continue;
- }
+ }
+
+ // Loop through each super vtable method and see if they are overridden by a method we added to
+ // the hash table.
+ for (size_t j = 0; j < super_vtable_length; ++j) {
+ // Search the hash table to see if we are overridden by any method.
+ ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(j);
+ if (!klass->CanAccessMember(super_method->GetDeclaringClass(),
+ super_method->GetAccessFlags())) {
+ // Continue on to the next method since this one is package private and cannot be overridden.
+ // Before Android 4.1, the package-private method super_method might have been incorrectly
+ // overridden.
+ continue;
+ }
+ size_t hash = (j < mirror::Object::kVTableLength)
+ ? class_linker_->object_virtual_method_hashes_[j]
+ : ComputeMethodHash(super_method);
+ auto it = declared_virtual_signatures.FindWithHash(super_method, hash);
+ if (it == declared_virtual_signatures.end()) {
+ continue;
}
- // The method does not override any method from superclass, so it needs a new vtable index.
- virtual_method->SetMethodIndex(vtable_length);
- ++vtable_length;
+ ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(*it, kPointerSize);
+ if (super_method->IsFinal()) {
+ sants.reset();
+ ThrowLinkageError(klass, "Method %s overrides final method in class %s",
+ virtual_method->PrettyMethod().c_str(),
+ super_method->GetDeclaringClassDescriptor());
+ return 0u;
+ }
+ if (initialized_methods.IsBitSet(*it)) {
+ // The method is overriding more than one method.
+ // We record that information in a linked list to later set the method in the vtable
+ // locations that are not the method index.
+ if (same_signature_vtable_lists.empty()) {
+ same_signature_vtable_lists = ArrayRef<uint32_t>(
+ allocator_.AllocArray<uint32_t>(super_vtable_length), super_vtable_length);
+ std::fill_n(same_signature_vtable_lists.data(), super_vtable_length, dex::kDexNoIndex);
+ same_signature_vtable_lists_ = same_signature_vtable_lists;
+ }
+ same_signature_vtable_lists[j] = virtual_method->GetMethodIndexDuringLinking();
+ } else {
+ initialized_methods.SetBit(*it);
+ }
+
+    // We arbitrarily set the method index to the largest overridden vtable index.
+    // This is also expected when iterating over the `same_signature_vtable_lists_`.
+ virtual_method->SetMethodIndex(j);
}
+ // Add the non-overridden methods at the end.
+ for (size_t i = 0; i < num_virtual_methods; ++i) {
+ if (!initialized_methods.IsBitSet(i)) {
+ ArtMethod* local_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
+ local_method->SetMethodIndex(vtable_length);
+ vtable_length++;
+ }
+ }
+
+ // A lazily constructed super vtable set, which we only populate in the less
+  // common situation of a superclass implementing a method declared in an
+ // interface this class inherits.
+ // We still try to allocate the set on the stack as using the arena will have
+ // a larger cost.
+ uint32_t* super_vtable_buffer_ptr = bit_vector_buffer_ptr + bit_vector_size;
+ VTableSignatureSet super_vtable_signatures(
+ kMinLoadFactor,
+ kMaxLoadFactor,
+ VTableSignatureHash(super_vtable_accessor),
+ VTableSignatureEqual(super_vtable_accessor),
+ super_vtable_buffer_ptr,
+ super_vtable_buffer_size,
+ allocator_.Adapter());
+
// Assign vtable indexes for interface methods in new interfaces and store them
// in implementation method arrays. These shall be replaced by actual method
// pointers later. We do not need to do this for superclass interfaces as we can
@@ -8099,42 +8388,39 @@ size_t ClassLinker::LinkMethodsHelper<kPointerSize>::AssignVTableIndexes(
ArtMethod* interface_method = iface->GetVirtualMethod(j, kPointerSize);
size_t hash = ComputeMethodHash(interface_method);
ArtMethod* vtable_method = nullptr;
- bool found = false;
auto it1 = declared_virtual_signatures.FindWithHash(interface_method, hash);
if (it1 != declared_virtual_signatures.end()) {
- vtable_method = klass->GetVirtualMethodDuringLinking(*it1, kPointerSize);
- found = true;
+ ArtMethod* found_method = klass->GetVirtualMethodDuringLinking(*it1, kPointerSize);
+ // For interface overriding, we only look at public methods.
+ if (found_method->IsPublic()) {
+ vtable_method = found_method;
+ }
} else {
+ // This situation should be rare (a superclass implements a method
+ // declared in an interface this class is inheriting). Only in this case
+ // do we lazily populate the super_vtable_signatures.
+ if (super_vtable_signatures.empty()) {
+ for (size_t k = 0; k < super_vtable_length; ++k) {
+ ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(k);
+ if (!super_method->IsPublic()) {
+ // For interface overriding, we only look at public methods.
+ continue;
+ }
+ size_t super_hash = (k < mirror::Object::kVTableLength)
+ ? class_linker_->object_virtual_method_hashes_[k]
+ : ComputeMethodHash(super_method);
+ auto [it, inserted] = super_vtable_signatures.InsertWithHash(k, super_hash);
+ DCHECK(inserted || super_vtable_accessor.GetVTableEntry(*it) == super_method);
+ }
+ }
auto it2 = super_vtable_signatures.FindWithHash(interface_method, hash);
if (it2 != super_vtable_signatures.end()) {
- // If there are multiple vtable methods with the same signature, the one with
- // the highest vtable index is not nessarily the one in most-derived class.
- // Find the most-derived method. See b/211854716 .
vtable_method = super_vtable_accessor.GetVTableEntry(*it2);
- if (UNLIKELY(!same_signature_vtable_lists.empty())) {
- size_t current_index = *it2;
- while (same_signature_vtable_lists[current_index] != dex::kDexNoIndex) {
- DCHECK_LT(same_signature_vtable_lists[current_index], current_index);
- current_index = same_signature_vtable_lists[current_index];
- ArtMethod* current_method = super_vtable_accessor.GetVTableEntry(current_index);
- ObjPtr<mirror::Class> current_class = current_method->GetDeclaringClass();
- if (current_class->IsSubClass(vtable_method->GetDeclaringClass())) {
- vtable_method = current_method;
- }
- }
- }
- found = true;
}
}
+
uint32_t vtable_index = vtable_length;
- if (found) {
- DCHECK(vtable_method != nullptr);
- if (!vtable_method->IsAbstract() && !vtable_method->IsPublic()) {
- // FIXME: Delay the exception until we actually try to call the method. b/211854716
- sants.reset();
- ThrowIllegalAccessErrorForImplementingMethod(klass, vtable_method, interface_method);
- return 0u;
- }
+ if (vtable_method != nullptr) {
vtable_index = vtable_method->GetMethodIndexDuringLinking();
if (!vtable_method->IsOverridableByDefaultMethod()) {
method_array->SetElementPtrSize(j, vtable_index, kPointerSize);
@@ -8144,7 +8430,7 @@ size_t ClassLinker::LinkMethodsHelper<kPointerSize>::AssignVTableIndexes(
auto [it, inserted] = copied_method_records_.InsertWithHash(
CopiedMethodRecord(interface_method, vtable_index), hash);
- if (found) {
+ if (vtable_method != nullptr) {
DCHECK_EQ(vtable_index, it->GetMethodIndex());
} else if (inserted) {
DCHECK_EQ(vtable_index, it->GetMethodIndex());
@@ -8310,26 +8596,29 @@ bool ClassLinker::LinkMethodsHelper<kPointerSize>::LinkMethods(
ThrowClassFormatError(klass.Get(), "Too many methods on interface: %zu", num_virtual_methods);
return false;
}
+ // Assign each method an interface table index and set the default flag.
bool has_defaults = false;
- // Assign each method an IMT index and set the default flag.
for (size_t i = 0; i < num_virtual_methods; ++i) {
ArtMethod* m = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
m->SetMethodIndex(i);
- if (!m->IsAbstract()) {
+ uint32_t access_flags = m->GetAccessFlags();
+ DCHECK(!ArtMethod::IsDefault(access_flags));
+ DCHECK_EQ(!ArtMethod::IsAbstract(access_flags), ArtMethod::IsInvokable(access_flags));
+ if (ArtMethod::IsInvokable(access_flags)) {
// If the dex file does not support default methods, throw ClassFormatError.
// This check is necessary to protect from odd cases, such as native default
// methods, that the dex file verifier permits for old dex file versions. b/157170505
// FIXME: This should be `if (!m->GetDexFile()->SupportsDefaultMethods())` but we're
// currently running CTS tests for default methods with dex file version 035 which
// does not support default methods. So, we limit this to native methods. b/157718952
- if (m->IsNative()) {
+ if (ArtMethod::IsNative(access_flags)) {
DCHECK(!m->GetDexFile()->SupportsDefaultMethods());
ThrowClassFormatError(klass.Get(),
"Dex file does not support default method '%s'",
m->PrettyMethod().c_str());
return false;
}
- if (!m->IsPublic()) {
+ if (!ArtMethod::IsPublic(access_flags)) {
// The verifier should have caught the non-public method for dex version 37.
// Just warn and skip it since this is from before default-methods so we don't
// really need to care that it has code.
@@ -8337,7 +8626,7 @@ bool ClassLinker::LinkMethodsHelper<kPointerSize>::LinkMethods(
<< "This will be a fatal error in subsequent versions of android. "
<< "Continuing anyway.";
}
- m->SetAccessFlags(m->GetAccessFlags() | kAccDefault);
+ m->SetAccessFlags(access_flags | kAccDefault);
has_defaults = true;
}
}
@@ -8484,17 +8773,16 @@ bool ClassLinker::LinkMethodsHelper<kPointerSize>::LinkMethods(
uint32_t vtable_index = virtual_method.GetMethodIndexDuringLinking();
vtable->SetElementPtrSize(vtable_index, &virtual_method, kPointerSize);
if (UNLIKELY(vtable_index < same_signature_vtable_lists.size())) {
- // We may override more than one method according to JLS, see b/211854716 .
- // If we do, arbitrarily update the method index to the lowest overridden vtable index.
+ // We may override more than one method according to JLS, see b/211854716.
while (same_signature_vtable_lists[vtable_index] != dex::kDexNoIndex) {
DCHECK_LT(same_signature_vtable_lists[vtable_index], vtable_index);
vtable_index = same_signature_vtable_lists[vtable_index];
- ArtMethod* current_method = super_class->GetVTableEntry(vtable_index, kPointerSize);
- if (klass->CanAccessMember(current_method->GetDeclaringClass(),
- current_method->GetAccessFlags())) {
+ vtable->SetElementPtrSize(vtable_index, &virtual_method, kPointerSize);
+ if (kIsDebugBuild) {
+ ArtMethod* current_method = super_class->GetVTableEntry(vtable_index, kPointerSize);
+ DCHECK(klass->CanAccessMember(current_method->GetDeclaringClass(),
+ current_method->GetAccessFlags()));
DCHECK(!current_method->IsFinal());
- vtable->SetElementPtrSize(vtable_index, &virtual_method, kPointerSize);
- virtual_method.SetMethodIndex(vtable_index);
}
}
}
@@ -8606,7 +8894,8 @@ class ClassLinker::LinkFieldsHelper {
};
// We use the following order of field types for assigning offsets.
-// Some fields can be shuffled forward to fill gaps, see `ClassLinker::LinkFields()`.
+// Some fields can be shuffled forward to fill gaps, see
+// `ClassLinker::LinkFieldsHelper::LinkFields()`.
enum class ClassLinker::LinkFieldsHelper::FieldTypeOrder : uint16_t {
kReference = 0u,
kLong,
@@ -9082,6 +9371,308 @@ bool ClassLinker::LinkStaticFields(Thread* self, Handle<mirror::Class> klass, si
return LinkFieldsHelper::LinkFields(this, self, klass, true, class_size);
}
+enum class RecordElementType : uint8_t {
+ kNames = 0,
+ kTypes = 1,
+ kSignatures = 2,
+ kAnnotationVisibilities = 3,
+ kAnnotations = 4
+};
+
+static const char* kRecordElementNames[] = {"componentNames",
+ "componentTypes",
+ "componentSignatures",
+ "componentAnnotationVisibilities",
+ "componentAnnotations"};
+
+class RecordAnnotationVisitor final : public annotations::AnnotationVisitor {
+ public:
+ RecordAnnotationVisitor() {}
+
+ bool ValidateCounts() {
+ if (is_error_) {
+ return false;
+ }
+
+ // Verify the counts.
+ bool annotation_element_exists =
+ (signatures_count_ != UINT32_MAX) || (annotations_count_ != UINT32_MAX);
+ if (count_ >= 2) {
+ SetErrorMsg("Record class can't have more than one @Record Annotation");
+ } else if (names_count_ == UINT32_MAX) {
+ SetErrorMsg("componentNames element is required");
+ } else if (types_count_ == UINT32_MAX) {
+ SetErrorMsg("componentTypes element is required");
+ } else if (names_count_ != types_count_) { // Every component must have a name and a type.
+ SetErrorMsg(StringPrintf(
+ "componentTypes is expected to have %i, but has %i types", names_count_, types_count_));
+      // The other 3 elements are optional, but are expected to have the same count if they exist.
+ } else if (signatures_count_ != UINT32_MAX && signatures_count_ != names_count_) {
+ SetErrorMsg(StringPrintf("componentSignatures size is %i, but is expected to be %i",
+ signatures_count_,
+ names_count_));
+ } else if (annotation_element_exists && visibilities_count_ != names_count_) {
+ SetErrorMsg(
+ StringPrintf("componentAnnotationVisibilities size is %i, but is expected to be %i",
+ visibilities_count_,
+ names_count_));
+ } else if (annotation_element_exists && annotations_count_ != names_count_) {
+ SetErrorMsg(StringPrintf("componentAnnotations size is %i, but is expected to be %i",
+ annotations_count_,
+ names_count_));
+ }
+
+ return !is_error_;
+ }
+
+ const std::string& GetErrorMsg() { return error_msg_; }
+
+ bool IsRecordAnnotationFound() { return count_ != 0; }
+
+ annotations::VisitorStatus VisitAnnotation(const char* descriptor, uint8_t visibility) override {
+ if (is_error_) {
+ return annotations::VisitorStatus::kVisitBreak;
+ }
+
+ if (visibility != DexFile::kDexVisibilitySystem) {
+ return annotations::VisitorStatus::kVisitNext;
+ }
+
+ if (strcmp(descriptor, "Ldalvik/annotation/Record;") != 0) {
+ return annotations::VisitorStatus::kVisitNext;
+ }
+
+ count_ += 1;
+ if (count_ >= 2) {
+ return annotations::VisitorStatus::kVisitBreak;
+ }
+ return annotations::VisitorStatus::kVisitInner;
+ }
+
+ annotations::VisitorStatus VisitAnnotationElement(const char* element_name,
+ uint8_t type,
+ [[maybe_unused]] const JValue& value) override {
+ if (is_error_) {
+ return annotations::VisitorStatus::kVisitBreak;
+ }
+
+ RecordElementType visiting_type;
+ uint32_t* element_count;
+ if (strcmp(element_name, "componentNames") == 0) {
+ visiting_type = RecordElementType::kNames;
+ element_count = &names_count_;
+ } else if (strcmp(element_name, "componentTypes") == 0) {
+ visiting_type = RecordElementType::kTypes;
+ element_count = &types_count_;
+ } else if (strcmp(element_name, "componentSignatures") == 0) {
+ visiting_type = RecordElementType::kSignatures;
+ element_count = &signatures_count_;
+ } else if (strcmp(element_name, "componentAnnotationVisibilities") == 0) {
+ visiting_type = RecordElementType::kAnnotationVisibilities;
+ element_count = &visibilities_count_;
+ } else if (strcmp(element_name, "componentAnnotations") == 0) {
+ visiting_type = RecordElementType::kAnnotations;
+ element_count = &annotations_count_;
+ } else {
+      // Ignore this element; it may be one introduced in a future version of ART.
+ return annotations::VisitorStatus::kVisitNext;
+ }
+
+ if ((*element_count) != UINT32_MAX) {
+ SetErrorMsg(StringPrintf("Two %s annotation elements are found but only one is expected",
+ kRecordElementNames[static_cast<uint8_t>(visiting_type)]));
+ return annotations::VisitorStatus::kVisitBreak;
+ }
+
+ if (type != DexFile::kDexAnnotationArray) {
+ SetErrorMsg(StringPrintf("%s must be array type", element_name));
+ return annotations::VisitorStatus::kVisitBreak;
+ }
+
+ *element_count = 0;
+ visiting_type_ = visiting_type;
+ return annotations::VisitorStatus::kVisitInner;
+ }
+
+ annotations::VisitorStatus VisitArrayElement(uint8_t depth,
+ uint32_t index,
+ uint8_t type,
+ [[maybe_unused]] const JValue& value) override {
+ if (is_error_) {
+ return annotations::VisitorStatus::kVisitBreak;
+ }
+ switch (visiting_type_) {
+ case RecordElementType::kNames: {
+ if (depth == 0) {
+ if (!ExpectedTypeOrError(
+ type, DexFile::kDexAnnotationString, visiting_type_, index, depth)) {
+ return annotations::VisitorStatus::kVisitBreak;
+ }
+ names_count_++;
+ return annotations::VisitorStatus::kVisitNext;
+ }
+ break;
+ }
+ case RecordElementType::kTypes: {
+ if (depth == 0) {
+ if (!ExpectedTypeOrError(
+ type, DexFile::kDexAnnotationType, visiting_type_, index, depth)) {
+ return annotations::VisitorStatus::kVisitBreak;
+ }
+ types_count_++;
+ return annotations::VisitorStatus::kVisitNext;
+ }
+ break;
+ }
+ case RecordElementType::kSignatures: {
+ if (depth == 0) {
+ // kDexAnnotationNull implies no generic signature for the component.
+ if (type != DexFile::kDexAnnotationNull &&
+ !ExpectedTypeOrError(
+ type, DexFile::kDexAnnotationAnnotation, visiting_type_, index, depth)) {
+ return annotations::VisitorStatus::kVisitBreak;
+ }
+ signatures_count_++;
+ return annotations::VisitorStatus::kVisitNext;
+ }
+ break;
+ }
+ case RecordElementType::kAnnotationVisibilities: {
+ if (depth == 0) {
+ if (!ExpectedTypeOrError(
+ type, DexFile::kDexAnnotationArray, visiting_type_, index, depth)) {
+ return annotations::VisitorStatus::kVisitBreak;
+ }
+ visibilities_count_++;
+ return annotations::VisitorStatus::kVisitInner;
+ } else if (depth == 1) {
+ if (!ExpectedTypeOrError(
+ type, DexFile::kDexAnnotationByte, visiting_type_, index, depth)) {
+ return annotations::VisitorStatus::kVisitBreak;
+ }
+ return annotations::VisitorStatus::kVisitNext;
+ }
+ break;
+ }
+ case RecordElementType::kAnnotations: {
+ if (depth == 0) {
+ if (!ExpectedTypeOrError(
+ type, DexFile::kDexAnnotationArray, visiting_type_, index, depth)) {
+ return annotations::VisitorStatus::kVisitBreak;
+ }
+ annotations_count_++;
+ return annotations::VisitorStatus::kVisitInner;
+ } else if (depth == 1) {
+ if (!ExpectedTypeOrError(
+ type, DexFile::kDexAnnotationAnnotation, visiting_type_, index, depth)) {
+ return annotations::VisitorStatus::kVisitBreak;
+ }
+ return annotations::VisitorStatus::kVisitNext;
+ }
+ break;
+ }
+ }
+
+ // Should never happen if every next depth level is handled above whenever kVisitInner is
+ // returned.
+ DCHECK(false) << StringPrintf("Unexpected depth %i for element %s",
+ depth,
+ kRecordElementNames[static_cast<uint8_t>(visiting_type_)]);
+ return annotations::VisitorStatus::kVisitBreak;
+ }
+
+ private:
+ bool is_error_ = false;
+ uint32_t count_ = 0;
+ uint32_t names_count_ = UINT32_MAX;
+ uint32_t types_count_ = UINT32_MAX;
+ uint32_t signatures_count_ = UINT32_MAX;
+ uint32_t visibilities_count_ = UINT32_MAX;
+ uint32_t annotations_count_ = UINT32_MAX;
+ std::string error_msg_;
+ RecordElementType visiting_type_;
+
+ inline bool ExpectedTypeOrError(uint8_t type,
+ uint8_t expected,
+ RecordElementType visiting_type,
+                                  uint32_t index,
+                                  uint8_t depth) {
+ if (type == expected) {
+ return true;
+ }
+
+ SetErrorMsg(StringPrintf(
+ "Expect 0x%02x type but got 0x%02x at the index %i and depth %i for the element %s",
+ expected,
+ type,
+ index,
+ depth,
+ kRecordElementNames[static_cast<uint8_t>(visiting_type)]));
+ return false;
+ }
+
+ void SetErrorMsg(const std::string& msg) {
+ is_error_ = true;
+ error_msg_ = msg;
+ }
+
+ DISALLOW_COPY_AND_ASSIGN(RecordAnnotationVisitor);
+};
+
+/**
+ * Set kClassFlagRecord and verify if klass is a record class.
+ * If the verification fails, a pending java exception is thrown.
+ *
+ * @return false if verification fails. If klass isn't a record class,
+ * it should always return true.
+ */
+bool ClassLinker::VerifyRecordClass(Handle<mirror::Class> klass, ObjPtr<mirror::Class> super) {
+ CHECK(klass != nullptr);
+ // First, we check the conditions specified in java.lang.Class#isRecord().
+ // If any of the conditions isn't fulfilled, it's not a record class and
+ // ART should treat it as a normal class even if it's inherited from java.lang.Record.
+ if (!klass->IsFinal()) {
+ return true;
+ }
+
+ if (super == nullptr) {
+ return true;
+ }
+
+  // Compare the descriptor string directly when this ClassLinker is initializing,
+  // before WellKnownClasses has been initialized.
+ if (WellKnownClasses::java_lang_Record == nullptr) {
+ if (!super->DescriptorEquals("Ljava/lang/Record;")) {
+ return true;
+ }
+ } else {
+ ObjPtr<mirror::Class> java_lang_Record =
+ WellKnownClasses::ToClass(WellKnownClasses::java_lang_Record);
+ if (super.Ptr() != java_lang_Record.Ptr()) {
+ return true;
+ }
+ }
+
+ // Verify @dalvik.annotation.Record
+ // The annotation has a mandatory element componentNames[] and componentTypes[] of the same size.
+ // componentSignatures[], componentAnnotationVisibilities[][], componentAnnotations[][] are
+  // optional, but should have the same size if they exist.
+ RecordAnnotationVisitor visitor;
+ annotations::VisitClassAnnotations(klass, &visitor);
+ if (!visitor.IsRecordAnnotationFound()) {
+ return true;
+ }
+
+ if (!visitor.ValidateCounts()) {
+ ThrowClassFormatError(klass.Get(), "%s", visitor.GetErrorMsg().c_str());
+ return false;
+ }
+
+ // Set kClassFlagRecord.
+ klass->SetRecordClass();
+ return true;
+}
+
// Set the bitmap of reference instance field offsets.
void ClassLinker::CreateReferenceInstanceOffsets(Handle<mirror::Class> klass) {
uint32_t reference_offsets = 0;
@@ -9340,19 +9931,7 @@ ArtMethod* ClassLinker::ResolveMethodWithoutInvokeType(uint32_t method_idx,
Thread::Current()->AssertPendingException();
return nullptr;
}
- if (klass->IsInterface()) {
- resolved = klass->FindInterfaceMethod(dex_cache.Get(), method_idx, image_pointer_size_);
- } else {
- resolved = klass->FindClassMethod(dex_cache.Get(), method_idx, image_pointer_size_);
- }
- if (resolved != nullptr &&
- hiddenapi::ShouldDenyAccessToMember(
- resolved,
- hiddenapi::AccessContext(class_loader.Get(), dex_cache.Get()),
- hiddenapi::AccessMethod::kLinking)) {
- resolved = nullptr;
- }
- return resolved;
+ return FindResolvedMethod(klass, dex_cache.Get(), class_loader.Get(), method_idx);
}
ArtField* ClassLinker::LookupResolvedField(uint32_t field_idx,
@@ -9504,6 +10083,9 @@ ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(
Handle<mirror::MethodType> type = hs.NewHandle(
mirror::MethodType::Create(self, return_type, method_params));
if (type != nullptr) {
+ // Ensure all stores for the newly created MethodType are visible, before we attempt to place
+ // it in the DexCache (b/224733324).
+ std::atomic_thread_fence(std::memory_order_release);
dex_cache->SetResolvedMethodType(proto_idx, type.Get());
}
@@ -10000,11 +10582,13 @@ ObjPtr<mirror::ClassLoader> ClassLinker::CreateWellKnownClassLoader(
Handle<mirror::ClassLoader> parent_loader,
Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries,
Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries_after) {
+ CHECK(loader_class.Get() == WellKnownClasses::dalvik_system_PathClassLoader ||
+ loader_class.Get() == WellKnownClasses::dalvik_system_DelegateLastClassLoader ||
+ loader_class.Get() == WellKnownClasses::dalvik_system_InMemoryDexClassLoader);
StackHandleScope<5> hs(self);
- ArtField* dex_elements_field =
- jni::DecodeArtField(WellKnownClasses::dalvik_system_DexPathList_dexElements);
+ ArtField* dex_elements_field = WellKnownClasses::dalvik_system_DexPathList_dexElements;
Handle<mirror::Class> dex_elements_class(hs.NewHandle(dex_elements_field->ResolveType()));
DCHECK(dex_elements_class != nullptr);
@@ -10016,14 +10600,13 @@ ObjPtr<mirror::ClassLoader> ClassLinker::CreateWellKnownClassLoader(
Handle<mirror::Class> h_dex_element_class =
hs.NewHandle(dex_elements_class->GetComponentType());
- ArtField* element_file_field =
- jni::DecodeArtField(WellKnownClasses::dalvik_system_DexPathList__Element_dexFile);
+ ArtField* element_file_field = WellKnownClasses::dalvik_system_DexPathList__Element_dexFile;
DCHECK_EQ(h_dex_element_class.Get(), element_file_field->GetDeclaringClass());
- ArtField* cookie_field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_cookie);
+ ArtField* cookie_field = WellKnownClasses::dalvik_system_DexFile_cookie;
DCHECK_EQ(cookie_field->GetDeclaringClass(), element_file_field->LookupResolvedType());
- ArtField* file_name_field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_fileName);
+ ArtField* file_name_field = WellKnownClasses::dalvik_system_DexFile_fileName;
DCHECK_EQ(file_name_field->GetDeclaringClass(), element_file_field->LookupResolvedType());
// Fill the elements array.
@@ -10095,88 +10678,45 @@ ObjPtr<mirror::ClassLoader> ClassLinker::CreateWellKnownClassLoader(
ObjPtr<mirror::ClassLoader>::DownCast(loader_class->AllocObject(self)));
DCHECK(h_class_loader != nullptr);
// Set DexPathList.
- ArtField* path_list_field =
- jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_pathList);
+ ArtField* path_list_field = WellKnownClasses::dalvik_system_BaseDexClassLoader_pathList;
DCHECK(path_list_field != nullptr);
path_list_field->SetObject<false>(h_class_loader.Get(), h_dex_path_list.Get());
// Make a pretend boot-classpath.
// TODO: Should we scan the image?
- ArtField* const parent_field =
- jni::DecodeArtField(WellKnownClasses::java_lang_ClassLoader_parent);
+ ArtField* const parent_field = WellKnownClasses::java_lang_ClassLoader_parent;
DCHECK(parent_field != nullptr);
if (parent_loader.Get() == nullptr) {
- ScopedObjectAccessUnchecked soa(self);
- ObjPtr<mirror::Object> boot_loader(soa.Decode<mirror::Class>(
- WellKnownClasses::java_lang_BootClassLoader)->AllocObject(self));
+ ObjPtr<mirror::Object> boot_loader(
+ WellKnownClasses::java_lang_BootClassLoader->AllocObject(self));
parent_field->SetObject<false>(h_class_loader.Get(), boot_loader);
} else {
parent_field->SetObject<false>(h_class_loader.Get(), parent_loader.Get());
}
ArtField* shared_libraries_field =
- jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders);
+ WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders;
DCHECK(shared_libraries_field != nullptr);
shared_libraries_field->SetObject<false>(h_class_loader.Get(), shared_libraries.Get());
ArtField* shared_libraries_after_field =
- jni::DecodeArtField(
- WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoadersAfter);
+ WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoadersAfter;
DCHECK(shared_libraries_after_field != nullptr);
shared_libraries_after_field->SetObject<false>(h_class_loader.Get(),
shared_libraries_after.Get());
return h_class_loader.Get();
}
-jobject ClassLinker::CreateWellKnownClassLoader(Thread* self,
- const std::vector<const DexFile*>& dex_files,
- jclass loader_class,
- jobject parent_loader,
- jobject shared_libraries,
- jobject shared_libraries_after) {
- CHECK(self->GetJniEnv()->IsSameObject(loader_class,
- WellKnownClasses::dalvik_system_PathClassLoader) ||
- self->GetJniEnv()->IsSameObject(loader_class,
- WellKnownClasses::dalvik_system_DelegateLastClassLoader) ||
- self->GetJniEnv()->IsSameObject(loader_class,
- WellKnownClasses::dalvik_system_InMemoryDexClassLoader));
-
- // SOAAlreadyRunnable is protected, and we need something to add a global reference.
- // We could move the jobject to the callers, but all call-sites do this...
- ScopedObjectAccessUnchecked soa(self);
-
- // For now, create a libcore-level DexFile for each ART DexFile. This "explodes" multidex.
- StackHandleScope<5> hs(self);
-
- Handle<mirror::Class> h_loader_class =
- hs.NewHandle<mirror::Class>(soa.Decode<mirror::Class>(loader_class));
- Handle<mirror::ClassLoader> h_parent =
- hs.NewHandle<mirror::ClassLoader>(soa.Decode<mirror::ClassLoader>(parent_loader));
- Handle<mirror::ObjectArray<mirror::ClassLoader>> h_shared_libraries =
- hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ClassLoader>>(shared_libraries));
- Handle<mirror::ObjectArray<mirror::ClassLoader>> h_shared_libraries_after =
- hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ClassLoader>>(shared_libraries_after));
-
- ObjPtr<mirror::ClassLoader> loader = CreateWellKnownClassLoader(
- self,
- dex_files,
- h_loader_class,
- h_parent,
- h_shared_libraries,
- h_shared_libraries_after);
-
- // Make it a global ref and return.
- ScopedLocalRef<jobject> local_ref(
- soa.Env(), soa.Env()->AddLocalReference<jobject>(loader));
- return soa.Env()->NewGlobalRef(local_ref.get());
-}
-
jobject ClassLinker::CreatePathClassLoader(Thread* self,
const std::vector<const DexFile*>& dex_files) {
- return CreateWellKnownClassLoader(self,
- dex_files,
- WellKnownClasses::dalvik_system_PathClassLoader,
- nullptr);
+ StackHandleScope<3u> hs(self);
+ Handle<mirror::Class> d_s_pcl =
+ hs.NewHandle(WellKnownClasses::dalvik_system_PathClassLoader.Get());
+ auto null_parent = hs.NewHandle<mirror::ClassLoader>(nullptr);
+ auto null_libs = hs.NewHandle<mirror::ObjectArray<mirror::ClassLoader>>(nullptr);
+ ObjPtr<mirror::ClassLoader> class_loader =
+ CreateWellKnownClassLoader(self, dex_files, d_s_pcl, null_parent, null_libs, null_libs);
+ return Runtime::Current()->GetJavaVM()->AddGlobalRef(self, class_loader);
}
void ClassLinker::DropFindArrayClassCache() {
@@ -10196,6 +10736,18 @@ void ClassLinker::VisitClassLoaders(ClassLoaderVisitor* visitor) const {
}
}
+void ClassLinker::VisitDexCaches(DexCacheVisitor* visitor) const {
+ Thread* const self = Thread::Current();
+ for (const auto& it : dex_caches_) {
+ // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
+ ObjPtr<mirror::DexCache> dex_cache = ObjPtr<mirror::DexCache>::DownCast(
+ self->DecodeJObject(it.second.weak_root));
+ if (dex_cache != nullptr) {
+ visitor->Visit(dex_cache);
+ }
+ }
+}
+
void ClassLinker::VisitAllocators(AllocatorVisitor* visitor) const {
for (const ClassLoaderData& data : class_loaders_) {
LinearAlloc* alloc = data.allocator;
@@ -10221,27 +10773,77 @@ void ClassLinker::InsertDexFileInToClassLoader(ObjPtr<mirror::Object> dex_file,
void ClassLinker::CleanupClassLoaders() {
Thread* const self = Thread::Current();
- std::vector<ClassLoaderData> to_delete;
+ std::list<ClassLoaderData> to_delete;
// Do the delete outside the lock to avoid lock violation in jit code cache.
{
WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
for (auto it = class_loaders_.begin(); it != class_loaders_.end(); ) {
- const ClassLoaderData& data = *it;
+ auto this_it = it;
+ ++it;
+ const ClassLoaderData& data = *this_it;
// Need to use DecodeJObject so that we get null for cleared JNI weak globals.
ObjPtr<mirror::ClassLoader> class_loader =
ObjPtr<mirror::ClassLoader>::DownCast(self->DecodeJObject(data.weak_root));
- if (class_loader != nullptr) {
- ++it;
- } else {
+ if (class_loader == nullptr) {
VLOG(class_linker) << "Freeing class loader";
- to_delete.push_back(data);
- it = class_loaders_.erase(it);
+ to_delete.splice(to_delete.end(), class_loaders_, this_it);
+ }
+ }
+ }
+ if (to_delete.empty()) {
+ return;
+ }
+ std::set<const OatFile*> unregistered_oat_files;
+ JavaVMExt* vm = self->GetJniEnv()->GetVm();
+ {
+ WriterMutexLock mu(self, *Locks::dex_lock_);
+ for (auto it = dex_caches_.begin(), end = dex_caches_.end(); it != end; ) {
+ const DexFile* dex_file = it->first;
+ const DexCacheData& data = it->second;
+ if (self->DecodeJObject(data.weak_root) == nullptr) {
+ DCHECK(to_delete.end() != std::find_if(
+ to_delete.begin(),
+ to_delete.end(),
+ [&](const ClassLoaderData& cld) { return cld.class_table == data.class_table; }));
+ if (dex_file->GetOatDexFile() != nullptr &&
+ dex_file->GetOatDexFile()->GetOatFile() != nullptr &&
+ dex_file->GetOatDexFile()->GetOatFile()->IsExecutable()) {
+ unregistered_oat_files.insert(dex_file->GetOatDexFile()->GetOatFile());
+ }
+ vm->DeleteWeakGlobalRef(self, data.weak_root);
+ it = dex_caches_.erase(it);
+ } else {
+ ++it;
+ }
+ }
+ }
+ {
+ ScopedDebugDisallowReadBarriers sddrb(self);
+ for (ClassLoaderData& data : to_delete) {
+ // CHA unloading analysis and SingleImplementation cleanups are required.
+ DeleteClassLoader(self, data, /*cleanup_cha=*/ true);
+ }
+ }
+ Runtime* runtime = Runtime::Current();
+ if (!unregistered_oat_files.empty()) {
+ for (const OatFile* oat_file : unregistered_oat_files) {
+ // Notify the fault handler about removal of the executable code range if needed.
+ DCHECK(oat_file->IsExecutable());
+ size_t exec_offset = oat_file->GetOatHeader().GetExecutableOffset();
+ DCHECK_LE(exec_offset, oat_file->Size());
+ size_t exec_size = oat_file->Size() - exec_offset;
+ if (exec_size != 0u) {
+ runtime->RemoveGeneratedCodeRange(oat_file->Begin() + exec_offset, exec_size);
}
}
}
- for (ClassLoaderData& data : to_delete) {
- // CHA unloading analysis and SingleImplementaion cleanups are required.
- DeleteClassLoader(self, data, /*cleanup_cha=*/ true);
+
+ if (runtime->GetStartupLinearAlloc() != nullptr) {
+ // Because the startup linear alloc can contain dex cache arrays associated
+ // to class loaders that got unloaded, we need to delete these
+ // arrays.
+ StartupCompletedTask::DeleteStartupDexCaches(self, /* called_by_gc= */ true);
+ DCHECK_EQ(runtime->GetStartupLinearAlloc(), nullptr);
}
}
@@ -10269,9 +10871,73 @@ ObjPtr<mirror::Class> ClassLinker::GetHoldingClassOfCopiedMethod(ArtMethod* meth
CHECK(method->IsCopied());
FindVirtualMethodHolderVisitor visitor(method, image_pointer_size_);
VisitClasses(&visitor);
+ DCHECK(visitor.holder_ != nullptr);
return visitor.holder_;
}
+ObjPtr<mirror::ClassLoader> ClassLinker::GetHoldingClassLoaderOfCopiedMethod(Thread* self,
+ ArtMethod* method) {
+ // Note: `GetHoldingClassOfCopiedMethod(method)` is a lot more expensive than finding
+ // the class loader, so we're using it only to verify the result in debug mode.
+ CHECK(method->IsCopied());
+ gc::Heap* heap = Runtime::Current()->GetHeap();
+ // Check if the copied method is in the boot class path.
+ if (heap->IsBootImageAddress(method) || GetAllocatorForClassLoader(nullptr)->Contains(method)) {
+ DCHECK(GetHoldingClassOfCopiedMethod(method)->GetClassLoader() == nullptr);
+ return nullptr;
+ }
+ // Check if the copied method is in an app image.
+ // Note: Continuous spaces contain boot image spaces and app image spaces.
+ // However, they are sorted by address, so boot images are not trivial to skip.
+ ArrayRef<gc::space::ContinuousSpace* const> spaces(heap->GetContinuousSpaces());
+ DCHECK_GE(spaces.size(), heap->GetBootImageSpaces().size());
+ for (gc::space::ContinuousSpace* space : spaces) {
+ if (space->IsImageSpace()) {
+ gc::space::ImageSpace* image_space = space->AsImageSpace();
+ size_t offset = reinterpret_cast<const uint8_t*>(method) - image_space->Begin();
+ const ImageSection& methods_section = image_space->GetImageHeader().GetMethodsSection();
+ if (offset - methods_section.Offset() < methods_section.Size()) {
+ // Grab the class loader from the first non-BCP class in the app image class table.
+ // Note: If we allow classes from arbitrary parent or library class loaders in app
+ // images, this shall need to be updated to actually search for the exact class.
+ const ImageSection& class_table_section =
+ image_space->GetImageHeader().GetClassTableSection();
+ CHECK_NE(class_table_section.Size(), 0u);
+ const uint8_t* ptr = image_space->Begin() + class_table_section.Offset();
+ size_t read_count = 0;
+ ClassTable::ClassSet class_set(ptr, /*make_copy_of_data=*/ false, &read_count);
+ CHECK(!class_set.empty());
+ auto it = class_set.begin();
+ // No read barrier needed for references to non-movable image classes.
+ while ((*it).Read<kWithoutReadBarrier>()->IsBootStrapClassLoaded()) {
+ ++it;
+ CHECK(it != class_set.end());
+ }
+ ObjPtr<mirror::ClassLoader> class_loader =
+ (*it).Read<kWithoutReadBarrier>()->GetClassLoader();
+ DCHECK(GetHoldingClassOfCopiedMethod(method)->GetClassLoader() == class_loader);
+ return class_loader;
+ }
+ }
+ }
+ // Otherwise, the method must be in one of the `LinearAlloc` memory areas.
+ jweak result = nullptr;
+ {
+ ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
+ for (const ClassLoaderData& data : class_loaders_) {
+ if (data.allocator->Contains(method)) {
+ result = data.weak_root;
+ break;
+ }
+ }
+ }
+ CHECK(result != nullptr) << "Did not find allocator holding the copied method: " << method
+ << " " << method->PrettyMethod();
+ // The `method` is alive, so the class loader must also be alive.
+ return ObjPtr<mirror::ClassLoader>::DownCast(
+ Runtime::Current()->GetJavaVM()->DecodeWeakGlobalAsStrong(result));
+}
+
bool ClassLinker::DenyAccessBasedOnPublicSdk(ArtMethod* art_method ATTRIBUTE_UNUSED) const
REQUIRES_SHARED(Locks::mutator_lock_) {
// Should not be called on ClassLinker, only on AotClassLinker that overrides this.
@@ -10298,6 +10964,15 @@ void ClassLinker::SetEnablePublicSdkChecks(bool enabled ATTRIBUTE_UNUSED) {
UNREACHABLE();
}
+void ClassLinker::RemoveDexFromCaches(const DexFile& dex_file) {
+ ReaderMutexLock mu(Thread::Current(), *Locks::dex_lock_);
+
+ auto it = dex_caches_.find(&dex_file);
+ if (it != dex_caches_.end()) {
+ dex_caches_.erase(it);
+ }
+}
+
// Instantiate ClassLinker::AllocClass.
template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ true>(
Thread* self,