Diffstat (limited to 'runtime/class_linker.cc')
| -rw-r--r-- | runtime/class_linker.cc | 818 |
1 file changed, 406 insertions, 412 deletions
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc index 23c59422c4..c179c64491 100644 --- a/runtime/class_linker.cc +++ b/runtime/class_linker.cc @@ -37,6 +37,7 @@ #include "base/unix_file/fd_file.h" #include "base/value_object.h" #include "class_linker-inl.h" +#include "class_table-inl.h" #include "compiler_callbacks.h" #include "debugger.h" #include "dex_file-inl.h" @@ -55,6 +56,7 @@ #include "linear_alloc.h" #include "oat.h" #include "oat_file.h" +#include "oat_file-inl.h" #include "oat_file_assistant.h" #include "object_lock.h" #include "mirror/class.h" @@ -90,7 +92,7 @@ static constexpr bool kDuplicateClassesCheck = false; static void ThrowNoClassDefFoundError(const char* fmt, ...) __attribute__((__format__(__printf__, 1, 2))) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + SHARED_REQUIRES(Locks::mutator_lock_); static void ThrowNoClassDefFoundError(const char* fmt, ...) { va_list args; va_start(args, fmt); @@ -99,14 +101,12 @@ static void ThrowNoClassDefFoundError(const char* fmt, ...) { va_end(args); } -bool ClassLinker::HasInitWithString( - Thread* self, ClassLinker* class_linker, const char* descriptor) { +bool ClassLinker::HasInitWithString(Thread* self, const char* descriptor) { ArtMethod* method = self->GetCurrentMethod(nullptr); StackHandleScope<1> hs(self); Handle<mirror::ClassLoader> class_loader(hs.NewHandle(method != nullptr ? - method->GetDeclaringClass()->GetClassLoader() - : nullptr)); - mirror::Class* exception_class = class_linker->FindClass(self, descriptor, class_loader); + method->GetDeclaringClass()->GetClassLoader() : nullptr)); + mirror::Class* exception_class = FindClass(self, descriptor, class_loader); if (exception_class == nullptr) { // No exc class ~ no <init>-with-string. @@ -143,7 +143,7 @@ void ClassLinker::ThrowEarlierClassFailure(mirror::Class* c) { std::string temp; const char* descriptor = c->GetVerifyErrorClass()->GetDescriptor(&temp); - if (HasInitWithString(self, this, descriptor)) { + if (HasInitWithString(self, descriptor)) { self->ThrowNewException(descriptor, PrettyDescriptor(c).c_str()); } else { self->ThrowNewException(descriptor, nullptr); @@ -156,7 +156,7 @@ void ClassLinker::ThrowEarlierClassFailure(mirror::Class* c) { } static void VlogClassInitializationFailure(Handle<mirror::Class> klass) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + SHARED_REQUIRES(Locks::mutator_lock_) { if (VLOG_IS_ON(class_linker)) { std::string temp; LOG(INFO) << "Failed to initialize class " << klass->GetDescriptor(&temp) << " from " @@ -165,7 +165,7 @@ static void VlogClassInitializationFailure(Handle<mirror::Class> klass) } static void WrapExceptionInInitializer(Handle<mirror::Class> klass) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + SHARED_REQUIRES(Locks::mutator_lock_) { Thread* self = Thread::Current(); JNIEnv* env = self->GetJniEnv(); @@ -194,7 +194,9 @@ struct FieldGapsComparator { bool operator() (const FieldGap& lhs, const FieldGap& rhs) NO_THREAD_SAFETY_ANALYSIS { // Sort by gap size, largest first. Secondary sort by starting offset. - return lhs.size > rhs.size || (lhs.size == rhs.size && lhs.start_offset < rhs.start_offset); + // Note that the priority queue returns the largest element, so operator() + // should return true if lhs is less than rhs. 
+ return lhs.size < rhs.size || (lhs.size == rhs.size && lhs.start_offset > rhs.start_offset); } }; typedef std::priority_queue<FieldGap, std::vector<FieldGap>, FieldGapsComparator> FieldGaps; @@ -225,7 +227,7 @@ static void ShuffleForward(size_t* current_field_idx, MemberOffset* field_offset, std::deque<ArtField*>* grouped_and_sorted_fields, FieldGaps* gaps) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + SHARED_REQUIRES(Locks::mutator_lock_) { DCHECK(current_field_idx != nullptr); DCHECK(grouped_and_sorted_fields != nullptr); DCHECK(gaps != nullptr); @@ -248,13 +250,13 @@ static void ShuffleForward(size_t* current_field_idx, if (!gaps->empty() && gaps->top().size >= n) { FieldGap gap = gaps->top(); gaps->pop(); - DCHECK(IsAligned<n>(gap.start_offset)); + DCHECK_ALIGNED(gap.start_offset, n); field->SetOffset(MemberOffset(gap.start_offset)); if (gap.size > n) { AddFieldGap(gap.start_offset + n, gap.start_offset + gap.size, gaps); } } else { - DCHECK(IsAligned<n>(field_offset->Uint32Value())); + DCHECK_ALIGNED(field_offset->Uint32Value(), n); field->SetOffset(*field_offset); *field_offset = MemberOffset(field_offset->Uint32Value() + n); } @@ -581,6 +583,7 @@ void ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> b // Setup the ClassLoader, verifying the object_size_. class_root = FindSystemClass(self, "Ljava/lang/ClassLoader;"); + class_root->SetClassLoaderClass(); CHECK_EQ(class_root->GetObjectSize(), mirror::ClassLoader::InstanceSize()); SetClassRoot(kJavaLangClassLoader, class_root); @@ -1018,7 +1021,7 @@ const OatFile* ClassLinker::FindOpenedOatFileFromOatLocation(const std::string& static void SanityCheckArtMethod(ArtMethod* m, mirror::Class* expected_class, gc::space::ImageSpace* space) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + SHARED_REQUIRES(Locks::mutator_lock_) { if (m->IsRuntimeMethod()) { CHECK(m->GetDeclaringClass() == nullptr) << PrettyMethod(m); } else if (m->IsMiranda()) { @@ -1036,7 +1039,7 @@ static void SanityCheckArtMethod(ArtMethod* m, mirror::Class* expected_class, static void SanityCheckArtMethodPointerArray( mirror::PointerArray* arr, mirror::Class* expected_class, size_t pointer_size, - gc::space::ImageSpace* space) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + gc::space::ImageSpace* space) SHARED_REQUIRES(Locks::mutator_lock_) { CHECK(arr != nullptr); for (int32_t j = 0; j < arr->GetLength(); ++j) { auto* method = arr->GetElementPtrSize<ArtMethod*>(j, pointer_size); @@ -1051,18 +1054,17 @@ static void SanityCheckArtMethodPointerArray( } static void SanityCheckObjectsCallback(mirror::Object* obj, void* arg ATTRIBUTE_UNUSED) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + SHARED_REQUIRES(Locks::mutator_lock_) { DCHECK(obj != nullptr); CHECK(obj->GetClass() != nullptr) << "Null class in object " << obj; CHECK(obj->GetClass()->GetClass() != nullptr) << "Null class class " << obj; if (obj->IsClass()) { auto klass = obj->AsClass(); - ArtField* fields[2] = { klass->GetSFields(), klass->GetIFields() }; - size_t num_fields[2] = { klass->NumStaticFields(), klass->NumInstanceFields() }; - for (size_t i = 0; i < 2; ++i) { - for (size_t j = 0; j < num_fields[i]; ++j) { - CHECK_EQ(fields[i][j].GetDeclaringClass(), klass); - } + for (ArtField& field : klass->GetIFields()) { + CHECK_EQ(field.GetDeclaringClass(), klass); + } + for (ArtField& field : klass->GetSFields()) { + CHECK_EQ(field.GetDeclaringClass(), klass); } auto* runtime = Runtime::Current(); auto* image_space = runtime->GetHeap()->GetImageSpace(); @@ -1097,6 +1099,28 @@ static void 
SanityCheckObjectsCallback(mirror::Object* obj, void* arg ATTRIBUTE_ } } +// Set image methods' entry point to interpreter. +class SetInterpreterEntrypointArtMethodVisitor : public ArtMethodVisitor { + public: + explicit SetInterpreterEntrypointArtMethodVisitor(size_t image_pointer_size) + : image_pointer_size_(image_pointer_size) {} + + void Visit(ArtMethod* method) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) { + if (kIsDebugBuild && !method->IsRuntimeMethod()) { + CHECK(method->GetDeclaringClass() != nullptr); + } + if (!method->IsNative() && !method->IsRuntimeMethod() && !method->IsResolutionMethod()) { + method->SetEntryPointFromQuickCompiledCodePtrSize(GetQuickToInterpreterBridge(), + image_pointer_size_); + } + } + + private: + const size_t image_pointer_size_; + + DISALLOW_COPY_AND_ASSIGN(SetInterpreterEntrypointArtMethodVisitor); +}; + void ClassLinker::InitFromImage() { VLOG(startup) << "ClassLinker::InitFromImage entering"; CHECK(!init_done_); @@ -1187,23 +1211,10 @@ void ClassLinker::InitFromImage() { // Set entry point to interpreter if in InterpretOnly mode. if (!runtime->IsAotCompiler() && runtime->GetInstrumentation()->InterpretOnly()) { - const auto& header = space->GetImageHeader(); - const auto& methods = header.GetMethodsSection(); - const auto art_method_size = ArtMethod::ObjectSize(image_pointer_size_); - for (uintptr_t pos = 0; pos < methods.Size(); pos += art_method_size) { - auto* method = reinterpret_cast<ArtMethod*>(space->Begin() + pos + methods.Offset()); - if (kIsDebugBuild && !method->IsRuntimeMethod()) { - CHECK(method->GetDeclaringClass() != nullptr); - } - if (!method->IsNative()) { - method->SetEntryPointFromInterpreterPtrSize( - artInterpreterToInterpreterBridge, image_pointer_size_); - if (!method->IsRuntimeMethod() && method != runtime->GetResolutionMethod()) { - method->SetEntryPointFromQuickCompiledCodePtrSize(GetQuickToInterpreterBridge(), - image_pointer_size_); - } - } - } + const ImageHeader& header = space->GetImageHeader(); + const ImageSection& methods = header.GetMethodsSection(); + SetInterpreterEntrypointArtMethodVisitor visitor(image_pointer_size_); + methods.VisitPackedArtMethods(&visitor, space->Begin(), image_pointer_size_); } // reinit class_roots_ @@ -1239,11 +1250,8 @@ void ClassLinker::InitFromImage() { bool ClassLinker::ClassInClassTable(mirror::Class* klass) { ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_); - auto it = class_table_.Find(GcRoot<mirror::Class>(klass)); - if (it == class_table_.end()) { - return false; - } - return it->Read() == klass; + ClassTable* const class_table = ClassTableForClassLoader(klass->GetClassLoader()); + return class_table != nullptr && class_table->Contains(klass); } void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) { @@ -1255,8 +1263,7 @@ void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) { // There is 3 GC cases to handle: // Non moving concurrent: // This case is easy to handle since the reference members of ArtMethod and ArtFields are held - // live by the class and class roots. In this case we probably don't even need to call - // VisitNativeRoots. + // live by the class and class roots. // // Moving non-concurrent: // This case needs to call visit VisitNativeRoots in case the classes or dex cache arrays move. 
@@ -1267,35 +1274,18 @@ void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) { // Moving concurrent: // Need to make sure to not copy ArtMethods without doing read barriers since the roots are // marked concurrently and we don't hold the classlinker_classes_lock_ when we do the copy. - for (GcRoot<mirror::Class>& root : class_table_) { - buffered_visitor.VisitRoot(root); - if ((flags & kVisitRootFlagNonMoving) == 0) { - // Don't bother visiting ArtField and ArtMethod if kVisitRootFlagNonMoving is set since - // these roots are all reachable from the class or dex cache. - root.Read()->VisitNativeRoots(buffered_visitor, image_pointer_size_); - } - } - // PreZygote classes can't move so we won't need to update fields' declaring classes. - for (GcRoot<mirror::Class>& root : pre_zygote_class_table_) { - buffered_visitor.VisitRoot(root); - if ((flags & kVisitRootFlagNonMoving) == 0) { - root.Read()->VisitNativeRoots(buffered_visitor, image_pointer_size_); - } + boot_class_table_.VisitRoots(buffered_visitor); + for (GcRoot<mirror::ClassLoader>& root : class_loaders_) { + // May be null for boot ClassLoader. + root.VisitRoot(visitor, RootInfo(kRootVMInternal)); } } else if ((flags & kVisitRootFlagNewRoots) != 0) { for (auto& root : new_class_roots_) { mirror::Class* old_ref = root.Read<kWithoutReadBarrier>(); - old_ref->VisitNativeRoots(buffered_visitor, image_pointer_size_); root.VisitRoot(visitor, RootInfo(kRootStickyClass)); mirror::Class* new_ref = root.Read<kWithoutReadBarrier>(); - if (UNLIKELY(new_ref != old_ref)) { - // Uh ohes, GC moved a root in the log. Need to search the class_table and update the - // corresponding object. This is slow, but luckily for us, this may only happen with a - // concurrent moving GC. - auto it = class_table_.Find(GcRoot<mirror::Class>(old_ref)); - DCHECK(it != class_table_.end()); - *it = GcRoot<mirror::Class>(new_ref); - } + // Concurrent moving GC marked new roots through the to-space invariant. + CHECK_EQ(new_ref, old_ref); } } buffered_visitor.Flush(); // Flush before clearing new_class_roots_. @@ -1344,91 +1334,105 @@ void ClassLinker::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) { } } -void ClassLinker::VisitClasses(ClassVisitor* visitor, void* arg) { - if (dex_cache_image_class_lookup_required_) { - MoveImageClassesToClassTable(); - } - // TODO: why isn't this a ReaderMutexLock? - WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_); - for (GcRoot<mirror::Class>& root : class_table_) { - if (!visitor(root.Read(), arg)) { - return; - } - } - for (GcRoot<mirror::Class>& root : pre_zygote_class_table_) { - if (!visitor(root.Read(), arg)) { - return; +void ClassLinker::VisitClassesInternal(ClassVisitor* visitor) { + if (boot_class_table_.Visit(visitor)) { + for (GcRoot<mirror::ClassLoader>& root : class_loaders_) { + ClassTable* const class_table = root.Read()->GetClassTable(); + if (class_table != nullptr && !class_table->Visit(visitor)) { + return; + } } } } -static bool GetClassesVisitorSet(mirror::Class* c, void* arg) { - std::set<mirror::Class*>* classes = reinterpret_cast<std::set<mirror::Class*>*>(arg); - classes->insert(c); - return true; +void ClassLinker::VisitClasses(ClassVisitor* visitor) { + if (dex_cache_image_class_lookup_required_) { + MoveImageClassesToClassTable(); + } + Thread* const self = Thread::Current(); + ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_); + // Not safe to have thread suspension when we are holding a lock. 
+ if (self != nullptr) { + ScopedAssertNoThreadSuspension nts(self, __FUNCTION__); + VisitClassesInternal(visitor); + } else { + VisitClassesInternal(visitor); + } } -struct GetClassesVisitorArrayArg { - Handle<mirror::ObjectArray<mirror::Class>>* classes; - int32_t index; - bool success; +class GetClassesInToVector : public ClassVisitor { + public: + bool Visit(mirror::Class* klass) OVERRIDE { + classes_.push_back(klass); + return true; + } + std::vector<mirror::Class*> classes_; }; -static bool GetClassesVisitorArray(mirror::Class* c, void* varg) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - GetClassesVisitorArrayArg* arg = reinterpret_cast<GetClassesVisitorArrayArg*>(varg); - if (arg->index < (*arg->classes)->GetLength()) { - (*arg->classes)->Set(arg->index, c); - arg->index++; - return true; - } else { - arg->success = false; +class GetClassInToObjectArray : public ClassVisitor { + public: + explicit GetClassInToObjectArray(mirror::ObjectArray<mirror::Class>* arr) + : arr_(arr), index_(0) {} + + bool Visit(mirror::Class* klass) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) { + ++index_; + if (index_ <= arr_->GetLength()) { + arr_->Set(index_ - 1, klass); + return true; + } return false; } -} -void ClassLinker::VisitClassesWithoutClassesLock(ClassVisitor* visitor, void* arg) { + bool Succeeded() const SHARED_REQUIRES(Locks::mutator_lock_) { + return index_ <= arr_->GetLength(); + } + + private: + mirror::ObjectArray<mirror::Class>* const arr_; + int32_t index_; +}; + +void ClassLinker::VisitClassesWithoutClassesLock(ClassVisitor* visitor) { // TODO: it may be possible to avoid secondary storage if we iterate over dex caches. The problem // is avoiding duplicates. if (!kMovingClasses) { - std::set<mirror::Class*> classes; - VisitClasses(GetClassesVisitorSet, &classes); - for (mirror::Class* klass : classes) { - if (!visitor(klass, arg)) { + GetClassesInToVector accumulator; + VisitClasses(&accumulator); + for (mirror::Class* klass : accumulator.classes_) { + if (!visitor->Visit(klass)) { return; } } } else { - Thread* self = Thread::Current(); + Thread* const self = Thread::Current(); StackHandleScope<1> hs(self); - MutableHandle<mirror::ObjectArray<mirror::Class>> classes = - hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr); - GetClassesVisitorArrayArg local_arg; - local_arg.classes = &classes; - local_arg.success = false; + auto classes = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr); // We size the array assuming classes won't be added to the class table during the visit. // If this assumption fails we iterate again. - while (!local_arg.success) { + while (true) { size_t class_table_size; { ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_); - class_table_size = class_table_.Size() + pre_zygote_class_table_.Size(); + // Add 100 in case new classes get loaded when we are filling in the object array. + class_table_size = NumZygoteClasses() + NumNonZygoteClasses() + 100; } mirror::Class* class_type = mirror::Class::GetJavaLangClass(); mirror::Class* array_of_class = FindArrayClass(self, &class_type); classes.Assign( mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, class_table_size)); CHECK(classes.Get() != nullptr); // OOME. 
- local_arg.index = 0; - local_arg.success = true; - VisitClasses(GetClassesVisitorArray, &local_arg); + GetClassInToObjectArray accumulator(classes.Get()); + VisitClasses(&accumulator); + if (accumulator.Succeeded()) { + break; + } } for (int32_t i = 0; i < classes->GetLength(); ++i) { // If the class table shrank during creation of the clases array we expect null elements. If // the class table grew then the loop repeats. If classes are created after the loop has // finished then we don't visit. mirror::Class* klass = classes->Get(i); - if (klass != nullptr && !visitor(klass, arg)) { + if (klass != nullptr && !visitor->Visit(klass)) { return; } } @@ -1456,6 +1460,10 @@ ClassLinker::~ClassLinker() { mirror::LongArray::ResetArrayClass(); mirror::ShortArray::ResetArrayClass(); STLDeleteElements(&oat_files_); + for (GcRoot<mirror::ClassLoader>& root : class_loaders_) { + ClassTable* const class_table = root.Read()->GetClassTable(); + delete class_table; + } } mirror::PointerArray* ClassLinker::AllocPointerArray(Thread* self, size_t length) { @@ -1598,7 +1606,7 @@ ClassPathEntry FindInClassPath(const char* descriptor, static bool IsBootClassLoader(ScopedObjectAccessAlreadyRunnable& soa, mirror::ClassLoader* class_loader) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + SHARED_REQUIRES(Locks::mutator_lock_) { return class_loader == nullptr || class_loader->GetClass() == soa.Decode<mirror::Class*>(WellKnownClasses::java_lang_BootClassLoader); @@ -2119,7 +2127,7 @@ const void* ClassLinker::GetQuickOatCodeFor(const DexFile& dex_file, uint16_t cl // Returns true if the method must run with interpreter, false otherwise. static bool NeedsInterpreter(ArtMethod* method, const void* quick_code) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + SHARED_REQUIRES(Locks::mutator_lock_) { if (quick_code == nullptr) { // No code: need interpreter. // May return true for native code, in the case of generic JNI @@ -2206,11 +2214,6 @@ void ClassLinker::LinkCode(ArtMethod* method, const OatFile::OatClass* oat_class // Install entry point from interpreter. bool enter_interpreter = NeedsInterpreter(method, method->GetEntryPointFromQuickCompiledCode()); - if (enter_interpreter && !method->IsNative()) { - method->SetEntryPointFromInterpreter(artInterpreterToInterpreterBridge); - } else { - method->SetEntryPointFromInterpreter(artInterpreterToCompiledCodeBridge); - } if (method->IsAbstract()) { method->SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge()); @@ -2286,23 +2289,35 @@ void ClassLinker::LoadClass(Thread* self, const DexFile& dex_file, } } -ArtField* ClassLinker::AllocArtFieldArray(Thread* self, size_t length) { - auto* const la = Runtime::Current()->GetLinearAlloc(); - auto* ptr = reinterpret_cast<ArtField*>(la->AllocArray<ArtField>(self, length)); - CHECK(ptr!= nullptr); - std::uninitialized_fill_n(ptr, length, ArtField()); - return ptr; +LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self, size_t length) { + if (length == 0) { + return nullptr; + } + // If the ArtField alignment changes, review all uses of LengthPrefixedArray<ArtField>. 
+ static_assert(alignof(ArtField) == 4, "ArtField alignment is expected to be 4."); + size_t storage_size = LengthPrefixedArray<ArtField>::ComputeSize(length); + void* array_storage = Runtime::Current()->GetLinearAlloc()->Alloc(self, storage_size); + auto* ret = new(array_storage) LengthPrefixedArray<ArtField>(length); + CHECK(ret != nullptr); + std::uninitialized_fill_n(&ret->At(0), length, ArtField()); + return ret; } -ArtMethod* ClassLinker::AllocArtMethodArray(Thread* self, size_t length) { - const size_t method_size = ArtMethod::ObjectSize(image_pointer_size_); - uintptr_t ptr = reinterpret_cast<uintptr_t>( - Runtime::Current()->GetLinearAlloc()->Alloc(self, method_size * length)); - CHECK_NE(ptr, 0u); +LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self, size_t length) { + if (length == 0) { + return nullptr; + } + const size_t method_alignment = ArtMethod::Alignment(image_pointer_size_); + const size_t method_size = ArtMethod::Size(image_pointer_size_); + const size_t storage_size = + LengthPrefixedArray<ArtMethod>::ComputeSize(length, method_size, method_alignment); + void* array_storage = Runtime::Current()->GetLinearAlloc()->Alloc(self, storage_size); + auto* ret = new (array_storage) LengthPrefixedArray<ArtMethod>(length); + CHECK(ret != nullptr); for (size_t i = 0; i < length; ++i) { - new(reinterpret_cast<void*>(ptr + i * method_size)) ArtMethod; + new(reinterpret_cast<void*>(&ret->At(i, method_size, method_alignment))) ArtMethod; } - return reinterpret_cast<ArtMethod*>(ptr); + return ret; } void ClassLinker::LoadClassMembers(Thread* self, const DexFile& dex_file, @@ -2317,8 +2332,7 @@ void ClassLinker::LoadClassMembers(Thread* self, const DexFile& dex_file, // We allow duplicate definitions of the same field in a class_data_item // but ignore the repeated indexes here, b/21868015. ClassDataItemIterator it(dex_file, class_data); - ArtField* sfields = - it.NumStaticFields() != 0 ? AllocArtFieldArray(self, it.NumStaticFields()) : nullptr; + LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, it.NumStaticFields()); size_t num_sfields = 0; uint32_t last_field_idx = 0u; for (; it.HasNextStaticField(); it.Next()) { @@ -2326,17 +2340,15 @@ void ClassLinker::LoadClassMembers(Thread* self, const DexFile& dex_file, DCHECK_GE(field_idx, last_field_idx); // Ordering enforced by DexFileVerifier. if (num_sfields == 0 || LIKELY(field_idx > last_field_idx)) { DCHECK_LT(num_sfields, it.NumStaticFields()); - LoadField(it, klass, &sfields[num_sfields]); + LoadField(it, klass, &sfields->At(num_sfields)); ++num_sfields; last_field_idx = field_idx; } } - klass->SetSFields(sfields); - klass->SetNumStaticFields(num_sfields); + klass->SetSFieldsPtr(sfields); DCHECK_EQ(klass->NumStaticFields(), num_sfields); // Load instance fields. - ArtField* ifields = - it.NumInstanceFields() != 0 ? AllocArtFieldArray(self, it.NumInstanceFields()) : nullptr; + LengthPrefixedArray<ArtField>* ifields = AllocArtFieldArray(self, it.NumInstanceFields()); size_t num_ifields = 0u; last_field_idx = 0u; for (; it.HasNextInstanceField(); it.Next()) { @@ -2344,7 +2356,7 @@ void ClassLinker::LoadClassMembers(Thread* self, const DexFile& dex_file, DCHECK_GE(field_idx, last_field_idx); // Ordering enforced by DexFileVerifier. 
if (num_ifields == 0 || LIKELY(field_idx > last_field_idx)) { DCHECK_LT(num_ifields, it.NumInstanceFields()); - LoadField(it, klass, &ifields[num_ifields]); + LoadField(it, klass, &ifields->At(num_ifields)); ++num_ifields; last_field_idx = field_idx; } @@ -2356,18 +2368,11 @@ void ClassLinker::LoadClassMembers(Thread* self, const DexFile& dex_file, << ", unique instance fields: " << num_ifields << "/" << it.NumInstanceFields() << ")"; // NOTE: Not shrinking the over-allocated sfields/ifields. } - klass->SetIFields(ifields); - klass->SetNumInstanceFields(num_ifields); + klass->SetIFieldsPtr(ifields); DCHECK_EQ(klass->NumInstanceFields(), num_ifields); // Load methods. - if (it.NumDirectMethods() != 0) { - klass->SetDirectMethodsPtr(AllocArtMethodArray(self, it.NumDirectMethods())); - } - klass->SetNumDirectMethods(it.NumDirectMethods()); - if (it.NumVirtualMethods() != 0) { - klass->SetVirtualMethodsPtr(AllocArtMethodArray(self, it.NumVirtualMethods())); - } - klass->SetNumVirtualMethods(it.NumVirtualMethods()); + klass->SetDirectMethodsPtr(AllocArtMethodArray(self, it.NumDirectMethods())); + klass->SetVirtualMethodsPtr(AllocArtMethodArray(self, it.NumVirtualMethods())); size_t class_def_method_index = 0; uint32_t last_dex_method_index = DexFile::kDexNoIndex; size_t last_class_def_method_index = 0; @@ -2395,6 +2400,8 @@ void ClassLinker::LoadClassMembers(Thread* self, const DexFile& dex_file, } DCHECK(!it.HasNext()); } + // Ensure that the card is marked so that remembered sets pick up native roots. + Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(klass.Get()); self->AllowThreadSuspension(); } @@ -2476,8 +2483,8 @@ void ClassLinker::AppendToBootClassPath(const DexFile& dex_file, bool ClassLinker::IsDexFileRegisteredLocked(const DexFile& dex_file) { dex_lock_.AssertSharedHeld(Thread::Current()); - for (size_t i = 0; i != dex_caches_.size(); ++i) { - mirror::DexCache* dex_cache = GetDexCache(i); + for (GcRoot<mirror::DexCache>& root : dex_caches_) { + mirror::DexCache* dex_cache = root.Read(); if (dex_cache->GetDexFile() == &dex_file) { return true; } @@ -2775,8 +2782,7 @@ mirror::Class* ClassLinker::FindPrimitiveClass(char type) { return nullptr; } -mirror::Class* ClassLinker::InsertClass(const char* descriptor, mirror::Class* klass, - size_t hash) { +mirror::Class* ClassLinker::InsertClass(const char* descriptor, mirror::Class* klass, size_t hash) { if (VLOG_IS_ON(class_linker)) { mirror::DexCache* dex_cache = klass->GetDexCache(); std::string source; @@ -2787,11 +2793,13 @@ mirror::Class* ClassLinker::InsertClass(const char* descriptor, mirror::Class* k LOG(INFO) << "Loaded class " << descriptor << source; } WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_); - mirror::Class* existing = LookupClassFromTableLocked(descriptor, klass->GetClassLoader(), hash); + mirror::ClassLoader* const class_loader = klass->GetClassLoader(); + ClassTable* const class_table = InsertClassTableForClassLoader(class_loader); + mirror::Class* existing = class_table->Lookup(descriptor, hash); if (existing != nullptr) { return existing; } - if (kIsDebugBuild && !klass->IsTemp() && klass->GetClassLoader() == nullptr && + if (kIsDebugBuild && !klass->IsTemp() && class_loader == nullptr && dex_cache_image_class_lookup_required_) { // Check a class loaded with the system class loader matches one in the image if the class // is in the image. 
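The InsertClass hunk above is the heart of this change: instead of a single class_table_ plus pre_zygote_class_table_, every class loader now carries its own ClassTable, with boot_class_table_ serving the null (boot) loader, and the loader's card is dirtied via WriteBarrierEveryFieldOf so remembered sets pick up the new native root. A minimal sketch of that lookup/insert shape, with made-up names (Linker, InsertTableForLoader, TableForLoader) and a plain map standing in for ART's hash table:

#include <string>
#include <unordered_map>
#include <vector>

// Simplified stand-ins: ART's ClassTable hashes GcRoot<mirror::Class> by
// descriptor and hash, and the real table pointer lives on the ClassLoader
// object itself. Linker, InsertTableForLoader and TableForLoader are names
// made up for this sketch only.
struct Class {};

class ClassTable {
 public:
  Class* Lookup(const std::string& descriptor) const {
    auto it = classes_.find(descriptor);
    return it != classes_.end() ? it->second : nullptr;
  }
  void Insert(const std::string& descriptor, Class* klass) {
    classes_.emplace(descriptor, klass);
  }

 private:
  std::unordered_map<std::string, Class*> classes_;
};

struct ClassLoader {
  ClassTable* class_table = nullptr;  // Created lazily, owned by the linker.
};

class Linker {
 public:
  // A null loader stands for the boot class path and maps to the shared boot
  // table; any other loader lazily gets a table of its own.
  ClassTable* InsertTableForLoader(ClassLoader* loader) {
    if (loader == nullptr) {
      return &boot_table_;
    }
    if (loader->class_table == nullptr) {
      loader->class_table = new ClassTable;
      loaders_.push_back(loader);  // Tracked so tables can be visited and freed.
    }
    return loader->class_table;
  }

  ClassTable* TableForLoader(ClassLoader* loader) {
    return loader == nullptr ? &boot_table_ : loader->class_table;
  }

 private:
  ClassTable boot_table_;
  std::vector<ClassLoader*> loaders_;
};

Keeping one table per loader also makes teardown straightforward: the new ~ClassLinker loop walks class_loaders_ and deletes each loader's table, and LookupClasses/VisitClassesInternal simply iterate the boot table plus every registered loader.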
@@ -2801,118 +2809,63 @@ mirror::Class* ClassLinker::InsertClass(const char* descriptor, mirror::Class* k } } VerifyObject(klass); - class_table_.InsertWithHash(GcRoot<mirror::Class>(klass), hash); + class_table->InsertWithHash(klass, hash); + if (class_loader != nullptr) { + // This is necessary because we need to have the card dirtied for remembered sets. + Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(class_loader); + } if (log_new_class_table_roots_) { new_class_roots_.push_back(GcRoot<mirror::Class>(klass)); } return nullptr; } -void ClassLinker::UpdateClassVirtualMethods(mirror::Class* klass, ArtMethod* new_methods, - size_t new_num_methods) { - // classlinker_classes_lock_ is used to guard against races between root marking and changing the - // direct and virtual method pointers. - WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_); - klass->SetNumVirtualMethods(new_num_methods); +void ClassLinker::UpdateClassVirtualMethods(mirror::Class* klass, + LengthPrefixedArray<ArtMethod>* new_methods) { klass->SetVirtualMethodsPtr(new_methods); - if (log_new_class_table_roots_) { - new_class_roots_.push_back(GcRoot<mirror::Class>(klass)); - } -} - -mirror::Class* ClassLinker::UpdateClass(const char* descriptor, mirror::Class* klass, - size_t hash) { - WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_); - auto existing_it = class_table_.FindWithHash(std::make_pair(descriptor, klass->GetClassLoader()), - hash); - CHECK(existing_it != class_table_.end()); - mirror::Class* existing = existing_it->Read(); - CHECK_NE(existing, klass) << descriptor; - CHECK(!existing->IsResolved()) << descriptor; - CHECK_EQ(klass->GetStatus(), mirror::Class::kStatusResolving) << descriptor; - - CHECK(!klass->IsTemp()) << descriptor; - if (kIsDebugBuild && klass->GetClassLoader() == nullptr && - dex_cache_image_class_lookup_required_) { - // Check a class loaded with the system class loader matches one in the image if the class - // is in the image. - existing = LookupClassFromImage(descriptor); - if (existing != nullptr) { - CHECK_EQ(klass, existing) << descriptor; - } - } - VerifyObject(klass); - - // Update the element in the hash set. - *existing_it = GcRoot<mirror::Class>(klass); - if (log_new_class_table_roots_) { - new_class_roots_.push_back(GcRoot<mirror::Class>(klass)); - } - - return existing; + // Need to mark the card so that the remembered sets and mod union tables get updated. 
+ Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(klass); } bool ClassLinker::RemoveClass(const char* descriptor, mirror::ClassLoader* class_loader) { WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_); - auto pair = std::make_pair(descriptor, class_loader); - auto it = class_table_.Find(pair); - if (it != class_table_.end()) { - class_table_.Erase(it); - return true; - } - it = pre_zygote_class_table_.Find(pair); - if (it != pre_zygote_class_table_.end()) { - pre_zygote_class_table_.Erase(it); - return true; - } - return false; + ClassTable* const class_table = ClassTableForClassLoader(class_loader); + return class_table != nullptr && class_table->Remove(descriptor); } mirror::Class* ClassLinker::LookupClass(Thread* self, const char* descriptor, size_t hash, mirror::ClassLoader* class_loader) { { ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_); - mirror::Class* result = LookupClassFromTableLocked(descriptor, class_loader, hash); - if (result != nullptr) { - return result; + ClassTable* const class_table = ClassTableForClassLoader(class_loader); + if (class_table != nullptr) { + mirror::Class* result = class_table->Lookup(descriptor, hash); + if (result != nullptr) { + return result; + } } } if (class_loader != nullptr || !dex_cache_image_class_lookup_required_) { return nullptr; - } else { - // Lookup failed but need to search dex_caches_. - mirror::Class* result = LookupClassFromImage(descriptor); - if (result != nullptr) { - InsertClass(descriptor, result, hash); - } else { - // Searching the image dex files/caches failed, we don't want to get into this situation - // often as map searches are faster, so after kMaxFailedDexCacheLookups move all image - // classes into the class table. - constexpr uint32_t kMaxFailedDexCacheLookups = 1000; - if (++failed_dex_cache_class_lookups_ > kMaxFailedDexCacheLookups) { - MoveImageClassesToClassTable(); - } - } - return result; } -} - -mirror::Class* ClassLinker::LookupClassFromTableLocked(const char* descriptor, - mirror::ClassLoader* class_loader, - size_t hash) { - auto descriptor_pair = std::make_pair(descriptor, class_loader); - auto it = pre_zygote_class_table_.FindWithHash(descriptor_pair, hash); - if (it == pre_zygote_class_table_.end()) { - it = class_table_.FindWithHash(descriptor_pair, hash); - if (it == class_table_.end()) { - return nullptr; + // Lookup failed but need to search dex_caches_. + mirror::Class* result = LookupClassFromImage(descriptor); + if (result != nullptr) { + result = InsertClass(descriptor, result, hash); + } else { + // Searching the image dex files/caches failed, we don't want to get into this situation + // often as map searches are faster, so after kMaxFailedDexCacheLookups move all image + // classes into the class table. 
+ constexpr uint32_t kMaxFailedDexCacheLookups = 1000; + if (++failed_dex_cache_class_lookups_ > kMaxFailedDexCacheLookups) { + MoveImageClassesToClassTable(); } } - return it->Read(); + return result; } static mirror::ObjectArray<mirror::DexCache>* GetImageDexCaches() - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + SHARED_REQUIRES(Locks::mutator_lock_) { gc::space::ImageSpace* image = Runtime::Current()->GetHeap()->GetImageSpace(); CHECK(image != nullptr); mirror::Object* root = image->GetImageHeader().GetImageRoot(ImageHeader::kDexCaches); @@ -2928,6 +2881,7 @@ void ClassLinker::MoveImageClassesToClassTable() { ScopedAssertNoThreadSuspension ants(self, "Moving image classes to class table"); mirror::ObjectArray<mirror::DexCache>* dex_caches = GetImageDexCaches(); std::string temp; + ClassTable* const class_table = InsertClassTableForClassLoader(nullptr); for (int32_t i = 0; i < dex_caches->GetLength(); i++) { mirror::DexCache* dex_cache = dex_caches->Get(i); mirror::ObjectArray<mirror::Class>* types = dex_cache->GetResolvedTypes(); @@ -2937,12 +2891,12 @@ void ClassLinker::MoveImageClassesToClassTable() { DCHECK(klass->GetClassLoader() == nullptr); const char* descriptor = klass->GetDescriptor(&temp); size_t hash = ComputeModifiedUtf8Hash(descriptor); - mirror::Class* existing = LookupClassFromTableLocked(descriptor, nullptr, hash); + mirror::Class* existing = class_table->Lookup(descriptor, hash); if (existing != nullptr) { CHECK_EQ(existing, klass) << PrettyClassAndClassLoader(existing) << " != " << PrettyClassAndClassLoader(klass); } else { - class_table_.Insert(GcRoot<mirror::Class>(klass)); + class_table->Insert(klass); if (log_new_class_table_roots_) { new_class_roots_.push_back(GcRoot<mirror::Class>(klass)); } @@ -2955,9 +2909,13 @@ void ClassLinker::MoveImageClassesToClassTable() { void ClassLinker::MoveClassTableToPreZygote() { WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_); - DCHECK(pre_zygote_class_table_.Empty()); - pre_zygote_class_table_ = std::move(class_table_); - class_table_.Clear(); + boot_class_table_.FreezeSnapshot(); + for (GcRoot<mirror::ClassLoader>& root : class_loaders_) { + ClassTable* const class_table = root.Read()->GetClassTable(); + if (class_table != nullptr) { + class_table->FreezeSnapshot(); + } + } } mirror::Class* ClassLinker::LookupClassFromImage(const char* descriptor) { @@ -2989,31 +2947,18 @@ void ClassLinker::LookupClasses(const char* descriptor, std::vector<mirror::Clas MoveImageClassesToClassTable(); } WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_); - while (true) { - auto it = class_table_.Find(descriptor); - if (it == class_table_.end()) { - break; - } - result.push_back(it->Read()); - class_table_.Erase(it); - } - for (mirror::Class* k : result) { - class_table_.Insert(GcRoot<mirror::Class>(k)); + const size_t hash = ComputeModifiedUtf8Hash(descriptor); + mirror::Class* klass = boot_class_table_.Lookup(descriptor, hash); + if (klass != nullptr) { + result.push_back(klass); } - size_t pre_zygote_start = result.size(); - // Now handle the pre zygote table. - // Note: This dirties the pre-zygote table but shouldn't be an issue since LookupClasses is only - // called from the debugger. - while (true) { - auto it = pre_zygote_class_table_.Find(descriptor); - if (it == pre_zygote_class_table_.end()) { - break; + for (GcRoot<mirror::ClassLoader>& root : class_loaders_) { + // There can only be one class with the same descriptor per class loader. 
+ ClassTable* const class_table = root.Read()->GetClassTable(); + klass = class_table->Lookup(descriptor, hash); + if (klass != nullptr) { + result.push_back(klass); } - result.push_back(it->Read()); - pre_zygote_class_table_.Erase(it); - } - for (size_t i = pre_zygote_start; i < result.size(); ++i) { - pre_zygote_class_table_.Insert(GcRoot<mirror::Class>(result[i])); } } @@ -3046,6 +2991,18 @@ void ClassLinker::VerifyClass(Thread* self, Handle<mirror::Class> klass) { mirror::Class::SetStatus(klass, mirror::Class::kStatusVerifyingAtRuntime, self); } + // Skip verification if we are forcing a soft fail. + // This has to be before the normal verification enabled check, + // since technically verification is disabled in this mode. + if (UNLIKELY(Runtime::Current()->IsVerificationSoftFail())) { + // Force verification to be a 'soft failure'. + mirror::Class::SetStatus(klass, mirror::Class::kStatusVerified, self); + // As this is a fake verified status, make sure the methods are _not_ marked preverified + // later. + klass->SetPreverified(); + return; + } + // Skip verification if disabled. if (!Runtime::Current()->IsVerificationEnabled()) { mirror::Class::SetStatus(klass, mirror::Class::kStatusVerified, self); @@ -3271,14 +3228,13 @@ void ClassLinker::ResolveMethodExceptionHandlerTypes(const DexFile& dex_file, } const uint8_t* handlers_ptr = DexFile::GetCatchHandlerData(*code_item, 0); uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr); - ClassLinker* linker = Runtime::Current()->GetClassLinker(); for (uint32_t idx = 0; idx < handlers_size; idx++) { CatchHandlerIterator iterator(handlers_ptr); for (; iterator.HasNext(); iterator.Next()) { // Ensure exception types are resolved so that they don't need resolution to be delivered, // unresolved exception types will be ignored by exception delivery if (iterator.GetHandlerTypeIndex() != DexFile::kDexNoIndex16) { - mirror::Class* exception_type = linker->ResolveType(iterator.GetHandlerTypeIndex(), method); + mirror::Class* exception_type = ResolveType(iterator.GetHandlerTypeIndex(), method); if (exception_type == nullptr) { DCHECK(Thread::Current()->IsExceptionPending()); Thread::Current()->ClearException(); @@ -3310,7 +3266,7 @@ mirror::Class* ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& klass->SetDexCache(GetClassRoot(kJavaLangReflectProxy)->GetDexCache()); mirror::Class::SetStatus(klass, mirror::Class::kStatusIdx, self); std::string descriptor(GetDescriptorForProxy(klass.Get())); - size_t hash = ComputeModifiedUtf8Hash(descriptor.c_str()); + const size_t hash = ComputeModifiedUtf8Hash(descriptor.c_str()); // Insert the class before loading the fields as the field roots // (ArtField::declaring_class_) are only visited from the class @@ -3321,25 +3277,24 @@ mirror::Class* ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& // Instance fields are inherited, but we add a couple of static fields... const size_t num_fields = 2; - ArtField* sfields = AllocArtFieldArray(self, num_fields); - klass->SetSFields(sfields); - klass->SetNumStaticFields(num_fields); + LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, num_fields); + klass->SetSFieldsPtr(sfields); // 1. Create a static field 'interfaces' that holds the _declared_ interfaces implemented by // our proxy, so Class.getInterfaces doesn't return the flattened set. 
- ArtField* interfaces_sfield = &sfields[0]; - interfaces_sfield->SetDexFieldIndex(0); - interfaces_sfield->SetDeclaringClass(klass.Get()); - interfaces_sfield->SetAccessFlags(kAccStatic | kAccPublic | kAccFinal); + ArtField& interfaces_sfield = sfields->At(0); + interfaces_sfield.SetDexFieldIndex(0); + interfaces_sfield.SetDeclaringClass(klass.Get()); + interfaces_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal); // 2. Create a static field 'throws' that holds exceptions thrown by our methods. - ArtField* throws_sfield = &sfields[1]; - throws_sfield->SetDexFieldIndex(1); - throws_sfield->SetDeclaringClass(klass.Get()); - throws_sfield->SetAccessFlags(kAccStatic | kAccPublic | kAccFinal); + ArtField& throws_sfield = sfields->At(1); + throws_sfield.SetDexFieldIndex(1); + throws_sfield.SetDeclaringClass(klass.Get()); + throws_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal); // Proxies have 1 direct method, the constructor - auto* directs = AllocArtMethodArray(self, 1); + LengthPrefixedArray<ArtMethod>* directs = AllocArtMethodArray(self, 1); // Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we // want to throw OOM in the future. if (UNLIKELY(directs == nullptr)) { @@ -3347,13 +3302,12 @@ mirror::Class* ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& return nullptr; } klass->SetDirectMethodsPtr(directs); - klass->SetNumDirectMethods(1u); CreateProxyConstructor(klass, klass->GetDirectMethodUnchecked(0, image_pointer_size_)); // Create virtual method using specified prototypes. auto h_methods = hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Method>*>(methods)); DCHECK_EQ(h_methods->GetClass(), mirror::Method::ArrayClass()) - << PrettyClass(h_methods->GetClass()); + << PrettyClass(h_methods->GetClass()); const size_t num_virtual_methods = h_methods->GetLength(); auto* virtuals = AllocArtMethodArray(self, num_virtual_methods); // Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we @@ -3363,7 +3317,6 @@ mirror::Class* ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& return nullptr; } klass->SetVirtualMethodsPtr(virtuals); - klass->SetNumVirtualMethods(num_virtual_methods); for (size_t i = 0; i < num_virtual_methods; ++i) { auto* virtual_method = klass->GetVirtualMethodUnchecked(i, image_pointer_size_); auto* prototype = h_methods->Get(i)->GetArtMethod(); @@ -3395,12 +3348,12 @@ mirror::Class* ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& CHECK_NE(klass.Get(), new_class.Get()); klass.Assign(new_class.Get()); - CHECK_EQ(interfaces_sfield->GetDeclaringClass(), klass.Get()); - interfaces_sfield->SetObject<false>(klass.Get(), - soa.Decode<mirror::ObjectArray<mirror::Class>*>(interfaces)); - CHECK_EQ(throws_sfield->GetDeclaringClass(), klass.Get()); - throws_sfield->SetObject<false>(klass.Get(), - soa.Decode<mirror::ObjectArray<mirror::ObjectArray<mirror::Class> >*>(throws)); + CHECK_EQ(interfaces_sfield.GetDeclaringClass(), klass.Get()); + interfaces_sfield.SetObject<false>(klass.Get(), + soa.Decode<mirror::ObjectArray<mirror::Class>*>(interfaces)); + CHECK_EQ(throws_sfield.GetDeclaringClass(), klass.Get()); + throws_sfield.SetObject<false>( + klass.Get(), soa.Decode<mirror::ObjectArray<mirror::ObjectArray<mirror::Class> >*>(throws)); { // Lock on klass is released. Lock new class object. 
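The proxy-class hunks above rely on the other big structural change in this CL: field and method arrays are now LengthPrefixedArray<T> chunks allocated from the LinearAlloc, i.e. a length header followed by the packed elements, indexed through At() rather than raw pointers. A rough sketch of the idea with a fixed element size and made-up helper names (ART's real template also takes an explicit element size and alignment per call, since ArtMethod's size depends on the image pointer size):

#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <new>

// Simplified length-prefixed array: a 32-bit length immediately followed by
// 'length' packed elements of T. The zero-length data_ member is the usual
// flexible-tail trick; elements are constructed in place right behind the
// header. AllocArray is a made-up stand-in for AllocArtFieldArray.
template <typename T>
class LengthPrefixedArray {
 public:
  explicit LengthPrefixedArray(uint32_t length) : length_(length) {}

  static size_t ComputeSize(uint32_t length) {
    return sizeof(LengthPrefixedArray<T>) + length * sizeof(T);
  }

  T& At(uint32_t index) { return reinterpret_cast<T*>(data_)[index]; }
  uint32_t size() const { return length_; }

 private:
  uint32_t length_;
  alignas(T) uint8_t data_[0];
};

template <typename T>
LengthPrefixedArray<T>* AllocArray(uint32_t length) {
  if (length == 0) {
    return nullptr;  // Matches the new AllocArtFieldArray/AllocArtMethodArray behavior.
  }
  // One block holds the header and all elements; ART carves this out of the LinearAlloc.
  void* storage = std::malloc(LengthPrefixedArray<T>::ComputeSize(length));
  auto* array = new (storage) LengthPrefixedArray<T>(length);
  for (uint32_t i = 0; i < length; ++i) {
    new (&array->At(i)) T();  // Default-construct each element into the tail storage.
  }
  return array;
}

Because the length travels with the array itself, the separate SetNumStaticFields/SetNumVirtualMethods calls disappear from LoadClassMembers and CreateProxyClass, and callers index with sfields->At(0) instead of sfields[0].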
@@ -3410,7 +3363,7 @@ mirror::Class* ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& // sanity checks if (kIsDebugBuild) { - CHECK(klass->GetIFields() == nullptr); + CHECK(klass->GetIFieldsPtr() == nullptr); CheckProxyConstructor(klass->GetDirectMethod(0, image_pointer_size_)); for (size_t i = 0; i < num_virtual_methods; ++i) { @@ -3444,8 +3397,7 @@ std::string ClassLinker::GetDescriptorForProxy(mirror::Class* proxy_class) { return DotToDescriptor(name->ToModifiedUtf8().c_str()); } -ArtMethod* ClassLinker::FindMethodForProxy(mirror::Class* proxy_class, - ArtMethod* proxy_method) { +ArtMethod* ClassLinker::FindMethodForProxy(mirror::Class* proxy_class, ArtMethod* proxy_method) { DCHECK(proxy_class->IsProxyClass()); DCHECK(proxy_method->IsProxyMethod()); { @@ -3516,7 +3468,6 @@ void ClassLinker::CreateProxyMethod(Handle<mirror::Class> klass, ArtMethod* prot // At runtime the method looks like a reference and argument saving method, clone the code // related parameters from this method. out->SetEntryPointFromQuickCompiledCode(GetQuickProxyInvokeHandler()); - out->SetEntryPointFromInterpreter(artInterpreterToCompiledCodeBridge); } void ClassLinker::CheckProxyMethod(ArtMethod* method, ArtMethod* prototype) const { @@ -3775,7 +3726,7 @@ bool ClassLinker::InitializeClass(Thread* self, Handle<mirror::Class> klass, bool ClassLinker::WaitForInitializeClass(Handle<mirror::Class> klass, Thread* self, ObjectLock<mirror::Class>& lock) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + SHARED_REQUIRES(Locks::mutator_lock_) { while (true) { self->AssertNoPendingException(); CHECK(!klass->IsInitialized()); @@ -3819,7 +3770,7 @@ static void ThrowSignatureCheckResolveReturnTypeException(Handle<mirror::Class> Handle<mirror::Class> super_klass, ArtMethod* method, ArtMethod* m) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + SHARED_REQUIRES(Locks::mutator_lock_) { DCHECK(Thread::Current()->IsExceptionPending()); DCHECK(!m->IsProxyMethod()); const DexFile* dex_file = m->GetDexFile(); @@ -3843,7 +3794,7 @@ static void ThrowSignatureCheckResolveArgException(Handle<mirror::Class> klass, ArtMethod* method, ArtMethod* m, uint32_t index, uint32_t arg_type_idx) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + SHARED_REQUIRES(Locks::mutator_lock_) { DCHECK(Thread::Current()->IsExceptionPending()); DCHECK(!m->IsProxyMethod()); const DexFile* dex_file = m->GetDexFile(); @@ -3863,7 +3814,7 @@ static void ThrowSignatureMismatch(Handle<mirror::Class> klass, Handle<mirror::Class> super_klass, ArtMethod* method, const std::string& error_msg) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + SHARED_REQUIRES(Locks::mutator_lock_) { ThrowLinkageError(klass.Get(), "Class %s method %s resolves differently in %s %s: %s", PrettyDescriptor(klass.Get()).c_str(), @@ -3878,7 +3829,7 @@ static bool HasSameSignatureWithDifferentClassLoaders(Thread* self, Handle<mirror::Class> super_klass, ArtMethod* method1, ArtMethod* method2) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + SHARED_REQUIRES(Locks::mutator_lock_) { { StackHandleScope<1> hs(self); Handle<mirror::Class> return_type(hs.NewHandle(method1->GetReturnType())); @@ -4024,35 +3975,55 @@ bool ClassLinker::EnsureInitialized(Thread* self, Handle<mirror::Class> c, bool void ClassLinker::FixupTemporaryDeclaringClass(mirror::Class* temp_class, mirror::Class* new_class) { - ArtField* fields = new_class->GetIFields(); - DCHECK_EQ(temp_class->NumInstanceFields(), new_class->NumInstanceFields()); - for (size_t i = 0, count = new_class->NumInstanceFields(); i < count; i++) 
{ - if (fields[i].GetDeclaringClass() == temp_class) { - fields[i].SetDeclaringClass(new_class); + DCHECK_EQ(temp_class->NumInstanceFields(), 0u); + for (ArtField& field : new_class->GetIFields()) { + if (field.GetDeclaringClass() == temp_class) { + field.SetDeclaringClass(new_class); } } - fields = new_class->GetSFields(); - DCHECK_EQ(temp_class->NumStaticFields(), new_class->NumStaticFields()); - for (size_t i = 0, count = new_class->NumStaticFields(); i < count; i++) { - if (fields[i].GetDeclaringClass() == temp_class) { - fields[i].SetDeclaringClass(new_class); + DCHECK_EQ(temp_class->NumStaticFields(), 0u); + for (ArtField& field : new_class->GetSFields()) { + if (field.GetDeclaringClass() == temp_class) { + field.SetDeclaringClass(new_class); } } - DCHECK_EQ(temp_class->NumDirectMethods(), new_class->NumDirectMethods()); + DCHECK_EQ(temp_class->NumDirectMethods(), 0u); for (auto& method : new_class->GetDirectMethods(image_pointer_size_)) { if (method.GetDeclaringClass() == temp_class) { method.SetDeclaringClass(new_class); } } - DCHECK_EQ(temp_class->NumVirtualMethods(), new_class->NumVirtualMethods()); + DCHECK_EQ(temp_class->NumVirtualMethods(), 0u); for (auto& method : new_class->GetVirtualMethods(image_pointer_size_)) { if (method.GetDeclaringClass() == temp_class) { method.SetDeclaringClass(new_class); } } + + // Make sure the remembered set and mod-union tables know that we updated some of the native + // roots. + Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(new_class); +} + +ClassTable* ClassLinker::InsertClassTableForClassLoader(mirror::ClassLoader* class_loader) { + if (class_loader == nullptr) { + return &boot_class_table_; + } + ClassTable* class_table = class_loader->GetClassTable(); + if (class_table == nullptr) { + class_table = new ClassTable; + class_loaders_.push_back(class_loader); + // Don't already have a class table, add it to the class loader. + class_loader->SetClassTable(class_table); + } + return class_table; +} + +ClassTable* ClassLinker::ClassTableForClassLoader(mirror::ClassLoader* class_loader) { + return class_loader == nullptr ? &boot_class_table_ : class_loader->GetClassTable(); } bool ClassLinker::LinkClass(Thread* self, const char* descriptor, Handle<mirror::Class> klass, @@ -4096,6 +4067,14 @@ bool ClassLinker::LinkClass(Thread* self, const char* descriptor, Handle<mirror: // Retire the temporary class and create the correctly sized resolved class. StackHandleScope<1> hs(self); auto h_new_class = hs.NewHandle(klass->CopyOf(self, class_size, imt, image_pointer_size_)); + // Set arrays to null since we don't want to have multiple classes with the same ArtField or + // ArtMethod array pointers. If this occurs, it causes bugs in remembered sets since the GC + // may not see any references to the target space and clean the card for a class if another + // class had the same array pointer. 
+ klass->SetDirectMethodsPtrUnchecked(nullptr); + klass->SetVirtualMethodsPtr(nullptr); + klass->SetSFieldsPtrUnchecked(nullptr); + klass->SetIFieldsPtrUnchecked(nullptr); if (UNLIKELY(h_new_class.Get() == nullptr)) { self->AssertPendingOOMException(); mirror::Class::SetStatus(klass, mirror::Class::kStatusError, self); @@ -4105,9 +4084,26 @@ bool ClassLinker::LinkClass(Thread* self, const char* descriptor, Handle<mirror: CHECK_EQ(h_new_class->GetClassSize(), class_size); ObjectLock<mirror::Class> lock(self, h_new_class); FixupTemporaryDeclaringClass(klass.Get(), h_new_class.Get()); - mirror::Class* existing = UpdateClass(descriptor, h_new_class.Get(), - ComputeModifiedUtf8Hash(descriptor)); - CHECK(existing == nullptr || existing == klass.Get()); + + { + WriterMutexLock mu(self, *Locks::classlinker_classes_lock_); + mirror::ClassLoader* const class_loader = h_new_class.Get()->GetClassLoader(); + ClassTable* const table = InsertClassTableForClassLoader(class_loader); + mirror::Class* existing = table->UpdateClass(descriptor, h_new_class.Get(), + ComputeModifiedUtf8Hash(descriptor)); + CHECK_EQ(existing, klass.Get()); + if (kIsDebugBuild && class_loader == nullptr && dex_cache_image_class_lookup_required_) { + // Check a class loaded with the system class loader matches one in the image if the class + // is in the image. + mirror::Class* const image_class = LookupClassFromImage(descriptor); + if (image_class != nullptr) { + CHECK_EQ(klass.Get(), existing) << descriptor; + } + } + if (log_new_class_table_roots_) { + new_class_roots_.push_back(GcRoot<mirror::Class>(h_new_class.Get())); + } + } // This will notify waiters on temp class that saw the not yet resolved class in the // class_table_ during EnsureResolved. @@ -4240,7 +4236,7 @@ static bool CheckSuperClassChange(Handle<mirror::Class> klass, const DexFile& dex_file, const DexFile::ClassDef& class_def, mirror::Class* super_class) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + SHARED_REQUIRES(Locks::mutator_lock_) { // Check for unexpected changes in the superclass. // Quick check 1) is the super_class class-loader the boot class loader? This always has // precedence. @@ -4383,6 +4379,11 @@ bool ClassLinker::LinkSuperClass(Handle<mirror::Class> klass) { klass->SetFinalizable(); } + // Inherit class loader flag form super class. + if (super->IsClassLoaderClass()) { + klass->SetClassLoaderClass(); + } + // Inherit reference flags (if any) from the superclass. 
int reference_flags = (super->GetAccessFlags() & kAccReferenceFlagsMask); if (reference_flags != 0) { @@ -4433,7 +4434,7 @@ bool ClassLinker::LinkMethods(Thread* self, Handle<mirror::Class> klass, class MethodNameAndSignatureComparator FINAL : public ValueObject { public: explicit MethodNameAndSignatureComparator(ArtMethod* method) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) : + SHARED_REQUIRES(Locks::mutator_lock_) : dex_file_(method->GetDexFile()), mid_(&dex_file_->GetMethodId(method->GetDexMethodIndex())), name_(nullptr), name_len_(0) { DCHECK(!method->IsProxyMethod()) << PrettyMethod(method); @@ -4447,7 +4448,7 @@ class MethodNameAndSignatureComparator FINAL : public ValueObject { } bool HasSameNameAndSignature(ArtMethod* other) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + SHARED_REQUIRES(Locks::mutator_lock_) { DCHECK(!other->IsProxyMethod()) << PrettyMethod(other); const DexFile* other_dex_file = other->GetDexFile(); const DexFile::MethodId& other_mid = other_dex_file->GetMethodId(other->GetDexMethodIndex()); @@ -4483,7 +4484,7 @@ class LinkVirtualHashTable { image_pointer_size_(image_pointer_size) { std::fill(hash_table_, hash_table_ + hash_size_, invalid_index_); } - void Add(uint32_t virtual_method_index) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + void Add(uint32_t virtual_method_index) SHARED_REQUIRES(Locks::mutator_lock_) { ArtMethod* local_method = klass_->GetVirtualMethodDuringLinking( virtual_method_index, image_pointer_size_); const char* name = local_method->GetInterfaceMethodIfProxy(image_pointer_size_)->GetName(); @@ -4498,7 +4499,7 @@ class LinkVirtualHashTable { hash_table_[index] = virtual_method_index; } uint32_t FindAndRemove(MethodNameAndSignatureComparator* comparator) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + SHARED_REQUIRES(Locks::mutator_lock_) { const char* name = comparator->GetName(); uint32_t hash = ComputeModifiedUtf8Hash(name); size_t index = hash % hash_size_; @@ -4691,7 +4692,8 @@ bool ClassLinker::LinkInterfaceMethods(Thread* self, Handle<mirror::Class> klass const bool have_interfaces = interfaces.Get() != nullptr; const size_t num_interfaces = have_interfaces ? interfaces->GetLength() : klass->NumDirectInterfaces(); - const size_t method_size = ArtMethod::ObjectSize(image_pointer_size_); + const size_t method_alignment = ArtMethod::Alignment(image_pointer_size_); + const size_t method_size = ArtMethod::Size(image_pointer_size_); if (num_interfaces == 0) { if (super_ifcount == 0) { // Class implements no interfaces. @@ -4881,7 +4883,7 @@ bool ClassLinker::LinkInterfaceMethods(Thread* self, Handle<mirror::Class> klass const bool super_interface = is_super && extend_super_iftable; auto method_array(hs2.NewHandle(iftable->GetMethodArray(i))); - ArtMethod* input_virtual_methods = nullptr; + LengthPrefixedArray<ArtMethod>* input_virtual_methods = nullptr; Handle<mirror::PointerArray> input_vtable_array = NullHandle<mirror::PointerArray>(); int32_t input_array_length = 0; if (super_interface) { @@ -4916,8 +4918,7 @@ bool ClassLinker::LinkInterfaceMethods(Thread* self, Handle<mirror::Class> klass // matter which direction we go. We walk it backward anyway.) for (k = input_array_length - 1; k >= 0; --k) { ArtMethod* vtable_method = input_virtual_methods != nullptr ? 
- reinterpret_cast<ArtMethod*>( - reinterpret_cast<uintptr_t>(input_virtual_methods) + method_size * k) : + &input_virtual_methods->At(k, method_size, method_alignment) : input_vtable_array->GetElementPtrSize<ArtMethod*>(k, image_pointer_size_); ArtMethod* vtable_method_for_name_comparison = vtable_method->GetInterfaceMethodIfProxy(image_pointer_size_); @@ -4973,21 +4974,30 @@ bool ClassLinker::LinkInterfaceMethods(Thread* self, Handle<mirror::Class> klass const size_t old_method_count = klass->NumVirtualMethods(); const size_t new_method_count = old_method_count + miranda_methods.size(); // Attempt to realloc to save RAM if possible. - ArtMethod* old_virtuals = klass->GetVirtualMethodsPtr(); + LengthPrefixedArray<ArtMethod>* old_virtuals = klass->GetVirtualMethodsPtr(); // The Realloced virtual methods aren't visiblef from the class roots, so there is no issue // where GCs could attempt to mark stale pointers due to memcpy. And since we overwrite the // realloced memory with out->CopyFrom, we are guaranteed to have objects in the to space since // CopyFrom has internal read barriers. - auto* virtuals = reinterpret_cast<ArtMethod*>(runtime->GetLinearAlloc()->Realloc( - self, old_virtuals, old_method_count * method_size, new_method_count * method_size)); + const size_t old_size = old_virtuals != nullptr + ? LengthPrefixedArray<ArtMethod>::ComputeSize(old_method_count, + method_size, + method_alignment) + : 0u; + const size_t new_size = LengthPrefixedArray<ArtMethod>::ComputeSize(new_method_count, + method_size, + method_alignment); + auto* virtuals = reinterpret_cast<LengthPrefixedArray<ArtMethod>*>( + runtime->GetLinearAlloc()->Realloc(self, old_virtuals, old_size, new_size)); if (UNLIKELY(virtuals == nullptr)) { self->AssertPendingOOMException(); + self->EndAssertNoThreadSuspension(old_cause); return false; } ScopedArenaUnorderedMap<ArtMethod*, ArtMethod*> move_table(allocator.Adapter()); if (virtuals != old_virtuals) { // Maps from heap allocated miranda method to linear alloc miranda method. - StrideIterator<ArtMethod> out(reinterpret_cast<uintptr_t>(virtuals), method_size); + StrideIterator<ArtMethod> out = virtuals->Begin(method_size, method_alignment); // Copy over the old methods + miranda methods. for (auto& m : klass->GetVirtualMethods(image_pointer_size_)) { move_table.emplace(&m, &*out); @@ -4997,8 +5007,7 @@ bool ClassLinker::LinkInterfaceMethods(Thread* self, Handle<mirror::Class> klass ++out; } } - StrideIterator<ArtMethod> out( - reinterpret_cast<uintptr_t>(virtuals) + old_method_count * method_size, method_size); + StrideIterator<ArtMethod> out(virtuals->Begin(method_size, method_alignment) + old_method_count); // Copy over miranda methods before copying vtable since CopyOf may cause thread suspension and // we want the roots of the miranda methods to get visited. for (ArtMethod* mir_method : miranda_methods) { @@ -5007,7 +5016,8 @@ bool ClassLinker::LinkInterfaceMethods(Thread* self, Handle<mirror::Class> klass move_table.emplace(mir_method, &*out); ++out; } - UpdateClassVirtualMethods(klass.Get(), virtuals, new_method_count); + virtuals->SetLength(new_method_count); + UpdateClassVirtualMethods(klass.Get(), virtuals); // Done copying methods, they are all roots in the class now, so we can end the no thread // suspension assert. 
   self->EndAssertNoThreadSuspension(old_cause);
@@ -5020,8 +5030,7 @@ bool ClassLinker::LinkInterfaceMethods(Thread* self, Handle<mirror::Class> klass
       self->AssertPendingOOMException();
       return false;
     }
-    out = StrideIterator<ArtMethod>(
-        reinterpret_cast<uintptr_t>(virtuals) + old_method_count * method_size, method_size);
+    out = virtuals->Begin(method_size, method_alignment) + old_method_count;
     size_t vtable_pos = old_vtable_count;
     for (size_t i = old_method_count; i < new_method_count; ++i) {
       // Leave the declaring class alone as type indices are relative to it
@@ -5075,7 +5084,7 @@ bool ClassLinker::LinkInterfaceMethods(Thread* self, Handle<mirror::Class> klass
     }
     // Put some random garbage in old virtuals to help find stale pointers.
     if (virtuals != old_virtuals) {
-      memset(old_virtuals, 0xFEu, ArtMethod::ObjectSize(image_pointer_size_) * old_method_count);
+      memset(old_virtuals, 0xFEu, old_size);
     }
   } else {
     self->EndAssertNoThreadSuspension(old_cause);
@@ -5100,7 +5109,7 @@ bool ClassLinker::LinkStaticFields(Thread* self, Handle<mirror::Class> klass, si
 }

 struct LinkFieldsComparator {
-  explicit LinkFieldsComparator() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+  explicit LinkFieldsComparator() SHARED_REQUIRES(Locks::mutator_lock_) {
   }
   // No thread safety analysis as will be called from STL. Checked lock held in constructor.
   bool operator()(ArtField* field1, ArtField* field2)
@@ -5137,7 +5146,8 @@ bool ClassLinker::LinkFields(Thread* self, Handle<mirror::Class> klass, bool is_
                              size_t* class_size) {
   self->AllowThreadSuspension();
   const size_t num_fields = is_static ? klass->NumStaticFields() : klass->NumInstanceFields();
-  ArtField* const fields = is_static ? klass->GetSFields() : klass->GetIFields();
+  LengthPrefixedArray<ArtField>* const fields = is_static ? klass->GetSFieldsPtr() :
+      klass->GetIFieldsPtr();
   // Initialize field_offset
   MemberOffset field_offset(0);
@@ -5160,7 +5170,7 @@ bool ClassLinker::LinkFields(Thread* self, Handle<mirror::Class> klass, bool is_
   const char* old_no_suspend_cause = self->StartAssertNoThreadSuspension(
       "Naked ArtField references in deque");
   for (size_t i = 0; i < num_fields; i++) {
-    grouped_and_sorted_fields.push_back(&fields[i]);
+    grouped_and_sorted_fields.push_back(&fields->At(i));
   }
   std::sort(grouped_and_sorted_fields.begin(), grouped_and_sorted_fields.end(),
             LinkFieldsComparator());
@@ -5183,7 +5193,7 @@ bool ClassLinker::LinkFields(Thread* self, Handle<mirror::Class> klass, bool is_
       field_offset = MemberOffset(RoundUp(field_offset.Uint32Value(), 4));
       AddFieldGap(old_offset.Uint32Value(), field_offset.Uint32Value(), &gaps);
     }
-    DCHECK(IsAligned<sizeof(mirror::HeapReference<mirror::Object>)>(field_offset.Uint32Value()));
+    DCHECK_ALIGNED(field_offset.Uint32Value(), sizeof(mirror::HeapReference<mirror::Object>));
     grouped_and_sorted_fields.pop_front();
     num_reference_fields++;
     field->SetOffset(field_offset);
@@ -5205,7 +5215,8 @@ bool ClassLinker::LinkFields(Thread* self, Handle<mirror::Class> klass, bool is_
     // We know there are no non-reference fields in the Reference classes, and we know
     // that 'referent' is alphabetically last, so this is easy...
     CHECK_EQ(num_reference_fields, num_fields) << PrettyClass(klass.Get());
-    CHECK_STREQ(fields[num_fields - 1].GetName(), "referent") << PrettyClass(klass.Get());
+    CHECK_STREQ(fields->At(num_fields - 1).GetName(), "referent")
+        << PrettyClass(klass.Get());
     --num_reference_fields;
   }
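The hunks above replace raw pointer arithmetic (base + method_size * k) with LengthPrefixedArray and StrideIterator accessors that take the element size and alignment as runtime values, since an ArtMethod's footprint depends on the image pointer size. As a rough, self-contained illustration of why ComputeSize and At need both values, here is a minimal sketch of a length-prefixed block with a runtime stride; the type and helper names are invented for the example and are not ART's actual API.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <cstring>

// Illustrative model of a length-prefixed array whose element size and
// alignment are only known at run time. Not ART's real LengthPrefixedArray.
struct LengthPrefixedBlock {
  static size_t RoundUp(size_t value, size_t alignment) {
    return (value + alignment - 1) & ~(alignment - 1);
  }

  // Bytes needed for the length header followed by `count` elements, each
  // occupying `element_size` bytes rounded up to `alignment`.
  static size_t ComputeSize(size_t count, size_t element_size, size_t alignment) {
    const size_t stride = RoundUp(element_size, alignment);
    return RoundUp(sizeof(uint32_t), alignment) + stride * count;
  }

  // Address of element `index`, using the same stride arithmetic.
  uint8_t* At(size_t index, size_t element_size, size_t alignment) {
    const size_t stride = RoundUp(element_size, alignment);
    assert(index < length_);
    return reinterpret_cast<uint8_t*>(this) +
           RoundUp(sizeof(uint32_t), alignment) + stride * index;
  }

  uint32_t length_;  // Number of elements; the "length prefix".
};

int main() {
  const size_t element_size = 24;  // Hypothetical per-method footprint.
  const size_t alignment = 16;     // Hypothetical required alignment.
  const size_t count = 3;
  void* storage = std::calloc(1, LengthPrefixedBlock::ComputeSize(count, element_size, alignment));
  auto* block = static_cast<LengthPrefixedBlock*>(storage);
  block->length_ = count;
  // Growing the block is a realloc to the new ComputeSize followed by a
  // length fix-up, mirroring the realloc-and-SetLength pattern above.
  std::memset(block->At(1, element_size, alignment), 0xFE, element_size);
  std::free(storage);
  return 0;
}

A StrideIterator is then just a pointer that advances by the same rounded-up stride, which is why Begin() also takes the size and alignment in the hunks above.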
@@ -5239,15 +5250,15 @@ bool ClassLinker::LinkFields(Thread* self, Handle<mirror::Class> klass, bool is_
                    sizeof(mirror::HeapReference<mirror::Object>));
   MemberOffset current_ref_offset = start_ref_offset;
   for (size_t i = 0; i < num_fields; i++) {
-    ArtField* field = &fields[i];
+    ArtField* field = &fields->At(i);
     VLOG(class_linker) << "LinkFields: " << (is_static ? "static" : "instance")
         << " class=" << PrettyClass(klass.Get()) << " field=" << PrettyField(field)
         << " offset=" << field->GetOffsetDuringLinking();
     if (i != 0) {
-      ArtField* const prev_field = &fields[i - 1];
+      ArtField* const prev_field = &fields->At(i - 1);
       // NOTE: The field names can be the same. This is not possible in the Java language
       // but it's valid Java/dex bytecode and for example proguard can generate such bytecode.
-      CHECK_LE(strcmp(prev_field->GetName(), field->GetName()), 0);
+      DCHECK_LE(strcmp(prev_field->GetName(), field->GetName()), 0);
     }
     Primitive::Type type = field->GetTypeAsPrimitiveType();
     bool is_primitive = type != Primitive::kPrimNot;
@@ -5597,23 +5608,22 @@ const char* ClassLinker::MethodShorty(uint32_t method_idx, ArtMethod* referrer,
   return dex_file.GetMethodShorty(method_id, length);
 }

-void ClassLinker::DumpAllClasses(int flags) {
-  if (dex_cache_image_class_lookup_required_) {
-    MoveImageClassesToClassTable();
-  }
-  // TODO: at the time this was written, it wasn't safe to call PrettyField with the ClassLinker
-  // lock held, because it might need to resolve a field's type, which would try to take the lock.
-  std::vector<mirror::Class*> all_classes;
-  {
-    ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
-    for (GcRoot<mirror::Class>& it : class_table_) {
-      all_classes.push_back(it.Read());
-    }
-  }
+class DumpClassVisitor : public ClassVisitor {
+ public:
+  explicit DumpClassVisitor(int flags) : flags_(flags) {}
-  for (size_t i = 0; i < all_classes.size(); ++i) {
-    all_classes[i]->DumpClass(std::cerr, flags);
+  bool Visit(mirror::Class* klass) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
+    klass->DumpClass(LOG(ERROR), flags_);
+    return true;
   }
+
+ private:
+  const int flags_;
+};
+
+void ClassLinker::DumpAllClasses(int flags) {
+  DumpClassVisitor visitor(flags);
+  VisitClasses(&visitor);
 }

 static OatFile::OatMethod CreateOatMethod(const void* code) {
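The rewritten DumpAllClasses above no longer snapshots class_table_ into a vector; it hands a visitor to VisitClasses and lets the class linker walk whatever tables exist, stopping early if the visitor asks it to. A minimal, self-contained sketch of that callback shape follows; the types here (FakeClass, a plain vector walk) are simplified stand-ins rather than ART's mirror::Class and ClassVisitor.

#include <iostream>
#include <string>
#include <vector>

// Simplified stand-in for a loaded class.
struct FakeClass {
  std::string descriptor;
};

class ClassVisitor {
 public:
  virtual ~ClassVisitor() {}
  // Return true to continue visiting, false to stop.
  virtual bool Visit(FakeClass* klass) = 0;
};

class DumpClassVisitor : public ClassVisitor {
 public:
  explicit DumpClassVisitor(int flags) : flags_(flags) {}

  bool Visit(FakeClass* klass) override {
    std::cerr << "class " << klass->descriptor << " (flags=" << flags_ << ")\n";
    return true;  // Keep going; a filtering visitor could bail out early.
  }

 private:
  const int flags_;
};

// Stand-in for ClassLinker::VisitClasses: forwards each class to the visitor.
void VisitClasses(std::vector<FakeClass>& classes, ClassVisitor* visitor) {
  for (FakeClass& klass : classes) {
    if (!visitor->Visit(&klass)) {
      return;
    }
  }
}

int main() {
  std::vector<FakeClass> classes = {{"Ljava/lang/Object;"}, {"Ljava/lang/String;"}};
  DumpClassVisitor visitor(/*flags=*/0);
  VisitClasses(classes, &visitor);
  return 0;
}

Centralizing the iteration behind a visitor keeps the locking and table layout details inside the class linker instead of leaking them to every caller that wants to enumerate classes.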
@@ -5647,30 +5657,48 @@ void ClassLinker::SetEntryPointsToCompiledCode(ArtMethod* method,
                                                const void* method_code) const {
   OatFile::OatMethod oat_method = CreateOatMethod(method_code);
   oat_method.LinkMethod(method);
-  method->SetEntryPointFromInterpreter(artInterpreterToCompiledCodeBridge);
 }

 void ClassLinker::SetEntryPointsToInterpreter(ArtMethod* method) const {
   if (!method->IsNative()) {
-    method->SetEntryPointFromInterpreter(artInterpreterToInterpreterBridge);
     method->SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
   } else {
     const void* quick_method_code = GetQuickGenericJniStub();
     OatFile::OatMethod oat_method = CreateOatMethod(quick_method_code);
     oat_method.LinkMethod(method);
-    method->SetEntryPointFromInterpreter(artInterpreterToCompiledCodeBridge);
   }
 }

 void ClassLinker::DumpForSigQuit(std::ostream& os) {
-  Thread* self = Thread::Current();
+  ScopedObjectAccess soa(Thread::Current());
   if (dex_cache_image_class_lookup_required_) {
-    ScopedObjectAccess soa(self);
     MoveImageClassesToClassTable();
   }
-  ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
-  os << "Zygote loaded classes=" << pre_zygote_class_table_.Size() << " post zygote classes="
-     << class_table_.Size() << "\n";
+  ReaderMutexLock mu(soa.Self(), *Locks::classlinker_classes_lock_);
+  os << "Zygote loaded classes=" << NumZygoteClasses() << " post zygote classes="
+     << NumNonZygoteClasses() << "\n";
+}
+
+size_t ClassLinker::NumZygoteClasses() const {
+  size_t sum = boot_class_table_.NumZygoteClasses();
+  for (const GcRoot<mirror::ClassLoader>& root : class_loaders_) {
+    ClassTable* const class_table = root.Read()->GetClassTable();
+    if (class_table != nullptr) {
+      sum += class_table->NumZygoteClasses();
+    }
+  }
+  return sum;
+}
+
+size_t ClassLinker::NumNonZygoteClasses() const {
+  size_t sum = boot_class_table_.NumNonZygoteClasses();
+  for (const GcRoot<mirror::ClassLoader>& root : class_loaders_) {
+    ClassTable* const class_table = root.Read()->GetClassTable();
+    if (class_table != nullptr) {
+      sum += class_table->NumNonZygoteClasses();
+    }
+  }
+  return sum;
 }

 size_t ClassLinker::NumLoadedClasses() {
@@ -5679,7 +5707,7 @@ size_t ClassLinker::NumLoadedClasses() {
   }
   ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
   // Only return non zygote classes since these are the ones which apps which care about.
-  return class_table_.Size();
+  return NumNonZygoteClasses();
 }

 pid_t ClassLinker::GetClassesLockOwner() {
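NumZygoteClasses and NumNonZygoteClasses above no longer read a single global table; they sum the boot class table plus one ClassTable per registered class loader, skipping loaders that have not created a table yet. A self-contained sketch of that aggregation shape, with invented stand-in types rather than ART's ClassTable and GcRoot:

#include <cstddef>
#include <iostream>
#include <vector>

// Invented stand-in: each table tracks how many classes were inserted
// before and after the zygote fork.
struct FakeClassTable {
  size_t zygote_classes;
  size_t post_zygote_classes;
};

struct FakeClassLoader {
  FakeClassTable* table;  // May be null if the loader has loaded nothing yet.
};

size_t NumZygoteClasses(const FakeClassTable& boot_table,
                        const std::vector<FakeClassLoader>& loaders) {
  size_t sum = boot_table.zygote_classes;
  for (const FakeClassLoader& loader : loaders) {
    if (loader.table != nullptr) {
      sum += loader.table->zygote_classes;  // Skip loaders without a table.
    }
  }
  return sum;
}

int main() {
  FakeClassTable boot{100, 7};
  FakeClassTable app{0, 42};
  std::vector<FakeClassLoader> loaders = {{&app}, {nullptr}};
  std::cout << "Zygote loaded classes=" << NumZygoteClasses(boot, loaders) << "\n";
  return 0;
}

Splitting the counts per loader matches the move to per-class-loader class tables, at the cost of walking the loader list each time a total is needed.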
@@ -5750,43 +5778,6 @@ const char* ClassLinker::GetClassRootDescriptor(ClassRoot class_root) {
   return descriptor;
 }

-std::size_t ClassLinker::ClassDescriptorHashEquals::operator()(const GcRoot<mirror::Class>& root)
-    const {
-  std::string temp;
-  return ComputeModifiedUtf8Hash(root.Read()->GetDescriptor(&temp));
-}
-
-bool ClassLinker::ClassDescriptorHashEquals::operator()(const GcRoot<mirror::Class>& a,
-                                                        const GcRoot<mirror::Class>& b) const {
-  if (a.Read()->GetClassLoader() != b.Read()->GetClassLoader()) {
-    return false;
-  }
-  std::string temp;
-  return a.Read()->DescriptorEquals(b.Read()->GetDescriptor(&temp));
-}
-
-std::size_t ClassLinker::ClassDescriptorHashEquals::operator()(
-    const std::pair<const char*, mirror::ClassLoader*>& element) const {
-  return ComputeModifiedUtf8Hash(element.first);
-}
-
-bool ClassLinker::ClassDescriptorHashEquals::operator()(
-    const GcRoot<mirror::Class>& a, const std::pair<const char*, mirror::ClassLoader*>& b) const {
-  if (a.Read()->GetClassLoader() != b.second) {
-    return false;
-  }
-  return a.Read()->DescriptorEquals(b.first);
-}
-
-bool ClassLinker::ClassDescriptorHashEquals::operator()(const GcRoot<mirror::Class>& a,
-                                                        const char* descriptor) const {
-  return a.Read()->DescriptorEquals(descriptor);
-}
-
-std::size_t ClassLinker::ClassDescriptorHashEquals::operator()(const char* descriptor) const {
-  return ComputeModifiedUtf8Hash(descriptor);
-}
-
 bool ClassLinker::MayBeCalledWithDirectCodePointer(ArtMethod* m) {
   if (Runtime::Current()->UseJit()) {
     // JIT can have direct code pointers from any method to any other method.
@@ -5910,7 +5901,10 @@ jobject ClassLinker::CreatePathClassLoader(Thread* self, std::vector<const DexFi
 }

 ArtMethod* ClassLinker::CreateRuntimeMethod() {
-  ArtMethod* method = AllocArtMethodArray(Thread::Current(), 1);
+  const size_t method_alignment = ArtMethod::Alignment(image_pointer_size_);
+  const size_t method_size = ArtMethod::Size(image_pointer_size_);
+  LengthPrefixedArray<ArtMethod>* method_array = AllocArtMethodArray(Thread::Current(), 1);
+  ArtMethod* method = &method_array->At(0, method_size, method_alignment);
   CHECK(method != nullptr);
   method->SetDexMethodIndex(DexFile::kDexNoIndex);
   CHECK(method->IsRuntimeMethod());
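The deleted ClassDescriptorHashEquals overloads hashed classes by descriptor and compared them by descriptor plus defining class loader, with extra overloads so lookups could be keyed directly by a raw descriptor string; that responsibility now lives with the class table. The sketch below shows the general hash-and-equality functor pattern with invented types and std::unordered_set; it is an illustration of the idea, not the relocated ART code.

#include <cstddef>
#include <functional>
#include <iostream>
#include <string>
#include <unordered_set>

// Invented stand-in for a loaded class: identified by its descriptor and
// the loader that defined it.
struct FakeClass {
  std::string descriptor;
  const void* loader;
};

struct DescriptorHashEquals {
  // Hash only the descriptor so equal classes land in the same bucket.
  size_t operator()(const FakeClass& c) const {
    return std::hash<std::string>()(c.descriptor);
  }
  // Two entries match only if both descriptor and defining loader agree.
  bool operator()(const FakeClass& a, const FakeClass& b) const {
    return a.loader == b.loader && a.descriptor == b.descriptor;
  }
};

int main() {
  std::unordered_set<FakeClass, DescriptorHashEquals, DescriptorHashEquals> classes;
  int boot_loader = 0;
  classes.insert(FakeClass{"Ljava/lang/Object;", &boot_loader});
  // Lookup builds a key with the same descriptor and loader.
  std::cout << classes.count(FakeClass{"Ljava/lang/Object;", &boot_loader}) << "\n";
  return 0;
}

ART's own hash set additionally accepts bare descriptor strings as lookup keys, which is what the removed const char* and pair overloads provided; a plain std::unordered_set cannot do that heterogeneous lookup, so the sketch constructs a full key instead.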