author      2025-02-04 09:49:57 +0000
committer   2025-02-17 02:22:37 -0800
commit      34c2dc98241193a9d17e1b0edcb82fbe62dfba43 (patch)
tree        26651d29bf61589e898a3162e08d8a75d33c14ca /runtime/class_linker.cc
parent      fef892b06f105d9497a3a3a480f84b11f2c7f02e (diff)
Do most of `ClassLinker::LoadClass()` in native state.
Load class data to temporary storage without holding the
mutator lock to avoid holding up any checkpoints. Then
copy the data to the actual `Class` structures.
For abstract methods, use the "quick to interpreter bridge"
directly instead of going through the boot image trampoline.
Note: `ArtMethod::{hotness_count_,imt_index_}` was previously
documented as unused and zero-initialized for abstract methods of
classes (as opposed to interfaces) in
https://android-review.googlesource.com/3477431 .
This unused value was actually set to the initial hotness threshold;
this change makes it zero-initialized.
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Bug: 329196666
Change-Id: I07a040944e9137481897b5d9313d56e623a3894f
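
To make the approach described in the commit message concrete, here is a minimal, self-contained sketch of the two-phase idea: gather the class data into plain structs while no lock is held, then publish it in one short step while the lock is held. All names below (`TwoPhaseLoader`, `FieldData`, `Field`) are hypothetical illustrations, not ART's actual classes; in the real change the first phase runs in native thread state and the second phase runs under the mutator lock with suspension asserted off, so the slow part never delays a checkpoint.

```cpp
#include <cstdint>
#include <mutex>
#include <utility>
#include <vector>

// Plain snapshot filled without holding any lock.
struct FieldData {
  uint32_t access_flags;
  uint32_t dex_field_index;
};

// The "real" object that other threads may inspect once published.
struct Field {
  uint32_t access_flags = 0;
  uint32_t dex_field_index = 0;
};

class TwoPhaseLoader {
 public:
  // Phase 1: potentially slow; runs lock-free so no other thread is held up
  // (the analogue of doing the LoadClass() work in native state).
  void Load(const std::vector<std::pair<uint32_t, uint32_t>>& raw) {
    snapshot_.reserve(raw.size());
    for (const auto& [flags, index] : raw) {
      snapshot_.push_back(FieldData{flags, index});
    }
  }

  // Phase 2: a short copy into the final storage while the lock is held
  // (the analogue of Commit() under the mutator lock).
  void Commit(std::vector<Field>& out, std::mutex& lock) const {
    std::lock_guard<std::mutex> guard(lock);
    out.resize(snapshot_.size());
    for (size_t i = 0; i != snapshot_.size(); ++i) {
      out[i].access_flags = snapshot_[i].access_flags;
      out[i].dex_field_index = snapshot_[i].dex_field_index;
    }
  }

 private:
  std::vector<FieldData> snapshot_;
};

int main() {
  std::mutex lock;
  std::vector<Field> fields;
  TwoPhaseLoader loader;
  loader.Load({{0x0008u, 3u}, {0x0002u, 7u}});  // (access flags, dex index) pairs
  loader.Commit(fields, lock);
  return fields.size() == 2 ? 0 : 1;
}
```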
Diffstat (limited to 'runtime/class_linker.cc')
-rw-r--r-- | runtime/class_linker.cc | 491
1 file changed, 329 insertions, 162 deletions
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc index c2c2e2c928..078a5c8383 100644 --- a/runtime/class_linker.cc +++ b/runtime/class_linker.cc @@ -3934,33 +3934,108 @@ class ClassLinker::MethodAnnotationsIterator { const dex::MethodAnnotationsItem* const end_; }; -void ClassLinker::LoadField(const ClassAccessor::Field& field, - Handle<mirror::Class> klass, - ArtField* dst) { - const uint32_t field_idx = field.GetIndex(); - dst->SetDexFieldIndex(field_idx); - dst->SetDeclaringClass(klass.Get()); +class ClassLinker::LoadClassHelper { + public: + LoadClassHelper( + Runtime* runtime, const DexFile& dex_file, bool is_interface) + : runtime_(runtime), + dex_file_(dex_file), + hotness_count_(runtime->GetJITOptions()->GetWarmupThreshold()), + is_aot_compiler_(runtime->IsAotCompiler()), + is_interface_(is_interface), + stack_(runtime->GetArenaPool()), + allocator_(&stack_), + num_direct_methods_(0u), + has_finalizer_(false) {} + + // Note: This function can take a long time and therefore it should not be called while holding + // the mutator lock. Otherwise we can experience an occasional suspend request timeout. + void Load(const ClassAccessor& accessor, + const dex::ClassDef& dex_class_def, + const OatFile::OatClass& oat_class); + + void Commit(Handle<mirror::Class> klass, + PointerSize pointer_size, + LengthPrefixedArray<ArtField>* fields, + LengthPrefixedArray<ArtMethod>* methods) + REQUIRES_SHARED(Locks::mutator_lock_); + + uint32_t NumFields() const { + return dchecked_integral_cast<uint32_t>(fields_.size()); + } + + uint32_t NumMethods() const { + return dchecked_integral_cast<uint32_t>(methods_.size()); + } + + private: + struct ArtFieldData { + uint32_t access_flags; + uint32_t dex_field_index; + }; + + struct ArtMethodData { + uint32_t access_flags; + uint32_t dex_method_index; + uint16_t method_index; + uint16_t imt_index_or_hotness_count; + const void* data; + const void* entrypoint; + }; + + ALWAYS_INLINE + static void LoadField(const ClassAccessor::Field& field, /*out*/ ArtFieldData* dst); + + ALWAYS_INLINE + void LoadMethod(const ClassAccessor::Method& method, + /*inout*/ MethodAnnotationsIterator* mai, + /*out*/ ArtMethodData* dst); + + ALWAYS_INLINE + void LinkCode(ArtMethodData* method, + uint32_t class_def_method_index, + /*inout*/ OatClassCodeIterator* occi); + + ALWAYS_INLINE + void FillFields(ObjPtr<mirror::Class> klass, /*out*/ LengthPrefixedArray<ArtField>* fields) + REQUIRES_SHARED(Locks::mutator_lock_); + + template <PointerSize kPointerSize> + ALWAYS_INLINE + void FillMethods(ObjPtr<mirror::Class> klass, /*out*/ LengthPrefixedArray<ArtMethod>* methods) + REQUIRES_SHARED(Locks::mutator_lock_); + + Runtime* const runtime_; + const DexFile& dex_file_; + const uint16_t hotness_count_; + const bool is_aot_compiler_; + const bool is_interface_; + ArenaStack stack_; + ScopedArenaAllocator allocator_; + + ArrayRef<ArtFieldData> fields_; + ArrayRef<ArtMethodData> methods_; + uint32_t num_direct_methods_; + bool has_finalizer_; +}; + +inline void ClassLinker::LoadClassHelper::LoadField(const ClassAccessor::Field& field, + /*out*/ ArtFieldData* dst) { + dst->dex_field_index = field.GetIndex(); // Get access flags from the DexFile and set hiddenapi runtime access flags. 
- dst->SetAccessFlags(field.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(field)); + dst->access_flags = field.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(field); } -void ClassLinker::LoadMethod(const DexFile& dex_file, - const ClassAccessor::Method& method, - ObjPtr<mirror::Class> klass, - /*inout*/ MethodAnnotationsIterator* mai, - /*out*/ ArtMethod* dst) { - ScopedAssertNoThreadSuspension sants(__FUNCTION__); - - const uint32_t dex_method_idx = method.GetIndex(); - const dex::MethodId& method_id = dex_file.GetMethodId(dex_method_idx); +inline void ClassLinker::LoadClassHelper::LoadMethod(const ClassAccessor::Method& method, + /*inout*/ MethodAnnotationsIterator* mai, + /*out*/ ArtMethodData* dst) { + const uint32_t dex_method_index = method.GetIndex(); + const dex::MethodId& method_id = dex_file_.GetMethodId(dex_method_index); uint32_t name_utf16_length; - const char* method_name = dex_file.GetStringDataAndUtf16Length(method_id.name_idx_, - &name_utf16_length); - std::string_view shorty = dex_file.GetShortyView(dex_file.GetProtoId(method_id.proto_idx_)); - - dst->SetDexMethodIndex(dex_method_idx); - dst->SetDeclaringClass(klass); + const char* method_name = dex_file_.GetStringDataAndUtf16Length(method_id.name_idx_, + &name_utf16_length); + std::string_view shorty = dex_file_.GetShortyView(dex_file_.GetProtoId(method_id.proto_idx_)); // Get access flags from the DexFile and set hiddenapi runtime access flags. uint32_t access_flags = method.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(method); @@ -3977,7 +4052,7 @@ void ClassLinker::LoadMethod(const DexFile& dex_file, // When initializing without a boot image, `Object` and `Enum` shall have the finalizable // flag cleared immediately after loading these classes, see `InitWithoutImage()`. if (shorty == "V") { - klass->SetFinalizable(); + has_finalizer_ = true; } } else if (method_name[0] == '<') { // Fix broken access flags for initializers. Bug 11157540. @@ -3986,50 +4061,49 @@ void ClassLinker::LoadMethod(const DexFile& dex_file, has_ascii_name("<clinit>", sizeof("<clinit>") - 1u)) << method_name; if (UNLIKELY((access_flags & kAccConstructor) == 0)) { LOG(WARNING) << method_name << " didn't have expected constructor access flag in class " - << klass->PrettyDescriptor() << " in dex file " << dex_file.GetLocation(); + << PrettyDescriptor(dex_file_.GetMethodDeclaringClassDescriptor(dex_method_index)) + << " in dex file " << dex_file_.GetLocation(); access_flags |= kAccConstructor; } } access_flags |= GetNterpFastPathFlags(shorty, access_flags, kRuntimeQuickCodeISA); + uint16_t imt_index_or_hotness_count = hotness_count_; + const void* data = nullptr; if (UNLIKELY((access_flags & kAccNative) != 0u)) { // Check if the native method is annotated with @FastNative or @CriticalNative. - const dex::MethodAnnotationsItem* method_annotations = mai->AdvanceTo(dex_method_idx); + const dex::MethodAnnotationsItem* method_annotations = mai->AdvanceTo(dex_method_index); if (method_annotations != nullptr) { access_flags |= - annotations::GetNativeMethodAnnotationAccessFlags(dex_file, *method_annotations); + annotations::GetNativeMethodAnnotationAccessFlags(dex_file_, *method_annotations); } - dst->SetAccessFlags(access_flags); - DCHECK(!dst->IsAbstract()); - DCHECK(!dst->HasCodeItem()); + DCHECK(!ArtMethod::IsAbstract(access_flags)); + DCHECK(!ArtMethod::NeedsCodeItem(access_flags)); DCHECK_EQ(method.GetCodeItemOffset(), 0u); - dst->SetDataPtrSize(nullptr, image_pointer_size_); // JNI stub/trampoline not linked yet. 
+ DCHECK(data == nullptr); // JNI stub/trampoline not linked yet. } else if ((access_flags & kAccAbstract) != 0u) { - dst->SetAccessFlags(access_flags); - // Must be done after SetAccessFlags since IsAbstract depends on it. - DCHECK(dst->IsAbstract()); - if (klass->IsInterface()) { - dst->CalculateAndSetImtIndex(); - } - DCHECK(!dst->HasCodeItem()); + DCHECK(ArtMethod::IsAbstract(access_flags)); + imt_index_or_hotness_count = is_interface_ + ? ImTable::GetImtIndexForAbstractMethod(dex_file_, dex_method_index) + : /* unused */ 0u; + DCHECK(!ArtMethod::NeedsCodeItem(access_flags)); DCHECK_EQ(method.GetCodeItemOffset(), 0u); - dst->SetDataPtrSize(nullptr, image_pointer_size_); // Single implementation not set yet. + DCHECK(data == nullptr); // Single implementation not set yet. } else { - const dex::MethodAnnotationsItem* method_annotations = mai->AdvanceTo(dex_method_idx); + const dex::MethodAnnotationsItem* method_annotations = mai->AdvanceTo(dex_method_index); if (method_annotations != nullptr && - annotations::MethodIsNeverCompile(dex_file, *method_annotations)) { + annotations::MethodIsNeverCompile(dex_file_, *method_annotations)) { access_flags |= kAccCompileDontBother; } - dst->SetAccessFlags(access_flags); - DCHECK(!dst->IsAbstract()); - DCHECK(dst->HasCodeItem()); + DCHECK(!ArtMethod::IsAbstract(access_flags)); + DCHECK(ArtMethod::NeedsCodeItem(access_flags)); uint32_t code_item_offset = method.GetCodeItemOffset(); DCHECK_NE(code_item_offset, 0u); - if (Runtime::Current()->IsAotCompiler()) { - dst->SetDataPtrSize(reinterpret_cast32<void*>(code_item_offset), image_pointer_size_); + if (is_aot_compiler_) { + data = reinterpret_cast32<void*>(code_item_offset); } else { - dst->SetCodeItem(dex_file.GetCodeItem(code_item_offset)); + data = dex_file_.GetCodeItem(code_item_offset); } } @@ -4038,46 +4112,223 @@ void ClassLinker::LoadMethod(const DexFile& dex_file, !Runtime::Current()->GetJITOptions()->GetProfileSaverOptions().GetProfileBootClassPath()) { DCHECK(!ArtMethod::IsAbstract(access_flags)); DCHECK(!ArtMethod::IsIntrinsic(access_flags)); - dst->SetMemorySharedMethod(); - dst->SetHotCounter(); + access_flags = ArtMethod::SetMemorySharedMethod(access_flags); + imt_index_or_hotness_count = 0u; // Mark the method as hot. } + + dst->access_flags = access_flags; + dst->dex_method_index = dex_method_index; + dst->method_index = 0u; + dst->imt_index_or_hotness_count = imt_index_or_hotness_count; + dst->data = data; + dst->entrypoint = nullptr; } -inline void ClassLinker::LinkCode(ArtMethod* method, - uint32_t class_def_method_index, - /*inout*/ OatClassCodeIterator* occi) { - ScopedAssertNoThreadSuspension sants(__FUNCTION__); - Runtime* const runtime = Runtime::Current(); - if (runtime->IsAotCompiler()) { +inline void ClassLinker::LoadClassHelper::LinkCode(ArtMethodData* method, + uint32_t class_def_method_index, + /*inout*/ OatClassCodeIterator* occi) { + if (is_aot_compiler_) { // The following code only applies to a non-compiler runtime. return; } // Method shouldn't have already been linked. - DCHECK_EQ(method->GetEntryPointFromQuickCompiledCode(), nullptr); - DCHECK(!method->GetDeclaringClass()->IsVisiblyInitialized()); // Actually ClassStatus::Idx. 
+ DCHECK_EQ(method->entrypoint, nullptr); - if (!method->IsInvokable()) { - EnsureThrowsInvocationError(this, method); + if (!ArtMethod::IsInvokable(method->access_flags)) { + method->entrypoint = GetQuickToInterpreterBridge(); occi->SkipAbstract(class_def_method_index); return; } const void* quick_code = occi->GetAndAdvance(class_def_method_index); - if (method->IsNative() && quick_code == nullptr) { - const void* boot_jni_stub = FindBootJniStub(method); + if (ArtMethod::IsNative(method->access_flags) && quick_code == nullptr) { + std::string_view shorty = dex_file_.GetMethodShortyView(method->dex_method_index); + const void* boot_jni_stub = + runtime_->GetClassLinker()->FindBootJniStub(method->access_flags, shorty); if (boot_jni_stub != nullptr) { // Use boot JNI stub if found. quick_code = boot_jni_stub; } } - runtime->GetInstrumentation()->InitializeMethodsCode(method, quick_code); + method->entrypoint = quick_code; - if (method->IsNative()) { + if (ArtMethod::IsNative(method->access_flags)) { // Set up the dlsym lookup stub. Do not go through `UnregisterNative()` // as the extra processing for @CriticalNative is not needed yet. - method->SetEntryPointFromJni( - method->IsCriticalNative() ? GetJniDlsymLookupCriticalStub() : GetJniDlsymLookupStub()); + method->data = ArtMethod::IsCriticalNative(method->access_flags) + ? GetJniDlsymLookupCriticalStub() + : GetJniDlsymLookupStub(); + } +} + +void ClassLinker::LoadClassHelper::Load(const ClassAccessor& accessor, + const dex::ClassDef& dex_class_def, + const OatFile::OatClass& oat_class) { + DCHECK(fields_.empty()); + DCHECK(methods_.empty()); + DCHECK_EQ(num_direct_methods_, 0u); + DCHECK(!has_finalizer_); + + size_t num_fields = accessor.NumFields(); + size_t num_methods = accessor.NumMethods(); + ArrayRef<ArtFieldData> fields(allocator_.AllocArray<ArtFieldData>(num_fields), num_fields); + ArrayRef<ArtMethodData> methods(allocator_.AllocArray<ArtMethodData>(num_methods), num_methods); + + size_t num_loaded_fields = 0u; + size_t num_sfields = 0u; + size_t num_ifields = 0u; + uint32_t last_static_field_idx = 0u; + uint32_t last_instance_field_idx = 0u; + + OatClassCodeIterator occi(oat_class); + size_t class_def_method_index = 0; + uint32_t last_dex_method_index = dex::kDexNoIndex; + size_t last_class_def_method_index = 0; + + // Initialize separate `MethodAnnotationsIterator`s for direct and virtual methods. + MethodAnnotationsIterator mai_direct(dex_file_, dex_file_.GetAnnotationsDirectory(dex_class_def)); + MethodAnnotationsIterator mai_virtual = mai_direct; + + // Use the visitor since the ranged based loops are bit slower from seeking. Seeking to the + // methods needs to decode all of the fields. + accessor.VisitFieldsAndMethods([&]( + // We allow duplicate definitions of the same field in a class_data_item + // but ignore the repeated indexes here, b/21868015. + const ClassAccessor::Field& field) { + uint32_t field_idx = field.GetIndex(); + DCHECK_GE(field_idx, last_static_field_idx); // Ordering enforced by DexFileVerifier. + if (num_sfields == 0 || LIKELY(field_idx > last_static_field_idx)) { + LoadField(field, &fields[num_loaded_fields]); + ++num_loaded_fields; + ++num_sfields; + last_static_field_idx = field_idx; + } + }, [&](const ClassAccessor::Field& field) { + uint32_t field_idx = field.GetIndex(); + DCHECK_GE(field_idx, last_instance_field_idx); // Ordering enforced by DexFileVerifier. 
+ if (num_ifields == 0 || LIKELY(field_idx > last_instance_field_idx)) { + LoadField(field, &fields[num_loaded_fields]); + ++num_loaded_fields; + ++num_ifields; + last_instance_field_idx = field_idx; + } + }, [&](const ClassAccessor::Method& method) { + ArtMethodData* method_data = &methods[class_def_method_index]; + LoadMethod(method, &mai_direct, method_data); + LinkCode(method_data, class_def_method_index, &occi); + uint32_t it_method_index = method.GetIndex(); + if (last_dex_method_index == it_method_index) { + // duplicate case + method_data->method_index = last_class_def_method_index; + } else { + method_data->method_index = class_def_method_index; + last_dex_method_index = it_method_index; + last_class_def_method_index = class_def_method_index; + } + ++class_def_method_index; + }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) { + ArtMethodData* method_data = &methods[class_def_method_index]; + LoadMethod(method, &mai_virtual, method_data); + LinkCode(method_data, class_def_method_index, &occi); + DCHECK_EQ(method_data->method_index, 0u); // Shall be updated in `LinkMethods()`. + ++class_def_method_index; + }); + + if (UNLIKELY(num_loaded_fields != num_fields)) { + LOG(WARNING) << "Duplicate fields in class " + << PrettyDescriptor(dex_file_.GetFieldDeclaringClassDescriptor(fields[0].dex_field_index)) + << " (unique static fields: " << num_sfields << "/" << accessor.NumStaticFields() + << ", unique instance fields: " << num_ifields << "/" << accessor.NumInstanceFields() + << ")"; + DCHECK_LT(num_loaded_fields, num_fields); + fields = fields.SubArray(/*pos=*/ 0u, num_loaded_fields); + } + + // Sort the fields by dex field index to facilitate fast lookups. + std::sort(fields.begin(), + fields.end(), + [](ArtFieldData& lhs, ArtFieldData& rhs) { + return lhs.dex_field_index < rhs.dex_field_index; + }); + + fields_ = fields; + methods_ = methods; + num_direct_methods_ = accessor.NumDirectMethods(); +} + +void ClassLinker::LoadClassHelper::FillFields(ObjPtr<mirror::Class> klass, + LengthPrefixedArray<ArtField>* fields) { + DCHECK_IMPLIES(!fields_.empty(), fields != nullptr); + DCHECK_EQ(fields_.size(), (fields != nullptr) ? fields->size() : 0u); + for (size_t i = 0, size = fields_.size(); i != size; ++i) { + const ArtFieldData& src = fields_[i]; + ArtField* dst = &fields->At(i); + dst->SetDeclaringClass(klass); + dst->SetAccessFlags(src.access_flags); + dst->SetDexFieldIndex(src.dex_field_index); + // The `ArtField::offset_` shall be set in `LinkFields()`. + } +} + +template <PointerSize kPointerSize> +void ClassLinker::LoadClassHelper::FillMethods(ObjPtr<mirror::Class> klass, + LengthPrefixedArray<ArtMethod>* methods) { + DCHECK_IMPLIES(!methods_.empty(), methods != nullptr); + DCHECK_EQ(methods_.size(), (methods != nullptr) ? methods->size() : 0u); + static constexpr size_t kMethodAlignment = ArtMethod::Alignment(kPointerSize); + static constexpr size_t kMethodSize = ArtMethod::Size(kPointerSize); + instrumentation::Instrumentation* instr = + is_aot_compiler_ ? 
nullptr : runtime_->GetInstrumentation(); + for (size_t i = 0, size = methods_.size(); i != size; ++i) { + const ArtMethodData& src = methods_[i]; + ArtMethod* dst = &methods->At(i, kMethodSize, kMethodAlignment); + dst->SetDeclaringClass(klass); + uint32_t access_flags = src.access_flags; + dst->SetAccessFlags(access_flags); + dst->SetDexMethodIndex(src.dex_method_index); + dst->SetMethodIndex(src.method_index); + // Note: We set the appropriate field of the union (`imt_index_` or `hotness_count_`) + // as required by the C++ standard but we expect the C++ compiler to optimize away + // the condition and just copy the `imt_index_or_hotness_count` directly. + if (ArtMethod::IsInvokable(access_flags)) { + dst->SetHotnessCount(src.imt_index_or_hotness_count); + } else { + // For abstract non-interface methods, the value shall not be used. + DCHECK_IMPLIES(!is_interface_, src.imt_index_or_hotness_count == 0u); + dst->SetImtIndex(src.imt_index_or_hotness_count); + } + DCHECK_IMPLIES(dst->IsMemorySharedMethod(), !dst->IsAbstract()); + DCHECK_IMPLIES(dst->IsMemorySharedMethod(), dst->CounterIsHot()); + DCHECK_IMPLIES(!dst->IsAbstract() && !dst->IsMemorySharedMethod(), + dst->GetCounter() == hotness_count_); + dst->SetDataPtrSize(src.data, kPointerSize); + if (instr != nullptr) { + DCHECK_IMPLIES(dst->IsNative(), dst->GetEntryPointFromJniPtrSize(kPointerSize) == src.data); + if (ArtMethod::IsInvokable(access_flags)) { + instr->InitializeMethodsCode(dst, src.entrypoint); + } else { + DCHECK_EQ(src.entrypoint, GetQuickToInterpreterBridge()); + dst->SetEntryPointFromQuickCompiledCodePtrSize(src.entrypoint, kPointerSize); + } + } + } +} + +void ClassLinker::LoadClassHelper::Commit(Handle<mirror::Class> klass, + PointerSize pointer_size, + LengthPrefixedArray<ArtField>* fields, + LengthPrefixedArray<ArtMethod>* methods) { + FillFields(klass.Get(), fields); + if (pointer_size == PointerSize::k64) { + FillMethods<PointerSize::k64>(klass.Get(), methods); + } else { + FillMethods<PointerSize::k32>(klass.Get(), methods); + } + klass->SetFieldsPtr(fields); + klass->SetMethodsPtr(methods, num_direct_methods_, methods_.size() - num_direct_methods_); + if (has_finalizer_) { + klass->SetFinalizable(); } } @@ -4094,110 +4345,26 @@ void ClassLinker::LoadClass(Thread* self, } Runtime* const runtime = Runtime::Current(); { - // Note: We cannot have thread suspension until the field and method arrays are setup or else - // Class::VisitFieldRoots may miss some fields or methods. - ScopedAssertNoThreadSuspension nts(__FUNCTION__); - // Load static fields. - // We allow duplicate definitions of the same field in a class_data_item - // but ignore the repeated indexes here, b/21868015. - LinearAlloc* const allocator = GetAllocatorForClassLoader(klass->GetClassLoader()); - LengthPrefixedArray<ArtField>* fields = - AllocArtFieldArray(self, - allocator, - accessor.NumStaticFields() + accessor.NumInstanceFields()); - size_t num_fields = 0u; - size_t num_sfields = 0u; - size_t num_ifields = 0u; - uint32_t last_static_field_idx = 0u; - uint32_t last_instance_field_idx = 0u; - - // Methods bool has_oat_class = false; const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler()) ? 
OatFile::FindOatClass(dex_file, klass->GetDexClassDefIndex(), &has_oat_class) : OatFile::OatClass::Invalid(); - OatClassCodeIterator occi(oat_class); - klass->SetMethodsPtr( - AllocArtMethodArray(self, allocator, accessor.NumMethods()), - accessor.NumDirectMethods(), - accessor.NumVirtualMethods()); - size_t class_def_method_index = 0; - uint32_t last_dex_method_index = dex::kDexNoIndex; - size_t last_class_def_method_index = 0; - - // Initialize separate `MethodAnnotationsIterator`s for direct and virtual methods. - MethodAnnotationsIterator mai_direct(dex_file, dex_file.GetAnnotationsDirectory(dex_class_def)); - MethodAnnotationsIterator mai_virtual = mai_direct; - - uint16_t hotness_threshold = runtime->GetJITOptions()->GetWarmupThreshold(); - // Use the visitor since the ranged based loops are bit slower from seeking. Seeking to the - // methods needs to decode all of the fields. - accessor.VisitFieldsAndMethods([&]( - const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) { - uint32_t field_idx = field.GetIndex(); - DCHECK_GE(field_idx, last_static_field_idx); // Ordering enforced by DexFileVerifier. - if (num_sfields == 0 || LIKELY(field_idx > last_static_field_idx)) { - LoadField(field, klass, &fields->At(num_fields)); - ++num_fields; - ++num_sfields; - last_static_field_idx = field_idx; - } - }, [&](const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) { - uint32_t field_idx = field.GetIndex(); - DCHECK_GE(field_idx, last_instance_field_idx); // Ordering enforced by DexFileVerifier. - if (num_ifields == 0 || LIKELY(field_idx > last_instance_field_idx)) { - LoadField(field, klass, &fields->At(num_fields)); - ++num_fields; - ++num_ifields; - last_instance_field_idx = field_idx; - } - }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) { - ArtMethod* art_method = klass->GetDirectMethodUnchecked(class_def_method_index, - image_pointer_size_); - LoadMethod(dex_file, method, klass.Get(), &mai_direct, art_method); - LinkCode(art_method, class_def_method_index, &occi); - uint32_t it_method_index = method.GetIndex(); - if (last_dex_method_index == it_method_index) { - // duplicate case - art_method->SetMethodIndex(last_class_def_method_index); - } else { - art_method->SetMethodIndex(class_def_method_index); - last_dex_method_index = it_method_index; - last_class_def_method_index = class_def_method_index; - } - art_method->ResetCounter(hotness_threshold); - ++class_def_method_index; - }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) { - ArtMethod* art_method = klass->GetVirtualMethodUnchecked( - class_def_method_index - accessor.NumDirectMethods(), - image_pointer_size_); - art_method->ResetCounter(hotness_threshold); - LoadMethod(dex_file, method, klass.Get(), &mai_virtual, art_method); - LinkCode(art_method, class_def_method_index, &occi); - ++class_def_method_index; - }); - - if (UNLIKELY(num_fields != accessor.NumFields())) { - LOG(WARNING) << "Duplicate fields in class " << klass->PrettyDescriptor() - << " (unique static fields: " << num_sfields << "/" << accessor.NumStaticFields() - << ", unique instance fields: " << num_ifields << "/" << accessor.NumInstanceFields() - << ")"; - // NOTE: Not shrinking the over-allocated fields, just setting size. - if (fields != nullptr) { - fields->SetSize(num_fields); - } - } - if (fields != nullptr) { - // Sort the fields by dex field index to facilitate fast lookups. 
- std::sort(fields->begin(), - fields->end(), - [](ArtField& lhs, ArtField& rhs) { - return lhs.GetDexFieldIndex() < rhs.GetDexFieldIndex(); - }); + LoadClassHelper helper(runtime, dex_file, klass->IsInterface()); + { + ScopedThreadSuspension sts(self, ThreadState::kNative); + helper.Load(accessor, dex_class_def, oat_class); } - // Set the field array. - klass->SetFieldsPtr(fields); + // Note: We cannot have thread suspension until the field and method arrays are setup or else + // Class::VisitFieldRoots may miss some fields or methods. + ScopedAssertNoThreadSuspension nts(__FUNCTION__); + + LinearAlloc* allocator = GetAllocatorForClassLoader(klass->GetClassLoader()); + LengthPrefixedArray<ArtField>* fields = + AllocArtFieldArray(self, allocator, helper.NumFields()); + LengthPrefixedArray<ArtMethod>* methods = + AllocArtMethodArray(self, allocator, helper.NumMethods()); + helper.Commit(klass, image_pointer_size_, fields, methods); } // Ensure that the card is marked so that remembered sets pick up native roots. WriteBarrier::ForEveryFieldWrite(klass.Get()); |
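
As a side note on the field handling in `LoadClassHelper::Load()` above: duplicate field definitions in a `class_data_item` are skipped by comparing each dex field index against the last one seen (the dex verifier guarantees non-decreasing order), and the surviving entries are then sorted by dex field index to support fast lookups. A standalone sketch of that filtering and sorting, using hypothetical types rather than ART's `ClassAccessor` and `ArtFieldData`, might look as follows; the real code tracks static and instance fields separately before they end up in one array.

```cpp
#include <algorithm>
#include <cstdint>
#include <vector>

struct FieldData {
  uint32_t access_flags;
  uint32_t dex_field_index;
};

// Keep only the first occurrence of each dex field index (input is assumed
// non-decreasing, as the verifier enforces), then sort by dex field index.
std::vector<FieldData> LoadFields(const std::vector<FieldData>& raw) {
  std::vector<FieldData> loaded;
  loaded.reserve(raw.size());
  uint32_t last_index = 0;
  for (const FieldData& f : raw) {
    // Repeated indexes are ignored (compare b/21868015 in the change above).
    if (loaded.empty() || f.dex_field_index > last_index) {
      loaded.push_back(f);
      last_index = f.dex_field_index;
    }
  }
  std::sort(loaded.begin(), loaded.end(),
            [](const FieldData& lhs, const FieldData& rhs) {
              return lhs.dex_field_index < rhs.dex_field_index;
            });
  return loaded;
}

int main() {
  std::vector<FieldData> raw = {{1u, 4u}, {1u, 4u}, {2u, 6u}, {4u, 9u}};
  return LoadFields(raw).size() == 3 ? 0 : 1;  // one duplicate dropped
}
```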