-rw-r--r--  dex2oat/dex2oat_test.cc          3
-rw-r--r--  dex2oat/linker/image_writer.cc   206
-rw-r--r--  dex2oat/linker/image_writer.h    35
-rw-r--r--  runtime/class_linker.cc          121
-rw-r--r--  runtime/class_linker.h           4
-rw-r--r--  runtime/common_runtime_test.h    6
-rw-r--r--  runtime/image.h                  32
7 files changed, 233 insertions, 174 deletions
diff --git a/dex2oat/dex2oat_test.cc b/dex2oat/dex2oat_test.cc
index fbad1af55a..898940a948 100644
--- a/dex2oat/dex2oat_test.cc
+++ b/dex2oat/dex2oat_test.cc
@@ -2074,9 +2074,6 @@ TEST_F(Dex2oatTest, AppImageNoProfile) {
 }
 
 TEST_F(Dex2oatTest, AppImageResolveStrings) {
-  if (!ClassLinker::kAppImageMayContainStrings) {
-    TEST_DISABLED();
-  }
   using Hotness = ProfileCompilationInfo::MethodHotness;
   // Create a profile with the startup method marked.
   ScratchFile profile_file;
diff --git a/dex2oat/linker/image_writer.cc b/dex2oat/linker/image_writer.cc
index ddc9f433cf..60a4a32e7e 100644
--- a/dex2oat/linker/image_writer.cc
+++ b/dex2oat/linker/image_writer.cc
@@ -224,7 +224,7 @@ bool ImageWriter::PrepareImageAddressSpace(TimingLogger* timings) {
 
   // Used to store information that will later be used to calculate image
   // offsets to string references in the AppImage.
-  std::vector<RefInfoPair> string_ref_info;
+  std::vector<HeapReferencePointerInfo> string_ref_info;
   if (ClassLinker::kAppImageMayContainStrings && compile_app_image_) {
     // Count the number of string fields so we can allocate the appropriate
     // amount of space in the image section.
@@ -273,36 +273,31 @@ bool ImageWriter::PrepareImageAddressSpace(TimingLogger* timings) {
     TimingLogger::ScopedTiming t("AppImage:CalculateImageOffsets", timings);
     ScopedObjectAccess soa(self);
 
-    size_t managed_string_refs = 0,
-           native_string_refs = 0;
+    size_t managed_string_refs = 0;
+    size_t native_string_refs = 0;
 
     /*
     * Iterate over the string reference info and calculate image offsets.
-    * The first element of the pair is the object the reference belongs to
-    * and the second element is the offset to the field. If the offset has
-    * a native ref tag 1) the object is a DexCache and 2) the offset needs
-    * to be calculated using the relocation information for the DexCache's
-    * strings array.
+    * The first element of the pair is either the object the reference belongs
+    * to or the beginning of the native reference array it is located in. In
+    * the first case the second element is the offset of the field relative to
+    * the object's base address. In the second case, it is the index of the
+    * StringDexCacheType object in the array.
     */
-    for (const RefInfoPair& ref_info : string_ref_info) {
-      uint32_t image_offset;
+    for (const HeapReferencePointerInfo& ref_info : string_ref_info) {
+      uint32_t base_offset;
 
-      if (HasNativeRefTag(ref_info.second)) {
+      if (HasDexCacheNativeRefTag(ref_info.first)) {
        ++native_string_refs;
 
-        // Only DexCaches can contain native references to Java strings.
-        ObjPtr<mirror::DexCache> dex_cache(ref_info.first->AsDexCache());
-
-        // No need to set or clear native ref tags. The existing tag will be
-        // carried forward.
-        image_offset = native_object_relocations_[dex_cache->GetStrings()].offset +
-                       ref_info.second;
+        auto* obj_ptr = reinterpret_cast<mirror::Object*>(ClearDexCacheNativeRefTag(
+            ref_info.first));
+        base_offset = SetDexCacheNativeRefTag(GetImageOffset(obj_ptr));
       } else {
         ++managed_string_refs;
 
-        image_offset = GetImageOffset(ref_info.first) + ref_info.second;
+        base_offset = GetImageOffset(reinterpret_cast<mirror::Object*>(ref_info.first));
       }
 
-      string_reference_offsets_.push_back(image_offset);
+      string_reference_offsets_.emplace_back(base_offset, ref_info.second);
     }
 
     CHECK_EQ(image_infos_.back().num_string_references_,
@@ -316,18 +311,16 @@ bool ImageWriter::PrepareImageAddressSpace(TimingLogger* timings) {
   // This needs to happen after CalculateNewObjectOffsets since it relies on intern_table_bytes_ and
   // bin size sums being calculated.
   TimingLogger::ScopedTiming t("AllocMemory", timings);
-  if (!AllocMemory()) {
-    return false;
-  }
-
-  return true;
+  return AllocMemory();
 }
 
 class ImageWriter::CollectStringReferenceVisitor {
  public:
   explicit CollectStringReferenceVisitor(const ImageWriter& image_writer)
-      : dex_cache_string_ref_counter_(0),
-        image_writer_(image_writer) {}
+      : image_writer_(image_writer),
+        curr_obj_(nullptr),
+        string_ref_info_(0),
+        dex_cache_string_ref_counter_(0) {}
 
   // Used to prevent repeated null checks in the code that calls the visitor.
   ALWAYS_INLINE
@@ -353,7 +346,7 @@ class ImageWriter::CollectStringReferenceVisitor {
     }
   }
 
-  // Collects info for Java fields that reference Java Strings.
+  // Collects info for managed fields that reference managed Strings.
   ALWAYS_INLINE
   void operator() (ObjPtr<mirror::Object> obj,
                    MemberOffset member_offset,
@@ -364,7 +357,8 @@ class ImageWriter::CollectStringReferenceVisitor {
                                                                         member_offset);
 
     if (image_writer_.IsValidAppImageStringReference(referred_obj)) {
-      string_ref_info_.emplace_back(obj.Ptr(), member_offset.Uint32Value());
+      string_ref_info_.emplace_back(reinterpret_cast<uintptr_t>(obj.Ptr()),
+                                    member_offset.Uint32Value());
     }
   }
 
@@ -375,84 +369,104 @@ class ImageWriter::CollectStringReferenceVisitor {
     operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
   }
 
+  void AddStringRefInfo(uint32_t first, uint32_t second) {
+    string_ref_info_.emplace_back(first, second);
+  }
+
+  std::vector<HeapReferencePointerInfo>&& MoveRefInfo() {
+    return std::move(string_ref_info_);
+  }
+
   // Used by the wrapper function to obtain a native reference count.
-  size_t GetDexCacheStringRefCounter() const {
+  size_t GetDexCacheStringRefCount() const {
     return dex_cache_string_ref_counter_;
   }
 
-  // Resets the native reference count.
-  void ResetDexCacheStringRefCounter() {
+  void SetObject(ObjPtr<mirror::Object> obj) {
+    curr_obj_ = obj;
     dex_cache_string_ref_counter_ = 0;
   }
 
-  ObjPtr<mirror::Object> curr_obj_;
-  mutable std::vector<RefInfoPair> string_ref_info_;
-
  private:
-  mutable size_t dex_cache_string_ref_counter_;
   const ImageWriter& image_writer_;
+  ObjPtr<mirror::Object> curr_obj_;
+  mutable std::vector<HeapReferencePointerInfo> string_ref_info_;
+  mutable size_t dex_cache_string_ref_counter_;
 };
 
-std::vector<ImageWriter::RefInfoPair> ImageWriter::CollectStringReferenceInfo() const
+std::vector<ImageWriter::HeapReferencePointerInfo> ImageWriter::CollectStringReferenceInfo() const
     REQUIRES_SHARED(Locks::mutator_lock_) {
   gc::Heap* const heap = Runtime::Current()->GetHeap();
   CollectStringReferenceVisitor visitor(*this);
 
+  /*
+   * References to managed strings can occur either in the managed heap or in
+   * native memory regions. Information about managed references is collected
+   * by the CollectStringReferenceVisitor and directly added to the internal
+   * info vector.
+   *
+   * Native references to managed strings can only occur through DexCache
+   * objects. This is verified by VerifyNativeGCRootInvariants(). Due to the
+   * fact that these native references are encapsulated in std::atomic objects
+   * the VisitReferences() function can't pass the visiting object the address
+   * of the reference. Instead, the VisitReferences() function loads the
+   * reference into a temporary variable and passes that address to the
+   * visitor. As a consequence of this we can't uniquely identify the location
+   * of the string reference in the visitor.
+   *
+   * Due to these limitations, the visitor will only count the number of
+   * managed strings reachable through the native references of a DexCache
+   * object. If there are any such strings, this function will then iterate
+   * over the native references, test the string for membership in the
+   * AppImage, and add the tagged DexCache pointer and string array offset to
+   * the info vector if necessary.
+   */
   heap->VisitObjects([this, &visitor](ObjPtr<mirror::Object> object)
     REQUIRES_SHARED(Locks::mutator_lock_) {
     if (!IsInBootImage(object.Ptr())) {
-      // Many native GC roots are wrapped in std::atomics. Due to the
-      // semantics of atomic objects we can't actually visit the addresses of
-      // the native GC roots. Instead the visiting functions will pass the
-      // visitor the address of a temporary copy of the native GC root and, if
-      // it is changed, copy it back into the original location.
-      //
-      // This analysis requires the actual address of the native GC root so
-      // we will only count them in the visitor and then collect them manually
-      // afterwards. This count will then be used to verify that we collected
-      // all native GC roots.
-      visitor.curr_obj_ = object;
-      if (object->IsDexCache()) {
-        object->VisitReferences</* kVisitNativeRoots */ true,
-                                kVerifyNone,
-                                kWithoutReadBarrier>(visitor, visitor);
-
-        ObjPtr<mirror::DexCache> dex_cache = object->AsDexCache();
-        size_t new_native_ref_counter = 0;
-
-        for (size_t string_index = 0; string_index < dex_cache->NumStrings(); ++string_index) {
-          mirror::StringDexCachePair dc_pair = dex_cache->GetStrings()[string_index].load();
-          mirror::Object* referred_obj = dc_pair.object.AddressWithoutBarrier()->AsMirrorPtr();
+      visitor.SetObject(object);
 
-          if (IsValidAppImageStringReference(referred_obj)) {
-            ++new_native_ref_counter;
-
-            uint32_t string_vector_offset =
-                (string_index * sizeof(mirror::StringDexCachePair)) +
-                offsetof(mirror::StringDexCachePair, object);
-
-            visitor.string_ref_info_.emplace_back(object.Ptr(),
-                                                  SetNativeRefTag(string_vector_offset));
+      if (object->IsDexCache()) {
+        object->VisitReferences</* kVisitNativeRoots= */ true,
+                                kVerifyNone,
+                                kWithoutReadBarrier>(visitor, visitor);
+
+        if (visitor.GetDexCacheStringRefCount() > 0) {
+          size_t string_info_collected = 0;
+
+          ObjPtr<mirror::DexCache> dex_cache = object->AsDexCache();
+          DCHECK_LE(visitor.GetDexCacheStringRefCount(), dex_cache->NumStrings());
+
+          for (uint32_t index = 0; index < dex_cache->NumStrings(); ++index) {
+            // GetResolvedString() can't be used here due to the circular
+            // nature of the cache and the collision detection this requires.
+            ObjPtr<mirror::String> referred_string =
+                dex_cache->GetStrings()[index].load().object.Read();
+
+            if (IsValidAppImageStringReference(referred_string)) {
+              ++string_info_collected;
+              visitor.AddStringRefInfo(
+                  SetDexCacheNativeRefTag(reinterpret_cast<uintptr_t>(object.Ptr())), index);
+            }
           }
-        }
 
-        CHECK_EQ(visitor.GetDexCacheStringRefCounter(), new_native_ref_counter);
+          DCHECK_EQ(string_info_collected, visitor.GetDexCacheStringRefCount());
+        }
       } else {
-        object->VisitReferences</* kVisitNativeRoots */ false,
-                                kVerifyNone,
-                                kWithoutReadBarrier>(visitor, visitor);
+        object->VisitReferences</* kVisitNativeRoots= */ false,
+                                kVerifyNone,
+                                kWithoutReadBarrier>(visitor, visitor);
       }
-
-      visitor.ResetDexCacheStringRefCounter();
     }
   });
 
-  return std::move(visitor.string_ref_info_);
+  return visitor.MoveRefInfo();
 }
 
 class ImageWriter::NativeGCRootInvariantVisitor {
  public:
   explicit NativeGCRootInvariantVisitor(const ImageWriter& image_writer) :
+    curr_obj_(nullptr),
     class_violation_(false),
     class_loader_violation_(false),
     image_writer_(image_writer) {}
 
   ALWAYS_INLINE
@@ -469,12 +483,12 @@ class ImageWriter::NativeGCRootInvariantVisitor {
     ObjPtr<mirror::Object> referred_obj = root->AsMirrorPtr();
 
     if (curr_obj_->IsClass()) {
-      class_violation = class_violation ||
-                        image_writer_.IsValidAppImageStringReference(referred_obj);
+      class_violation_ = class_violation_ ||
+                         image_writer_.IsValidAppImageStringReference(referred_obj);
 
     } else if (curr_obj_->IsClassLoader()) {
-      class_loader_violation = class_loader_violation ||
-                               image_writer_.IsValidAppImageStringReference(referred_obj);
+      class_loader_violation_ = class_loader_violation_ ||
+                                image_writer_.IsValidAppImageStringReference(referred_obj);
 
     } else if (!curr_obj_->IsDexCache()) {
       LOG(FATAL) << "Dex2Oat:AppImage | " <<
@@ -495,12 +509,12 @@ class ImageWriter::NativeGCRootInvariantVisitor {
 
   // Returns true iff the only reachable native string references are through DexCache objects.
   bool InvariantsHold() const {
-    return !(class_violation || class_loader_violation);
+    return !(class_violation_ || class_loader_violation_);
   }
 
   ObjPtr<mirror::Object> curr_obj_;
-  mutable bool class_violation = false,
-               class_loader_violation = false;
+  mutable bool class_violation_;
+  mutable bool class_loader_violation_;
 
  private:
   const ImageWriter& image_writer_;
@@ -516,9 +530,9 @@ void ImageWriter::VerifyNativeGCRootInvariants() const REQUIRES_SHARED(Locks::mu
     visitor.curr_obj_ = object;
 
     if (!IsInBootImage(object.Ptr())) {
-      object->VisitReferences</* kVisitNativeRefernces */ true,
-                              kVerifyNone,
-                              kWithoutReadBarrier>(visitor, visitor);
+      object->VisitReferences</* kVisitNativeReferences= */ true,
+                              kVerifyNone,
+                              kWithoutReadBarrier>(visitor, visitor);
     }
   });
 
@@ -529,12 +543,12 @@ void ImageWriter::VerifyNativeGCRootInvariants() const REQUIRES_SHARED(Locks::mu
    * Build the error string
    */
 
-  if (UNLIKELY(visitor.class_violation)) {
+  if (UNLIKELY(visitor.class_violation_)) {
     error_str << "Class";
     error = true;
   }
 
-  if (UNLIKELY(visitor.class_loader_violation)) {
+  if (UNLIKELY(visitor.class_loader_violation_)) {
     if (error) {
       error_str << ", ";
     }
@@ -543,8 +557,8 @@ void ImageWriter::VerifyNativeGCRootInvariants() const REQUIRES_SHARED(Locks::mu
   }
 
   CHECK(visitor.InvariantsHold()) <<
-      "Native GC root invariant failure. String refs reachable through the following objects: " <<
-      error_str.str();
+      "Native GC root invariant failure. String ref invariants don't hold for the following " <<
+      "object types: " << error_str.str();
 }
 
 void ImageWriter::CopyMetadata() {
@@ -554,7 +568,7 @@ void ImageWriter::CopyMetadata() {
   const ImageInfo& image_info = image_infos_.back();
   std::vector<ImageSection> image_sections = image_info.CreateImageSections().second;
 
-  uint32_t* sfo_section_base = reinterpret_cast<uint32_t*>(
+  auto* sfo_section_base = reinterpret_cast<AppImageReferenceOffsetInfo*>(
       image_info.image_.Begin() +
       image_sections[ImageHeader::kSectionStringReferenceOffsets].Offset());
 
@@ -2315,9 +2329,15 @@ std::pair<size_t, std::vector<ImageSection>> ImageWriter::ImageInfo::CreateImage
   // Round up to the alignment of the offsets we are going to store.
   cur_pos = RoundUp(class_table_section.End(), sizeof(uint32_t));
 
+  // The size of string_reference_offsets_ can't be used here because it hasn't
+  // been filled with AppImageReferenceOffsetInfo objects yet. The
+  // num_string_references_ value is calculated separately, before we can
+  // compute the actual offsets.
   const ImageSection& string_reference_offsets =
       sections[ImageHeader::kSectionStringReferenceOffsets] =
-          ImageSection(cur_pos, sizeof(uint32_t) * num_string_references_);
+          ImageSection(cur_pos,
+                       sizeof(typename decltype(string_reference_offsets_)::value_type) *
+                           num_string_references_);
 
   // Return the number of bytes described by these sections, and the sections
   // themselves.
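The image_writer.cc changes above collect HeapReferencePointerInfo pairs and then, inside PrepareImageAddressSpace(), turn each pair into an AppImageReferenceOffsetInfo entry by replacing the raw pointer with an image offset while carrying the DexCache tag across. Below is a minimal standalone sketch of that translation step. SetTag/HasTag/ClearTag are local stand-ins for the image.h helpers and get_image_offset is a hypothetical callback standing in for ImageWriter::GetImageOffset(), so this illustrates the encoding only, not the actual ART code.

#include <cstdint>
#include <utility>
#include <vector>

using HeapReferencePointerInfo = std::pair<uintptr_t, uint32_t>;
using AppImageReferenceOffsetInfo = std::pair<uint32_t, uint32_t>;

constexpr uintptr_t SetTag(uintptr_t v) { return v | 1u; }
constexpr bool HasTag(uintptr_t v) { return (v & 1u) == 1u; }
constexpr uintptr_t ClearTag(uintptr_t v) { return v & ~static_cast<uintptr_t>(1u); }

template <typename GetImageOffset>
std::vector<AppImageReferenceOffsetInfo> EncodeStringRefs(
    const std::vector<HeapReferencePointerInfo>& refs,
    GetImageOffset get_image_offset) {
  std::vector<AppImageReferenceOffsetInfo> out;
  out.reserve(refs.size());
  for (const HeapReferencePointerInfo& ref : refs) {
    if (HasTag(ref.first)) {
      // DexCache case: strip the tag to recover the object pointer, convert it
      // to an image offset, and re-apply the tag so the loader knows the second
      // element is a string-slot index rather than a field offset.
      uint32_t base = static_cast<uint32_t>(get_image_offset(ClearTag(ref.first)));
      out.emplace_back(static_cast<uint32_t>(SetTag(base)), ref.second);
    } else {
      // Managed-field case: the second element is already an offset within the
      // object, so only the base pointer needs translating.
      out.emplace_back(static_cast<uint32_t>(get_image_offset(ref.first)), ref.second);
    }
  }
  return out;
}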
diff --git a/dex2oat/linker/image_writer.h b/dex2oat/linker/image_writer.h
index 7bdaebef2a..9bde027449 100644
--- a/dex2oat/linker/image_writer.h
+++ b/dex2oat/linker/image_writer.h
@@ -581,19 +581,27 @@ class ImageWriter final {
       REQUIRES_SHARED(Locks::mutator_lock_);
 
   /*
-   * A pair containing the information necessary to calculate the position of a
-   * managed object's field or native reference inside an AppImage.
+   * This type holds the information necessary for calculating
+   * AppImageReferenceOffsetInfo values after the object relocations have been
+   * computed.
    *
-   * The first element of this pair is a raw mirror::Object pointer because its
-   * usage will cross a suspend point and ObjPtr would produce a false positive.
+   * The first element will always be a pointer to a managed object. If the
+   * pointer has been tagged (testable with HasDexCacheNativeRefTag) it
+   * indicates that the referenced object is a DexCache object that requires
+   * special handling during loading and the second element has no meaningful
+   * value. If the pointer isn't tagged then the second element is an
+   * object-relative offset to a field containing a string reference.
    *
-   * The second element is an offset either into the object or into the string
-   * array of a DexCache object.
+   * Note that it is possible for an untagged DexCache pointer to occur in the
+   * first position if it has a managed reference that needs to be updated.
    *
    * TODO (chriswailes): Add a note indicating the source line where we ensure
    * that no moving garbage collection will occur.
+   *
+   * TODO (chriswailes): Replace with std::variant once ART is building with
+   * C++17
    */
-  typedef std::pair<mirror::Object*, uint32_t> RefInfoPair;
+  typedef std::pair<uintptr_t, uint32_t> HeapReferencePointerInfo;
 
   /*
    * Collects the info necessary for calculating image offsets to string field
@@ -613,23 +621,18 @@ class ImageWriter final {
    * references that will be included in the AppImage. This allows use to both
    * allocate enough memory for soring the offsets and correctly calculate the
    * offsets of various objects into the image. Once the image offset
-   * calculations are done for Java objects the reference object/offset pairs
+   * calculations are done for managed objects the reference object/offset pairs
    * are translated to image offsets. The CopyMetadata function then copies
    * these offsets into the image.
-   *
-   * A vector containing pairs of object pointers and offsets. The offsets are
-   * tagged to indicate if the offset is for a field of a mirror object or a
-   * native reference. If the offset is tagged as a native reference it must
-   * have come from a DexCache's string array.
    */
-  std::vector<RefInfoPair> CollectStringReferenceInfo() const
+  std::vector<HeapReferencePointerInfo> CollectStringReferenceInfo() const
      REQUIRES_SHARED(Locks::mutator_lock_);
 
   /*
    * Ensures that assumptions about native GC roots and AppImages hold.
    *
    * This function verifies the following condition(s):
-   *   - Native references to Java strings are only reachable through DexCache
+   *   - Native references to managed strings are only reachable through DexCache
    *     objects
    */
   void VerifyNativeGCRootInvariants() const REQUIRES_SHARED(Locks::mutator_lock_);
@@ -772,7 +775,7 @@ class ImageWriter final {
   mirror::ObjectArray<mirror::Object>* boot_image_live_objects_;
 
   // Offsets into the image that indicate where string references are recorded.
-  std::vector<uint32_t> string_reference_offsets_;
+  std::vector<AppImageReferenceOffsetInfo> string_reference_offsets_;
 
   // Which mode the image is stored as, see image.h
   const ImageHeader::StorageMode image_storage_mode_;
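The HeapReferencePointerInfo comment above describes two mutually exclusive interpretations of the pair, and the second TODO points at std::variant once C++17 is available. A hypothetical sketch of what that variant form could look like follows; ManagedFieldRef and DexCacheSlotRef are invented names for illustration and are not part of the patch. Until then, the tagged pair keeps the collected data a flat array of integers, which is what the image section size calculation in image_writer.cc relies on.

#include <cstdint>
#include <variant>

struct ManagedFieldRef {
  uintptr_t object;        // Untagged pointer to the managed object.
  uint32_t member_offset;  // Offset of the string field within the object.
};

struct DexCacheSlotRef {
  uintptr_t dex_cache;     // Pointer to the DexCache object (no tag bit needed).
  uint32_t string_index;   // Index into the DexCache string array.
};

// One entry per collected string reference; the alternative chosen replaces
// the tag bit used by the std::pair encoding in the patch.
using StringRefInfo = std::variant<ManagedFieldRef, DexCacheSlotRef>;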
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index c18abab8cb..ae812b80cf 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1178,28 +1178,33 @@ class VerifyDeclaringClassVisitor : public ArtMethodVisitor {
 
 /*
  * A class used to ensure that all strings in an AppImage have been properly
- * interned.
+ * interned, and is only ever run in debug mode.
  */
 class VerifyStringInterningVisitor {
  public:
  explicit VerifyStringInterningVisitor(const gc::space::ImageSpace& space) :
-    uninterned_string_found_(false),
    space_(space),
    intern_table_(*Runtime::Current()->GetInternTable()) {}
 
-  ALWAYS_INLINE
  void TestObject(ObjPtr<mirror::Object> referred_obj) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (referred_obj != nullptr &&
        space_.HasAddress(referred_obj.Ptr()) &&
        referred_obj->IsString()) {
      ObjPtr<mirror::String> referred_str = referred_obj->AsString();
-      uninterned_string_found_ = uninterned_string_found_ ||
-          (intern_table_.LookupStrong(Thread::Current(), referred_str) != referred_str);
+
+      if (kIsDebugBuild) {
+        // Saved to temporary variables to aid in debugging.
+        ObjPtr<mirror::String> strong_lookup_result =
+            intern_table_.LookupStrong(Thread::Current(), referred_str);
+        ObjPtr<mirror::String> weak_lookup_result =
+            intern_table_.LookupWeak(Thread::Current(), referred_str);
+
+        DCHECK((strong_lookup_result == referred_str) || (weak_lookup_result == referred_str));
+      }
    }
  }
 
-  ALWAYS_INLINE
  void VisitRootIfNonNull(
      mirror::CompressedReference<mirror::Object>* root) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
@@ -1208,14 +1213,12 @@ class VerifyStringInterningVisitor {
    }
  }
 
-  ALWAYS_INLINE
  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    TestObject(root->AsMirrorPtr());
  }
 
  // Visit Class Fields
-  ALWAYS_INLINE
  void operator()(ObjPtr<mirror::Object> obj,
                  MemberOffset offset,
                  bool is_static ATTRIBUTE_UNUSED) const
@@ -1237,7 +1240,6 @@ class VerifyStringInterningVisitor {
    operator()(ref, mirror::Reference::ReferentOffset(), false);
  }
 
-  mutable bool uninterned_string_found_;
  const gc::space::ImageSpace& space_;
  InternTable& intern_table_;
 };
@@ -1247,13 +1249,14 @@ class VerifyStringInterningVisitor {
 * properly interned. To be considered properly interned a reference must
 * point to the same version of the string that the intern table does.
 */
-bool VerifyStringInterning(gc::space::ImageSpace& space) REQUIRES_SHARED(Locks::mutator_lock_) {
+void VerifyStringInterning(gc::space::ImageSpace& space) REQUIRES_SHARED(Locks::mutator_lock_) {
  const gc::accounting::ContinuousSpaceBitmap* bitmap = space.GetMarkBitmap();
  const ImageHeader& image_header = space.GetImageHeader();
  const uint8_t* target_base = space.GetMemMap()->Begin();
  const ImageSection& objects_section = image_header.GetObjectsSection();
-  uintptr_t objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
-  uintptr_t objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());
+
+  auto objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
+  auto objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());
 
  VerifyStringInterningVisitor visitor(space);
  bitmap->VisitMarkedRange(objects_begin,
@@ -1262,21 +1265,19 @@ bool VerifyStringInterning(gc::space::ImageSpace& space) REQUIRES_SHARED(Locks::
                           REQUIRES_SHARED(Locks::mutator_lock_) {
    if (space.HasAddress(obj)) {
      if (obj->IsDexCache()) {
-        obj->VisitReferences</*kVisitNativeRoots=*/ true,
-                             kVerifyNone,
-                             kWithoutReadBarrier>(visitor, visitor);
+        obj->VisitReferences</* kVisitNativeRoots= */ true,
+                             kVerifyNone,
+                             kWithoutReadBarrier>(visitor, visitor);
      } else {
        // Don't visit native roots for non-dex-cache as they can't contain
        // native references to strings. This is verified during compilation
        // by ImageWriter::VerifyNativeGCRootInvariants.
-        obj->VisitReferences</*kVisitNativeRoots=*/ false,
-                             kVerifyNone,
-                             kWithoutReadBarrier>(visitor, visitor);
+        obj->VisitReferences</* kVisitNativeRoots= */ false,
+                             kVerifyNone,
+                             kWithoutReadBarrier>(visitor, visitor);
      }
    }
  });
-
-  return !visitor.uninterned_string_found_;
 }
 
 // new_class_set is the set of classes that were read from the class table section in the image.
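VerifyStringInterning() above treats a reference as properly interned only when the intern table's strong or weak entry is the very same object, not merely a string with equal contents. A simplified, self-contained model of that identity check is shown below, using std::string in place of mirror::String and a plain map in place of the runtime's InternTable (which the real code queries with LookupStrong()/LookupWeak()); it is only an illustration of the invariant, not ART code.

#include <memory>
#include <string>
#include <unordered_map>

class InternTableModel {
 public:
  // Returns the canonical object for the given contents, inserting on first use.
  const std::string* Intern(const std::string& s) {
    auto it = table_.find(s);
    if (it == table_.end()) {
      it = table_.emplace(s, std::make_unique<std::string>(s)).first;
    }
    return it->second.get();
  }

  // Mirrors the DCHECK in the visitor: the referred object must be the
  // canonical one, so pointer identity is required, not just equality.
  bool IsProperlyInterned(const std::string* referred) const {
    auto it = table_.find(*referred);
    return it != table_.end() && it->second.get() == referred;
  }

 private:
  std::unordered_map<std::string, std::unique_ptr<std::string>> table_;
};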
@@ -1293,7 +1294,7 @@ class AppImageLoadingHelper {
      REQUIRES(!Locks::dex_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
 
-  static void AddImageInternTable(gc::space::ImageSpace* space)
+  static void HandleAppImageStrings(gc::space::ImageSpace* space)
      REQUIRES_SHARED(Locks::mutator_lock_);
 
  static void UpdateInternStrings(
@@ -1377,8 +1378,11 @@ void AppImageLoadingHelper::Update(
  }
 
  if (ClassLinker::kAppImageMayContainStrings) {
-    AddImageInternTable(space);
-    DCHECK(VerifyStringInterning(*space));
+    HandleAppImageStrings(space);
+
+    if (kIsDebugBuild) {
+      VerifyStringInterning(*space);
+    }
  }
 
  if (kVerifyArtMethodDeclaringClasses) {
@@ -1393,51 +1397,76 @@ void AppImageLoadingHelper::UpdateInternStrings(
    gc::space::ImageSpace* space,
    const SafeMap<mirror::String*, mirror::String*>& intern_remap) {
  const uint8_t* target_base = space->Begin();
-  const ImageSection& sro_section = space->GetImageHeader().GetImageStringReferenceOffsetsSection();
-  const size_t num_string_offsets = sro_section.Size() / sizeof(uint32_t);
+  const ImageSection& sro_section =
+      space->GetImageHeader().GetImageStringReferenceOffsetsSection();
+  const size_t num_string_offsets = sro_section.Size() / sizeof(AppImageReferenceOffsetInfo);
 
  VLOG(image)
      << "ClassLinker:AppImage:InternStrings:imageStringReferenceOffsetCount = "
      << num_string_offsets;
 
-  const uint32_t* sro_base =
-      reinterpret_cast<const uint32_t*>(target_base + sro_section.Offset());
+  const auto* sro_base =
+      reinterpret_cast<const AppImageReferenceOffsetInfo*>(target_base + sro_section.Offset());
 
  for (size_t offset_index = 0; offset_index < num_string_offsets; ++offset_index) {
-    if (HasNativeRefTag(sro_base[offset_index])) {
-      void* raw_field_addr = space->Begin() + ClearNativeRefTag(sro_base[offset_index]);
-      mirror::CompressedReference<mirror::Object>* objref_addr =
-          reinterpret_cast<mirror::CompressedReference<mirror::Object>*>(raw_field_addr);
-      mirror::String* referred_string = objref_addr->AsMirrorPtr()->AsString();
+    uint32_t base_offset = sro_base[offset_index].first;
+
+    if (HasDexCacheNativeRefTag(base_offset)) {
+      base_offset = ClearDexCacheNativeRefTag(base_offset);
+      DCHECK_ALIGNED(base_offset, 2);
+
+      ObjPtr<mirror::DexCache> dex_cache =
+          reinterpret_cast<mirror::DexCache*>(space->Begin() + base_offset);
+      uint32_t string_index = sro_base[offset_index].second;
+
+      mirror::StringDexCachePair source = dex_cache->GetStrings()[string_index].load();
+      ObjPtr<mirror::String> referred_string = source.object.Read();
      DCHECK(referred_string != nullptr);
 
-      auto it = intern_remap.find(referred_string);
+      auto it = intern_remap.find(referred_string.Ptr());
      if (it != intern_remap.end()) {
-        objref_addr->Assign(it->second);
+        // This doesn't use SetResolvedString to maintain consistency with how
+        // we load the string. The index from the source string must be
+        // re-used due to the circular nature of the cache. Because we are not
+        // using a helper function we need to mark the GC card manually.
+        WriteBarrier::ForEveryFieldWrite(dex_cache);
+        dex_cache->GetStrings()[string_index].store(
+            mirror::StringDexCachePair(it->second, source.index));
      }
+
    } else {
-      void* raw_field_addr = space->Begin() + sro_base[offset_index];
-      mirror::HeapReference<mirror::Object>* objref_addr =
-          reinterpret_cast<mirror::HeapReference<mirror::Object>*>(raw_field_addr);
-      mirror::String* referred_string = objref_addr->AsMirrorPtr()->AsString();
-      DCHECK(referred_string != nullptr);
+      uint32_t raw_member_offset = sro_base[offset_index].second;
+      DCHECK_ALIGNED(base_offset, 2);
+      DCHECK_ALIGNED(raw_member_offset, 2);
+
+      ObjPtr<mirror::Object> obj_ptr =
+          reinterpret_cast<mirror::Object*>(space->Begin() + base_offset);
+      MemberOffset member_offset(raw_member_offset);
+      ObjPtr<mirror::String> referred_string =
+          obj_ptr->GetFieldObject<mirror::String,
+                                  kVerifyNone,
+                                  kWithoutReadBarrier,
+                                  /* kIsVolatile= */ false>(member_offset);
+      DCHECK(referred_string != nullptr);
 
-      auto it = intern_remap.find(referred_string);
+      auto it = intern_remap.find(referred_string.Ptr());
      if (it != intern_remap.end()) {
-        objref_addr->Assign<false>(it->second);
+        obj_ptr->SetFieldObject</* kTransactionActive= */ false,
+                                /* kCheckTransaction= */ false,
+                                kVerifyNone,
+                                /* kIsVolatile= */ false>(member_offset, it->second);
      }
    }
  }
 }
 
-void AppImageLoadingHelper::AddImageInternTable(gc::space::ImageSpace* space) {
+void AppImageLoadingHelper::HandleAppImageStrings(gc::space::ImageSpace* space) {
  // Iterate over the string reference offsets stored in the image and intern
  // the strings they point to.
  ScopedTrace timing("AppImage:InternString");
 
  Thread* const self = Thread::Current();
-  Runtime* const runtime = Runtime::Current();
-  InternTable* const intern_table = runtime->GetInternTable();
+  InternTable* const intern_table = Runtime::Current()->GetInternTable();
 
  // Add the intern table, removing any conflicts. For conflicts, store the new address in a map
  // for faster lookup.
@@ -1445,7 +1474,7 @@ void AppImageLoadingHelper::AddImageInternTable(gc::space::ImageSpace* space) {
  SafeMap<mirror::String*, mirror::String*> intern_remap;
  intern_table->AddImageStringsToTable(space, [&](InternTable::UnorderedSet& interns)
      REQUIRES_SHARED(Locks::mutator_lock_) {
-    VLOG(image) << "AppImage:StringsInInternTable = " << interns.size();
+    VLOG(image) << "AppImage:stringsInInternTableSize = " << interns.size();
    for (auto it = interns.begin(); it != interns.end(); ) {
      ObjPtr<mirror::String> string = it->Read();
      ObjPtr<mirror::String> existing = intern_table->LookupWeak(self, string);
@@ -1461,7 +1490,7 @@ void AppImageLoadingHelper::AddImageInternTable(gc::space::ImageSpace* space) {
    }
  });
 
-  VLOG(image) << "AppImage:ConflictingInternStrings = " << intern_remap.size();
+  VLOG(image) << "AppImage:conflictingInternStrings = " << intern_remap.size();
 
  // For debug builds, always run the code below to get coverage.
  if (kIsDebugBuild || !intern_remap.empty()) {
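The UpdateInternStrings() change above decodes each AppImageReferenceOffsetInfo entry in the new string-reference-offsets section and patches either a DexCache string slot or an object field, depending on the tag bit. The sketch below mirrors only that branch structure: FakeString/FakeDexCache are invented types, and read barriers, GC card marking, and StringDexCachePair index handling are deliberately omitted, so it shows the decode logic rather than the runtime's actual write path.

#include <cstdint>
#include <unordered_map>
#include <utility>
#include <vector>

using Entry = std::pair<uint32_t, uint32_t>;  // Mirrors AppImageReferenceOffsetInfo.

constexpr bool HasTag(uint32_t v) { return (v & 1u) == 1u; }
constexpr uint32_t ClearTag(uint32_t v) { return v & ~1u; }

struct FakeString {};
struct FakeDexCache { std::vector<FakeString*> strings; };

void RemapStringRefs(uint8_t* image_base,
                     const std::vector<Entry>& entries,
                     const std::unordered_map<FakeString*, FakeString*>& remap) {
  for (const Entry& e : entries) {
    if (HasTag(e.first)) {
      // Tagged first element: the image offset of a DexCache; the second
      // element indexes that DexCache's string array.
      auto* dex_cache = reinterpret_cast<FakeDexCache*>(image_base + ClearTag(e.first));
      FakeString*& slot = dex_cache->strings[e.second];
      auto it = remap.find(slot);
      if (it != remap.end()) {
        slot = it->second;  // The real code re-stores the pair and marks the GC card.
      }
    } else {
      // Untagged first element: the image offset of an ordinary object; the
      // second element is the byte offset of the field holding the string.
      uint8_t* obj = image_base + e.first;
      auto* field = reinterpret_cast<FakeString**>(obj + e.second);
      auto it = remap.find(*field);
      if (it != remap.end()) {
        *field = it->second;
      }
    }
  }
}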
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index cc17d57228..2c7566304c 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -111,9 +111,7 @@ class AllocatorVisitor {
 
 class ClassLinker {
  public:
-  // Disabled until AppImageLoadingHelper::UpdateInternStrings does the missing GC card marks.
-  // Bug: 117846779
-  static constexpr bool kAppImageMayContainStrings = false;
+  static constexpr bool kAppImageMayContainStrings = true;
 
  explicit ClassLinker(InternTable* intern_table);
  virtual ~ClassLinker();
diff --git a/runtime/common_runtime_test.h b/runtime/common_runtime_test.h
index 774f19e7cd..c48ab3629c 100644
--- a/runtime/common_runtime_test.h
+++ b/runtime/common_runtime_test.h
@@ -191,12 +191,6 @@ class CheckJniAbortCatcher {
  DISALLOW_COPY_AND_ASSIGN(CheckJniAbortCatcher);
 };
 
-#define TEST_DISABLED() \
-  if ((true)) { \
-    printf("WARNING: TEST DISABLED\n"); \
-    return; \
-  }
-
 #define TEST_DISABLED_FOR_ARM() \
  if (kRuntimeISA == InstructionSet::kArm || kRuntimeISA == InstructionSet::kThumb2) { \
    printf("WARNING: TEST DISABLED FOR ARM\n"); \
diff --git a/runtime/image.h b/runtime/image.h
index bd903daa6e..0dec5f71ab 100644
--- a/runtime/image.h
+++ b/runtime/image.h
@@ -442,11 +442,27 @@ class PACKED(4) ImageHeader {
 };
 
 /*
- * Tags the last bit. Used by AppImage logic to differentiate between managed
- * and native references.
+ * This type holds the information necessary to fix up AppImage string
+ * references.
+ *
+ * The first element of the pair is an offset into the image space. If the
+ * offset is tagged (testable using HasDexCacheNativeRefTag) it indicates the location
+ * of a DexCache object that has one or more native references to managed
+ * strings that need to be fixed up. In this case the second element has no
+ * meaningful value.
+ *
+ * If the first element isn't tagged then it indicates the location of a
+ * managed object with a field that needs fixing up. In this case the second
+ * element of the pair is an object-relative offset to the field in question.
+ */
+typedef std::pair<uint32_t, uint32_t> AppImageReferenceOffsetInfo;
+
+/*
+ * Tags the last bit. Used by AppImage logic to differentiate between pointers
+ * to managed objects and pointers to native reference arrays.
 */
 template<typename T>
-T SetNativeRefTag(T val) {
+T SetDexCacheNativeRefTag(T val) {
  static_assert(std::is_integral<T>::value, "Expected integral type.");
 
  return val | 1u;
@@ -454,10 +470,11 @@ T SetNativeRefTag(T val) {
 
 /*
 * Retrieves the value of the last bit. Used by AppImage logic to
- * differentiate between managed and native references.
+ * differentiate between pointers to managed objects and pointers to native
+ * reference arrays.
 */
 template<typename T>
-bool HasNativeRefTag(T val) {
+bool HasDexCacheNativeRefTag(T val) {
  static_assert(std::is_integral<T>::value, "Expected integral type.");
 
  return (val & 1u) == 1u;
@@ -465,10 +482,11 @@ T ClearNativeRefTag(T val) {
 
 /*
 * Sets the last bit of the value to 0. Used by AppImage logic to
- * differentiate between managed and native references.
+ * differentiate between pointers to managed objects and pointers to native
+ * reference arrays.
 */
 template<typename T>
-T ClearNativeRefTag(T val) {
+T ClearDexCacheNativeRefTag(T val) {
  static_assert(std::is_integral<T>::value, "Expected integral type.");
 
  return val & ~1u;
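The image.h helpers above reserve bit 0 of an (at least 2-byte aligned) offset or pointer as the DexCache marker. A small usage sketch follows, with local simplified copies of the helpers so the snippet stands alone; the offset value is made up purely for illustration.

#include <cassert>
#include <cstdint>

template <typename T> T SetDexCacheNativeRefTag(T val) { return val | 1u; }
template <typename T> bool HasDexCacheNativeRefTag(T val) { return (val & 1u) == 1u; }
template <typename T> T ClearDexCacheNativeRefTag(T val) { return val & ~1u; }

int main() {
  // Offsets and object addresses stored in the image are at least 2-byte
  // aligned (see the DCHECK_ALIGNED calls in UpdateInternStrings), so bit 0 is
  // free to use as the "this is a DexCache native slot" marker.
  uint32_t dex_cache_offset = 0x4000;
  uint32_t tagged = SetDexCacheNativeRefTag(dex_cache_offset);

  assert(HasDexCacheNativeRefTag(tagged));
  assert(!HasDexCacheNativeRefTag(dex_cache_offset));
  assert(ClearDexCacheNativeRefTag(tagged) == dex_cache_offset);
  return 0;
}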