Diffstat (limited to 'compiler/image_writer.cc')
-rw-r--r--  compiler/image_writer.cc  384
1 file changed, 251 insertions(+), 133 deletions(-)
diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc
index 117d1131b5..d129249d63 100644
--- a/compiler/image_writer.cc
+++ b/compiler/image_writer.cc
@@ -238,10 +238,11 @@ bool ImageWriter::Write(int image_fd,
     case ImageHeader::kStorageModeLZ4: {
       const size_t compressed_max_size = LZ4_compressBound(image_data_size);
       compressed_data.reset(new char[compressed_max_size]);
-      data_size = LZ4_compress(
+      data_size = LZ4_compress_default(
           reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader),
           &compressed_data[0],
-          image_data_size);
+          image_data_size,
+          compressed_max_size);
       break;
     }
@@ -713,7 +714,8 @@ void ImageWriter::ComputeLazyFieldsForImageClasses() {
   class_linker->VisitClassesWithoutClassesLock(&visitor);
 }
 
-static bool IsBootClassLoaderClass(mirror::Class* klass) REQUIRES_SHARED(Locks::mutator_lock_) {
+static bool IsBootClassLoaderClass(ObjPtr<mirror::Class> klass)
+    REQUIRES_SHARED(Locks::mutator_lock_) {
   return klass->GetClassLoader() == nullptr;
 }
@@ -721,33 +723,33 @@ bool ImageWriter::IsBootClassLoaderNonImageClass(mirror::Class* klass) {
   return IsBootClassLoaderClass(klass) && !IsInBootImage(klass);
 }
 
-bool ImageWriter::PruneAppImageClass(mirror::Class* klass) {
+bool ImageWriter::PruneAppImageClass(ObjPtr<mirror::Class> klass) {
   bool early_exit = false;
   std::unordered_set<mirror::Class*> visited;
   return PruneAppImageClassInternal(klass, &early_exit, &visited);
 }
 
 bool ImageWriter::PruneAppImageClassInternal(
-    mirror::Class* klass,
+    ObjPtr<mirror::Class> klass,
     bool* early_exit,
     std::unordered_set<mirror::Class*>* visited) {
   DCHECK(early_exit != nullptr);
   DCHECK(visited != nullptr);
   DCHECK(compile_app_image_);
-  if (klass == nullptr || IsInBootImage(klass)) {
+  if (klass == nullptr || IsInBootImage(klass.Ptr())) {
     return false;
   }
-  auto found = prune_class_memo_.find(klass);
+  auto found = prune_class_memo_.find(klass.Ptr());
   if (found != prune_class_memo_.end()) {
     // Already computed, return the found value.
     return found->second;
   }
   // Circular dependencies, return false but do not store the result in the memoization table.
-  if (visited->find(klass) != visited->end()) {
+  if (visited->find(klass.Ptr()) != visited->end()) {
     *early_exit = true;
     return false;
   }
-  visited->emplace(klass);
+  visited->emplace(klass.Ptr());
   bool result = IsBootClassLoaderClass(klass);
   std::string temp;
   // Prune if not an image class, this handles any broken sets of image classes such as having a
@@ -811,20 +813,20 @@ bool ImageWriter::PruneAppImageClassInternal(
         dex_file_oat_index_map_.find(dex_cache->GetDexFile()) == dex_file_oat_index_map_.end();
   }
   // Erase the element we stored earlier since we are exiting the function.
-  auto it = visited->find(klass);
+  auto it = visited->find(klass.Ptr());
   DCHECK(it != visited->end());
   visited->erase(it);
   // Only store result if it is true or none of the calls early exited due to circular
   // dependencies. If visited is empty then we are the root caller, in this case the cycle was in
   // a child call and we can remember the result.
   if (result == true || !my_early_exit || visited->empty()) {
-    prune_class_memo_[klass] = result;
+    prune_class_memo_[klass.Ptr()] = result;
   }
   *early_exit |= my_early_exit;
   return result;
 }
 
-bool ImageWriter::KeepClass(Class* klass) {
+bool ImageWriter::KeepClass(ObjPtr<mirror::Class> klass) {
   if (klass == nullptr) {
     return false;
   }
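The first hunk above moves off the deprecated LZ4_compress(), whose caller had to guarantee the output buffer was large enough, to LZ4_compress_default(), which takes the destination capacity explicitly and returns 0 on failure. A minimal standalone sketch of the same calling pattern (the helper name is hypothetical, not ART code):

```cpp
#include <lz4.h>

#include <memory>

// Returns the compressed size, or 0 if compression failed. `max_size` is the
// worst-case output for `src_size` input, so a failure here indicates a bug.
int CompressBlock(const char* src, int src_size, std::unique_ptr<char[]>* out) {
  const int max_size = LZ4_compressBound(src_size);
  out->reset(new char[max_size]);
  // The fourth argument is the destination capacity that the deprecated
  // LZ4_compress() implicitly assumed was "large enough".
  return LZ4_compress_default(src, out->get(), src_size, max_size);
}
```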
@@ -895,15 +897,27 @@ class ImageWriter::PruneClassLoaderClassesVisitor : public ClassLoaderVisitor {
         Runtime::Current()->GetClassLinker()->ClassTableForClassLoader(class_loader);
     class_table->Visit(classes_visitor);
     removed_class_count_ += classes_visitor.Prune();
+
+    // Record app image class loader. The fake boot class loader should not get registered
+    // and we should end up with only one class loader for an app and none for boot image.
+    if (class_loader != nullptr && class_table != nullptr) {
+      DCHECK(class_loader_ == nullptr);
+      class_loader_ = class_loader;
+    }
   }
 
   size_t GetRemovedClassCount() const {
     return removed_class_count_;
   }
 
+  ObjPtr<mirror::ClassLoader> GetClassLoader() const REQUIRES_SHARED(Locks::mutator_lock_) {
+    return class_loader_;
+  }
+
  private:
   ImageWriter* const image_writer_;
   size_t removed_class_count_;
+  ObjPtr<mirror::ClassLoader> class_loader_;
 };
@@ -912,71 +926,149 @@ void ImageWriter::VisitClassLoaders(ClassLoaderVisitor* visitor) {
   Runtime::Current()->GetClassLinker()->VisitClassLoaders(visitor);
 }
 
+void ImageWriter::PruneAndPreloadDexCache(ObjPtr<mirror::DexCache> dex_cache,
+                                          ObjPtr<mirror::ClassLoader> class_loader) {
+  // To ensure deterministic contents of the hash-based arrays, each slot shall contain
+  // the candidate with the lowest index. As we're processing entries in increasing index
+  // order, this means trying to look up the entry for the current index if the slot is
+  // empty or if it contains a higher index.
+
+  Runtime* runtime = Runtime::Current();
+  ClassLinker* class_linker = runtime->GetClassLinker();
+  ArtMethod* resolution_method = runtime->GetResolutionMethod();
+  const DexFile& dex_file = *dex_cache->GetDexFile();
+  // Prune methods.
+  ArtMethod** resolved_methods = dex_cache->GetResolvedMethods();
+  for (size_t i = 0, num = dex_cache->NumResolvedMethods(); i != num; ++i) {
+    ArtMethod* method =
+        mirror::DexCache::GetElementPtrSize(resolved_methods, i, target_ptr_size_);
+    DCHECK(method != nullptr) << "Expected resolution method instead of null method";
+    mirror::Class* declaring_class = method->GetDeclaringClass();
+    // Copied methods may be held live by a class which was not an image class but have a
+    // declaring class which is an image class. Set it to the resolution method to be safe and
+    // prevent dangling pointers.
+    if (method->IsCopied() || !KeepClass(declaring_class)) {
+      mirror::DexCache::SetElementPtrSize(resolved_methods,
+                                          i,
+                                          resolution_method,
+                                          target_ptr_size_);
+    } else if (kIsDebugBuild) {
+      // Check that the class is still in the classes table.
+      ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
+      CHECK(class_linker->ClassInClassTable(declaring_class)) << "Class "
+          << Class::PrettyClass(declaring_class) << " not in class linker table";
+    }
+  }
+  // Prune fields and make the contents of the field array deterministic.
+  mirror::FieldDexCacheType* resolved_fields = dex_cache->GetResolvedFields();
+  dex::TypeIndex last_class_idx;  // Initialized to invalid index.
+  ObjPtr<mirror::Class> last_class = nullptr;
+  for (size_t i = 0, end = dex_file.NumFieldIds(); i < end; ++i) {
+    uint32_t slot_idx = dex_cache->FieldSlotIndex(i);
+    auto pair = mirror::DexCache::GetNativePairPtrSize(resolved_fields, slot_idx, target_ptr_size_);
+    uint32_t stored_index = pair.index;
+    ArtField* field = pair.object;
+    if (field != nullptr && i > stored_index) {
+      continue;  // Already checked.
+    }
+    // Check if the referenced class is in the image. Note that we want to check the referenced
+    // class rather than the declaring class to preserve the semantics, i.e. using a FieldId
+    // results in resolving the referenced class and that can for example throw OOME.
+    const DexFile::FieldId& field_id = dex_file.GetFieldId(i);
+    if (field_id.class_idx_ != last_class_idx) {
+      last_class_idx = field_id.class_idx_;
+      last_class = class_linker->LookupResolvedType(
+          dex_file, last_class_idx, dex_cache, class_loader);
+      if (last_class != nullptr && !KeepClass(last_class)) {
+        last_class = nullptr;
+      }
+    }
+    if (field == nullptr || i < stored_index) {
+      if (last_class != nullptr) {
+        const char* name = dex_file.StringDataByIdx(field_id.name_idx_);
+        const char* type = dex_file.StringByTypeIdx(field_id.type_idx_);
+        field = mirror::Class::FindField(Thread::Current(), last_class, name, type);
+        if (field != nullptr) {
+          // If the referenced class is in the image, the defining class must also be there.
+          DCHECK(KeepClass(field->GetDeclaringClass()));
+          dex_cache->SetResolvedField(i, field, target_ptr_size_);
+        }
+      }
+    } else {
+      DCHECK_EQ(i, stored_index);
+      if (last_class == nullptr) {
+        dex_cache->ClearResolvedField(stored_index, target_ptr_size_);
+      }
+    }
+  }
+  // Prune types and make the contents of the type array deterministic.
+  // This is done after fields and methods as their lookup can touch the types array.
+  for (size_t i = 0, end = dex_cache->GetDexFile()->NumTypeIds(); i < end; ++i) {
+    dex::TypeIndex type_idx(i);
+    uint32_t slot_idx = dex_cache->TypeSlotIndex(type_idx);
+    mirror::TypeDexCachePair pair =
+        dex_cache->GetResolvedTypes()[slot_idx].load(std::memory_order_relaxed);
+    uint32_t stored_index = pair.index;
+    ObjPtr<mirror::Class> klass = pair.object.Read();
+    if (klass == nullptr || i < stored_index) {
+      klass = class_linker->LookupResolvedType(dex_file, type_idx, dex_cache, class_loader);
+      if (klass != nullptr) {
+        DCHECK_EQ(dex_cache->GetResolvedType(type_idx), klass);
+        stored_index = i;  // For correct clearing below if not keeping the `klass`.
+      }
+    } else if (i == stored_index && !KeepClass(klass)) {
+      dex_cache->ClearResolvedType(dex::TypeIndex(stored_index));
+    }
+  }
+  // Strings do not need pruning, but the contents of the string array must be deterministic.
+  for (size_t i = 0, end = dex_cache->GetDexFile()->NumStringIds(); i < end; ++i) {
+    dex::StringIndex string_idx(i);
+    uint32_t slot_idx = dex_cache->StringSlotIndex(string_idx);
+    mirror::StringDexCachePair pair =
+        dex_cache->GetStrings()[slot_idx].load(std::memory_order_relaxed);
+    uint32_t stored_index = pair.index;
+    ObjPtr<mirror::String> string = pair.object.Read();
+    if (string == nullptr || i < stored_index) {
+      string = class_linker->LookupString(dex_file, string_idx, dex_cache);
+      DCHECK(string == nullptr || dex_cache->GetResolvedString(string_idx) == string);
+    }
+  }
+}
+
 void ImageWriter::PruneNonImageClasses() {
   Runtime* runtime = Runtime::Current();
   ClassLinker* class_linker = runtime->GetClassLinker();
   Thread* self = Thread::Current();
+  ScopedAssertNoThreadSuspension sa(__FUNCTION__);
 
   // Clear class table strong roots so that dex caches can get pruned. We require pruning the class
   // path dex caches.
   class_linker->ClearClassTableStrongRoots();
 
   // Remove the undesired classes from the class roots.
+  ObjPtr<mirror::ClassLoader> class_loader;
   {
     PruneClassLoaderClassesVisitor class_loader_visitor(this);
     VisitClassLoaders(&class_loader_visitor);
     VLOG(compiler) << "Pruned " << class_loader_visitor.GetRemovedClassCount() << " classes";
+    class_loader = class_loader_visitor.GetClassLoader();
+    DCHECK_EQ(class_loader != nullptr, compile_app_image_);
   }
 
   // Clear references to removed classes from the DexCaches.
-  ArtMethod* resolution_method = runtime->GetResolutionMethod();
-
-  ScopedAssertNoThreadSuspension sa(__FUNCTION__);
-  ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);  // For ClassInClassTable
-  ReaderMutexLock mu2(self, *Locks::dex_lock_);
-  for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
-    if (self->IsJWeakCleared(data.weak_root)) {
-      continue;
-    }
-    ObjPtr<mirror::DexCache> dex_cache = self->DecodeJObject(data.weak_root)->AsDexCache();
-    for (size_t i = 0; i < dex_cache->NumResolvedTypes(); i++) {
-      mirror::TypeDexCachePair pair =
-          dex_cache->GetResolvedTypes()[i].load(std::memory_order_relaxed);
-      mirror::Class* klass = pair.object.Read();
-      if (klass != nullptr && !KeepClass(klass)) {
-        dex_cache->ClearResolvedType(dex::TypeIndex(pair.index));
-      }
-    }
-    ArtMethod** resolved_methods = dex_cache->GetResolvedMethods();
-    for (size_t i = 0, num = dex_cache->NumResolvedMethods(); i != num; ++i) {
-      ArtMethod* method =
-          mirror::DexCache::GetElementPtrSize(resolved_methods, i, target_ptr_size_);
-      DCHECK(method != nullptr) << "Expected resolution method instead of null method";
-      mirror::Class* declaring_class = method->GetDeclaringClass();
-      // Copied methods may be held live by a class which was not an image class but have a
-      // declaring class which is an image class. Set it to the resolution method to be safe and
-      // prevent dangling pointers.
-      if (method->IsCopied() || !KeepClass(declaring_class)) {
-        mirror::DexCache::SetElementPtrSize(resolved_methods,
-                                            i,
-                                            resolution_method,
-                                            target_ptr_size_);
-      } else {
-        // Check that the class is still in the classes table.
-        DCHECK(class_linker->ClassInClassTable(declaring_class)) << "Class "
-            << Class::PrettyClass(declaring_class) << " not in class linker table";
-      }
-    }
-    ArtField** resolved_fields = dex_cache->GetResolvedFields();
-    for (size_t i = 0; i < dex_cache->NumResolvedFields(); i++) {
-      ArtField* field = mirror::DexCache::GetElementPtrSize(resolved_fields, i, target_ptr_size_);
-      if (field != nullptr && !KeepClass(field->GetDeclaringClass().Ptr())) {
-        dex_cache->SetResolvedField(i, nullptr, target_ptr_size_);
+  std::vector<ObjPtr<mirror::DexCache>> dex_caches;
+  {
+    ReaderMutexLock mu2(self, *Locks::dex_lock_);
+    dex_caches.reserve(class_linker->GetDexCachesData().size());
+    for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
+      if (self->IsJWeakCleared(data.weak_root)) {
+        continue;
       }
+      dex_caches.push_back(self->DecodeJObject(data.weak_root)->AsDexCache());
     }
-    // Clean the dex field. It might have been populated during the initialization phase, but
-    // contains data only valid during a real run.
-    dex_cache->SetFieldObject<false>(mirror::DexCache::DexOffset(), nullptr);
+  }
+  for (ObjPtr<mirror::DexCache> dex_cache : dex_caches) {
+    PruneAndPreloadDexCache(dex_cache, class_loader);
   }
 
   // Drop the array class cache in the ClassLinker, as these are roots holding those classes live.
@@ -1246,21 +1338,20 @@ mirror::Object* ImageWriter::TryAssignBinSlot(WorkStack& work_stack,
       // live.
       if (as_klass->ShouldHaveImt()) {
         ImTable* imt = as_klass->GetImt(target_ptr_size_);
-        for (size_t i = 0; i < ImTable::kSize; ++i) {
-          ArtMethod* imt_method = imt->Get(i, target_ptr_size_);
-          DCHECK(imt_method != nullptr);
-          if (imt_method->IsRuntimeMethod() &&
-              !IsInBootImage(imt_method) &&
-              !NativeRelocationAssigned(imt_method)) {
-            AssignMethodOffset(imt_method, kNativeObjectRelocationTypeRuntimeMethod, oat_index);
+        if (TryAssignImTableOffset(imt, oat_index)) {
+          // Since imt's can be shared only do this the first time to not double count imt method
+          // fixups.
+          for (size_t i = 0; i < ImTable::kSize; ++i) {
+            ArtMethod* imt_method = imt->Get(i, target_ptr_size_);
+            DCHECK(imt_method != nullptr);
+            if (imt_method->IsRuntimeMethod() &&
+                !IsInBootImage(imt_method) &&
+                !NativeRelocationAssigned(imt_method)) {
+              AssignMethodOffset(imt_method, kNativeObjectRelocationTypeRuntimeMethod, oat_index);
+            }
           }
         }
       }
-
-      if (as_klass->ShouldHaveImt()) {
-        ImTable* imt = as_klass->GetImt(target_ptr_size_);
-        TryAssignImTableOffset(imt, oat_index);
-      }
     } else if (obj->IsClassLoader()) {
       // Register the class loader if it has a class table.
       // The fake boot class loader should not get registered and we should end up with only one
@@ -1294,10 +1385,10 @@ bool ImageWriter::NativeRelocationAssigned(void* ptr) const {
   return native_object_relocations_.find(ptr) != native_object_relocations_.end();
 }
 
-void ImageWriter::TryAssignImTableOffset(ImTable* imt, size_t oat_index) {
+bool ImageWriter::TryAssignImTableOffset(ImTable* imt, size_t oat_index) {
   // No offset, or already assigned.
   if (imt == nullptr || IsInBootImage(imt) || NativeRelocationAssigned(imt)) {
-    return;
+    return false;
   }
   // If the method is a conflict method we also want to assign the conflict table offset.
   ImageInfo& image_info = GetImageInfo(oat_index);
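The new PruneAndPreloadDexCache() fills the hash-based dex cache arrays deterministically: several dex file indices can map to the same slot, and the rule is that a slot keeps the candidate with the lowest index. A simplified sketch of that rule with hypothetical names (the real slot hash is DexCache::FieldSlotIndex() and friends):

```cpp
#include <cstdint>
#include <functional>
#include <vector>

struct Slot {
  uint32_t index = 0;
  const void* object = nullptr;  // simplified stand-in for a DexCachePair
};

// Iterating i in increasing order and only filling a slot that is empty or
// holds a higher index makes the final array contents independent of the
// order in which entries happened to be resolved during compilation.
void FillDeterministically(std::vector<Slot>* slots,
                           uint32_t num_ids,
                           const std::function<const void*(uint32_t)>& lookup) {
  for (uint32_t i = 0; i != num_ids; ++i) {
    Slot& slot = (*slots)[i % slots->size()];  // stand-in for the slot hash
    if (slot.object == nullptr || slot.index > i) {
      if (const void* resolved = lookup(i)) {
        slot.index = i;
        slot.object = resolved;
      }
    }
  }
}
```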
@@ -1309,6 +1400,7 @@ void ImageWriter::TryAssignImTableOffset(ImTable* imt, size_t oat_index) {
       image_info.bin_slot_sizes_[kBinImTable],
       kNativeObjectRelocationTypeIMTable});
   image_info.bin_slot_sizes_[kBinImTable] += size;
+  return true;
 }
 
 void ImageWriter::TryAssignConflictTableOffset(ImtConflictTable* table, size_t oat_index) {
@@ -1407,8 +1499,7 @@ class ImageWriter::VisitReferencesVisitor {
   ALWAYS_INLINE void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
                                  ObjPtr<mirror::Reference> ref) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
-    ref->SetReferent</*kTransactionActive*/false>(
-        VisitReference(ref->GetReferent<kWithoutReadBarrier>()));
+    operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
   }
 
  private:
@@ -1566,7 +1657,7 @@ void ImageWriter::CalculateNewObjectOffsets() {
   // Calculate size of the dex cache arrays slot and prepare offsets.
   PrepareDexCacheArraySlots();
 
-  // Calculate the sizes of the intern tables and class tables.
+  // Calculate the sizes of the intern tables, class tables, and fixup tables.
   for (ImageInfo& image_info : image_infos_) {
     // Calculate how big the intern table will be after being serialized.
     InternTable* const intern_table = image_info.intern_table_.get();
@@ -1574,12 +1665,11 @@ void ImageWriter::CalculateNewObjectOffsets() {
     if (intern_table->StrongSize() != 0u) {
       image_info.intern_table_bytes_ = intern_table->WriteToMemory(nullptr);
     }
+    // Calculate the size of the class table.
     ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
-    CHECK_EQ(class_loaders_.size(), compile_app_image_ ? 1u : 0u);
-    mirror::ClassLoader* class_loader = compile_app_image_ ? *class_loaders_.begin() : nullptr;
-    DCHECK_EQ(image_info.class_table_->NumZygoteClasses(class_loader), 0u);
-    if (image_info.class_table_->NumNonZygoteClasses(class_loader) != 0u) {
+    DCHECK_EQ(image_info.class_table_->NumReferencedZygoteClasses(), 0u);
+    if (image_info.class_table_->NumReferencedNonZygoteClasses() != 0u) {
      image_info.class_table_bytes_ += image_info.class_table_->WriteToMemory(nullptr);
     }
   }
@@ -1595,7 +1685,7 @@ void ImageWriter::CalculateNewObjectOffsets() {
           break;
         }
         case kBinDexCacheArray:
-          bin_offset = RoundUp(bin_offset, DexCacheArraysLayout::Alignment());
+          bin_offset = RoundUp(bin_offset, DexCacheArraysLayout::Alignment(target_ptr_size_));
           break;
         case kBinImTable:
        case kBinIMTConflictTable: {
@@ -1628,8 +1718,6 @@ void ImageWriter::CalculateNewObjectOffsets() {
   // Transform each object's bin slot into an offset which will be used to do the final copy.
   heap->VisitObjects(UnbinObjectsIntoOffsetCallback, this);
 
-  // DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);
-
   size_t i = 0;
   for (ImageInfo& image_info : image_infos_) {
     image_info.image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots[i].Get()));
@@ -1643,8 +1731,6 @@ void ImageWriter::CalculateNewObjectOffsets() {
     ImageInfo& image_info = GetImageInfo(relocation.oat_index);
     relocation.offset += image_info.bin_slot_offsets_[bin_type];
   }
-
-  // Note that image_info.image_end_ is left at end of used mirror object section.
 }
 
 size_t ImageWriter::ImageInfo::CreateImageSections(ImageSection* out_sections) const {
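One small fix in the hunks above: the kBinDexCacheArray bin is now aligned with DexCacheArraysLayout::Alignment(target_ptr_size_) instead of an alignment computed for the host. A sketch of why the target pointer size matters when cross-compiling (RoundUp shown with its usual power-of-two contract; the other names are hypothetical):

```cpp
#include <cstddef>

enum class PointerSize : size_t { k32 = 4, k64 = 8 };

// Round `x` up to a multiple of `n`; `n` must be a power of two.
constexpr size_t RoundUp(size_t x, size_t n) { return (x + n - 1) & ~(n - 1); }

// A 64-bit dex2oat producing a 32-bit image must align for the target's
// pointer size, not sizeof(void*) on the host, or native pointer arrays end
// up over- or under-aligned for the device.
constexpr size_t DexCacheArrayAlignment(PointerSize target) {
  return static_cast<size_t>(target);
}

static_assert(RoundUp(10, DexCacheArrayAlignment(PointerSize::k32)) == 12, "");
static_assert(RoundUp(10, DexCacheArrayAlignment(PointerSize::k64)) == 16, "");
```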
@@ -1686,7 +1772,6 @@ size_t ImageWriter::ImageInfo::CreateImageSections(ImageSection* out_sections) const {
   ImageSection* dex_cache_arrays_section = &out_sections[ImageHeader::kSectionDexCacheArrays];
   *dex_cache_arrays_section = ImageSection(bin_slot_offsets_[kBinDexCacheArray],
                                            bin_slot_sizes_[kBinDexCacheArray]);
-  // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
   size_t cur_pos = RoundUp(dex_cache_arrays_section->End(), sizeof(uint64_t));
   // Calculate the size of the interned strings.
@@ -1778,18 +1863,18 @@ class ImageWriter::FixupRootVisitor : public RootVisitor {
   explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {
   }
 
-  void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
+  void VisitRoots(mirror::Object*** roots ATTRIBUTE_UNUSED,
+                  size_t count ATTRIBUTE_UNUSED,
+                  const RootInfo& info ATTRIBUTE_UNUSED)
       OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
-    for (size_t i = 0; i < count; ++i) {
-      *roots[i] = image_writer_->GetImageAddress(*roots[i]);
-    }
+    LOG(FATAL) << "Unsupported";
   }
 
   void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                   const RootInfo& info ATTRIBUTE_UNUSED)
       OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
     for (size_t i = 0; i < count; ++i) {
-      roots[i]->Assign(image_writer_->GetImageAddress(roots[i]->AsMirrorPtr()));
+      image_writer_->CopyReference(roots[i], roots[i]->AsMirrorPtr());
     }
   }
@@ -1800,7 +1885,9 @@ class ImageWriter::FixupRootVisitor : public RootVisitor {
 void ImageWriter::CopyAndFixupImTable(ImTable* orig, ImTable* copy) {
   for (size_t i = 0; i < ImTable::kSize; ++i) {
     ArtMethod* method = orig->Get(i, target_ptr_size_);
-    copy->Set(i, NativeLocationInImage(method), target_ptr_size_);
+    void** address = reinterpret_cast<void**>(copy->AddressOfElement(i, target_ptr_size_));
+    CopyAndFixupPointer(address, method);
+    DCHECK_EQ(copy->Get(i, target_ptr_size_), NativeLocationInImage(method));
   }
 }
@@ -1809,10 +1896,13 @@ void ImageWriter::CopyAndFixupImtConflictTable(ImtConflictTable* orig, ImtConflictTable* copy) {
   for (size_t i = 0; i < count; ++i) {
     ArtMethod* interface_method = orig->GetInterfaceMethod(i, target_ptr_size_);
     ArtMethod* implementation_method = orig->GetImplementationMethod(i, target_ptr_size_);
-    copy->SetInterfaceMethod(i, target_ptr_size_, NativeLocationInImage(interface_method));
-    copy->SetImplementationMethod(i,
-                                  target_ptr_size_,
-                                  NativeLocationInImage(implementation_method));
+    CopyAndFixupPointer(copy->AddressOfInterfaceMethod(i, target_ptr_size_), interface_method);
+    CopyAndFixupPointer(copy->AddressOfImplementationMethod(i, target_ptr_size_),
+                        implementation_method);
+    DCHECK_EQ(copy->GetInterfaceMethod(i, target_ptr_size_),
+              NativeLocationInImage(interface_method));
+    DCHECK_EQ(copy->GetImplementationMethod(i, target_ptr_size_),
+              NativeLocationInImage(implementation_method));
   }
 }
@@ -1831,8 +1921,9 @@ void ImageWriter::CopyAndFixupNativeData(size_t oat_index) {
     switch (relocation.type) {
       case kNativeObjectRelocationTypeArtField: {
         memcpy(dest, pair.first, sizeof(ArtField));
-        reinterpret_cast<ArtField*>(dest)->SetDeclaringClass(
-            GetImageAddress(reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass().Ptr()));
+        CopyReference(
+            reinterpret_cast<ArtField*>(dest)->GetDeclaringClassAddressWithoutBarrier(),
+            reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass().Ptr());
         break;
       }
       case kNativeObjectRelocationTypeRuntimeMethod:
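The ImTable and conflict-table hunks switch from computing the relocated pointer and calling a setter to handing the destination slot to CopyAndFixupPointer(), then re-reading the slot in a debug assert. A stripped-down illustration of that write-then-verify shape (all names here are stand-ins, not the ART API):

```cpp
#include <cassert>

// Stand-in for NativeLocationInImage(): map a native pointer to its address
// in the image being written. Identity here just to keep the sketch runnable.
inline void* NativeLocation(void* orig) { return orig; }

// Single choke point for native-pointer writes; in the real code this is
// also where target pointer width and relocation lookups are handled.
inline void CopyAndFixupPointer(void** slot, void* orig) {
  *slot = NativeLocation(orig);
}

inline void CopyTableSlot(void** copy_slot, void* orig_value) {
  CopyAndFixupPointer(copy_slot, orig_value);
  assert(*copy_slot == NativeLocation(orig_value));  // the DCHECK_EQ analogue
}
```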
@@ -1924,9 +2015,8 @@ void ImageWriter::CopyAndFixupNativeData(size_t oat_index) {
       // above comment for intern tables.
       ClassTable temp_class_table;
       temp_class_table.ReadFromMemory(class_table_memory_ptr);
-      ObjPtr<mirror::ClassLoader> class_loader = GetClassLoader();
-      CHECK_EQ(temp_class_table.NumZygoteClasses(class_loader),
-               table->NumNonZygoteClasses(class_loader) + table->NumZygoteClasses(class_loader));
+      CHECK_EQ(temp_class_table.NumReferencedZygoteClasses(),
+               table->NumReferencedNonZygoteClasses() + table->NumReferencedZygoteClasses());
       UnbufferedRootVisitor visitor(&root_visitor, RootInfo(kRootUnknown));
       temp_class_table.VisitRoots(visitor);
     }
@@ -1950,8 +2040,10 @@ void ImageWriter::CopyAndFixupObjectsCallback(Object* obj, void* arg) {
   reinterpret_cast<ImageWriter*>(arg)->CopyAndFixupObject(obj);
 }
 
-void ImageWriter::FixupPointerArray(mirror::Object* dst, mirror::PointerArray* arr,
-                                    mirror::Class* klass, Bin array_type) {
+void ImageWriter::FixupPointerArray(mirror::Object* dst,
+                                    mirror::PointerArray* arr,
+                                    mirror::Class* klass,
+                                    Bin array_type) {
   CHECK(klass->IsArrayClass());
   CHECK(arr->IsIntArray() || arr->IsLongArray()) << klass->PrettyClass() << " " << arr;
   // Fixup int and long pointers for the ArtMethod or ArtField arrays.
@@ -1960,7 +2052,7 @@ void ImageWriter::FixupPointerArray(mirror::Object* dst,
   auto* dest_array = down_cast<mirror::PointerArray*>(dst);
   for (size_t i = 0, count = num_elements; i < count; ++i) {
     void* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
-    if (elem != nullptr && !IsInBootImage(elem)) {
+    if (kIsDebugBuild && elem != nullptr && !IsInBootImage(elem)) {
       auto it = native_object_relocations_.find(elem);
       if (UNLIKELY(it == native_object_relocations_.end())) {
         if (it->second.IsArtMethodRelocation()) {
@@ -1976,12 +2068,9 @@ void ImageWriter::FixupPointerArray(mirror::Object* dst,
               << Class::PrettyClass(field->GetDeclaringClass());
         }
         UNREACHABLE();
-      } else {
-        ImageInfo& image_info = GetImageInfo(it->second.oat_index);
-        elem = image_info.image_begin_ + it->second.offset;
       }
     }
-    dest_array->SetElementPtrSize<false, true>(i, elem, target_ptr_size_);
+    CopyAndFixupPointer(dest_array->ElementAddress(i, target_ptr_size_), elem);
   }
 }
@@ -2029,22 +2118,19 @@ class ImageWriter::FixupVisitor {
   void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
-      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
+      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
     ObjPtr<Object> ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
-    // Use SetFieldObjectWithoutWriteBarrier to avoid card marking since we are writing to the
-    // image.
-    copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
-        offset,
-        image_writer_->GetImageAddress(ref.Ptr()));
+    // Copy the reference and record the fixup if necessary.
+    image_writer_->CopyReference(
+        copy_->GetFieldObjectReferenceAddr<kVerifyNone>(offset),
+        ref.Ptr());
   }
 
   // java.lang.ref.Reference visitor.
   void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
                   ObjPtr<mirror::Reference> ref) const
       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
-    copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
-        mirror::Reference::ReferentOffset(),
-        image_writer_->GetImageAddress(ref->GetReferent()));
+    operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
   }
 
  protected:
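After the FixupVisitor hunk, every mirror reference stored into the image copy funnels through ImageWriter::CopyReference(), and the Reference.referent special case simply delegates to the generic field operator. The deleted comment explains the other half of the pattern: the stores skip the write barrier because the destination is raw image memory being serialized, not a live heap object the GC tracks. A compressed illustration (types simplified; the fixed relocation delta is an assumption for the sketch):

```cpp
#include <cstdint>

struct Obj;  // opaque mirror object

struct ImageWriterSketch {
  intptr_t relocation_delta;  // pretend every object moves by a fixed delta

  Obj* ImageAddress(Obj* ref) const {
    return ref == nullptr
        ? nullptr
        : reinterpret_cast<Obj*>(reinterpret_cast<intptr_t>(ref) + relocation_delta);
  }

  // One store site for all reference fields, referents included. No card
  // marking / write barrier: the destination is a byte buffer being written
  // out, not a heap object.
  void CopyReference(Obj** dest, Obj* ref) const { *dest = ImageAddress(ref); }
};
```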
@@ -2122,7 +2208,10 @@ class ImageWriter::NativeLocationVisitor {
   explicit NativeLocationVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}
 
   template <typename T>
-  T* operator()(T* ptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
+  T* operator()(T* ptr, void** dest_addr = nullptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
+    if (dest_addr != nullptr) {
+      image_writer_->CopyAndFixupPointer(dest_addr, ptr);
+    }
     return image_writer_->NativeLocationInImage(ptr);
   }
@@ -2185,10 +2274,10 @@ void ImageWriter::FixupObject(Object* orig, Object* copy) {
   }
 }
 
-
-class ImageAddressVisitor {
+class ImageWriter::ImageAddressVisitorForDexCacheArray {
  public:
-  explicit ImageAddressVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}
+  explicit ImageAddressVisitorForDexCacheArray(ImageWriter* image_writer)
+      : image_writer_(image_writer) {}
 
   template <typename T>
   T* operator()(T* ptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
@@ -2199,9 +2288,9 @@ class ImageWriter::ImageAddressVisitorForDexCacheArray {
   ImageWriter* const image_writer_;
 };
 
-
 void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
                                 mirror::DexCache* copy_dex_cache) {
+  ImageAddressVisitorForDexCacheArray fixup_visitor(this);
   // Though the DexCache array fields are usually treated as native pointers, we set the full
   // 64-bit values here, clearing the top 32 bits for 32-bit targets. The zero-extension is
   // done by casting to the unsigned type uintptr_t before casting to int64_t, i.e.
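The comment opening FixupDexCache() describes the zero-extension of native pointers stored into 64-bit DexCache fields: cast through the unsigned uintptr_t first, then widen to int64_t. A two-line demonstration of the point:

```cpp
#include <cstdint>

// Widening a 32-bit pointer for a 64-bit field: going through the unsigned
// uintptr_t zero-extends, so 0x80000000 becomes 0x0000000080000000 rather
// than the sign-extended 0xffffffff80000000.
int64_t EncodeNativePointer(const void* ptr) {
  return static_cast<int64_t>(reinterpret_cast<uintptr_t>(ptr));
}
```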
@@ -2211,8 +2300,7 @@ void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
     copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::StringsOffset(),
                                                NativeLocationInImage(orig_strings),
                                                PointerSize::k64);
-    orig_dex_cache->FixupStrings(NativeCopyLocation(orig_strings, orig_dex_cache),
-                                 ImageAddressVisitor(this));
+    orig_dex_cache->FixupStrings(NativeCopyLocation(orig_strings, orig_dex_cache), fixup_visitor);
   }
   mirror::TypeDexCacheType* orig_types = orig_dex_cache->GetResolvedTypes();
   if (orig_types != nullptr) {
@@ -2220,7 +2308,7 @@ void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
                                                NativeLocationInImage(orig_types),
                                                PointerSize::k64);
     orig_dex_cache->FixupResolvedTypes(NativeCopyLocation(orig_types, orig_dex_cache),
-                                       ImageAddressVisitor(this));
+                                       fixup_visitor);
   }
   ArtMethod** orig_methods = orig_dex_cache->GetResolvedMethods();
   if (orig_methods != nullptr) {
@@ -2235,16 +2323,18 @@ void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
       mirror::DexCache::SetElementPtrSize(copy_methods, i, copy, target_ptr_size_);
     }
   }
-  ArtField** orig_fields = orig_dex_cache->GetResolvedFields();
+  mirror::FieldDexCacheType* orig_fields = orig_dex_cache->GetResolvedFields();
   if (orig_fields != nullptr) {
     copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedFieldsOffset(),
                                                NativeLocationInImage(orig_fields),
                                                PointerSize::k64);
-    ArtField** copy_fields = NativeCopyLocation(orig_fields, orig_dex_cache);
+    mirror::FieldDexCacheType* copy_fields = NativeCopyLocation(orig_fields, orig_dex_cache);
     for (size_t i = 0, num = orig_dex_cache->NumResolvedFields(); i != num; ++i) {
-      ArtField* orig = mirror::DexCache::GetElementPtrSize(orig_fields, i, target_ptr_size_);
-      ArtField* copy = NativeLocationInImage(orig);
-      mirror::DexCache::SetElementPtrSize(copy_fields, i, copy, target_ptr_size_);
+      mirror::FieldDexCachePair orig =
+          mirror::DexCache::GetNativePairPtrSize(orig_fields, i, target_ptr_size_);
+      mirror::FieldDexCachePair copy = orig;
+      copy.object = NativeLocationInImage(orig.object);
+      mirror::DexCache::SetNativePairPtrSize(copy_fields, i, copy, target_ptr_size_);
     }
   }
   mirror::MethodTypeDexCacheType* orig_method_types = orig_dex_cache->GetResolvedMethodTypes();
@@ -2253,7 +2343,7 @@ void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
                                                NativeLocationInImage(orig_method_types),
                                                PointerSize::k64);
     orig_dex_cache->FixupResolvedMethodTypes(NativeCopyLocation(orig_method_types, orig_dex_cache),
-                                             ImageAddressVisitor(this));
+                                             fixup_visitor);
   }
   GcRoot<mirror::CallSite>* orig_call_sites = orig_dex_cache->GetResolvedCallSites();
   if (orig_call_sites != nullptr) {
@@ -2261,7 +2351,7 @@ void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
                                                NativeLocationInImage(orig_call_sites),
                                                PointerSize::k64);
     orig_dex_cache->FixupResolvedCallSites(NativeCopyLocation(orig_call_sites, orig_dex_cache),
-                                           ImageAddressVisitor(this));
+                                           fixup_visitor);
   }
 
   // Remove the DexFile pointers. They will be fixed up when the runtime loads the oat file. Leaving
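The resolved-fields hunk tracks DexCache's move from plain ArtField* elements to index/pointer pairs (mirror::FieldDexCacheType): copying a slot keeps the stored dex index and rewrites only the pointer half. Roughly, with a template standing in for the real NativeDexCachePair:

```cpp
#include <cstddef>

// Simplified stand-in for ART's NativeDexCachePair<T>: the dex index rides
// along with the pointer so a slot can be validated against the index that
// was used to fill it.
template <typename T>
struct IndexedPair {
  T* object;
  size_t index;
};

template <typename T, typename Relocate>
IndexedPair<T> CopyPair(const IndexedPair<T>& orig, Relocate native_location) {
  IndexedPair<T> copy = orig;                   // index is copied unchanged
  copy.object = native_location(orig.object);  // only the pointer is fixed up
  return copy;
}
```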
@@ -2369,7 +2459,8 @@ void ImageWriter::CopyAndFixupMethod(ArtMethod* orig,
 
   memcpy(copy, orig, ArtMethod::Size(target_ptr_size_));
 
-  copy->SetDeclaringClass(GetImageAddress(orig->GetDeclaringClassUnchecked()));
+  CopyReference(copy->GetDeclaringClassAddressWithoutBarrier(), orig->GetDeclaringClassUnchecked());
+
   ArtMethod** orig_resolved_methods = orig->GetDexCacheResolvedMethods(target_ptr_size_);
   copy->SetDexCacheResolvedMethods(NativeLocationInImage(orig_resolved_methods), target_ptr_size_);
@@ -2481,7 +2572,7 @@ size_t ImageWriter::GetOatIndex(mirror::Object* obj) const {
     return GetDefaultOatIndex();
   }
   auto it = oat_index_map_.find(obj);
-  DCHECK(it != oat_index_map_.end());
+  DCHECK(it != oat_index_map_.end()) << obj;
   return it->second;
 }
@@ -2582,4 +2673,31 @@ ImageWriter::ImageInfo::ImageInfo()
     : intern_table_(new InternTable),
       class_table_(new ClassTable) {}
 
+void ImageWriter::CopyReference(mirror::HeapReference<mirror::Object>* dest,
+                                ObjPtr<mirror::Object> src) {
+  dest->Assign(GetImageAddress(src.Ptr()));
+}
+
+void ImageWriter::CopyReference(mirror::CompressedReference<mirror::Object>* dest,
+                                ObjPtr<mirror::Object> src) {
+  dest->Assign(GetImageAddress(src.Ptr()));
+}
+
+void ImageWriter::CopyAndFixupPointer(void** target, void* value) {
+  void* new_value = value;
+  if (value != nullptr && !IsInBootImage(value)) {
+    auto it = native_object_relocations_.find(value);
+    CHECK(it != native_object_relocations_.end()) << value;
+    const NativeObjectRelocation& relocation = it->second;
+    ImageInfo& image_info = GetImageInfo(relocation.oat_index);
+    new_value = reinterpret_cast<void*>(image_info.image_begin_ + relocation.offset);
+  }
+  if (target_ptr_size_ == PointerSize::k32) {
+    *reinterpret_cast<uint32_t*>(target) = PointerToLowMemUInt32(new_value);
+  } else {
+    *reinterpret_cast<uint64_t*>(target) = reinterpret_cast<uintptr_t>(new_value);
+  }
+}
+
 }  // namespace art
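The new CopyAndFixupPointer() closes the diff with the width dispatch: 64-bit targets store the relocated address directly, while 32-bit targets go through PointerToLowMemUInt32(), which only makes sense if image addresses fit in 32 bits. A sketch of that narrowing check (the assert encodes the assumed invariant; ART's own helper is the authoritative version):

```cpp
#include <cassert>
#include <cstdint>

uint32_t ToLowMemUInt32(const void* p) {
  uintptr_t v = reinterpret_cast<uintptr_t>(p);
  // A 32-bit slot can only hold the pointer if the image was mapped below
  // 4 GiB; otherwise this narrowing would silently corrupt the reference.
  assert(v == static_cast<uint32_t>(v));
  return static_cast<uint32_t>(v);
}
```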