 compiler/image_writer.cc        | 154
 compiler/image_writer.h         |  22
 patchoat/patchoat.cc            |   2
 runtime/art_field.h             |   4
 runtime/art_method.h            |   4
 runtime/class_table.h           |   1
 runtime/gc/space/image_space.cc |   5
 runtime/imt_conflict_table.h    |  16
 runtime/imtable.h               |   8
 runtime/mirror/array.h          |   7
 runtime/mirror/class-inl.h      |   4
 11 files changed, 156 insertions, 71 deletions
diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc
index d156644484..d129249d63 100644
--- a/compiler/image_writer.cc
+++ b/compiler/image_writer.cc
@@ -1338,21 +1338,20 @@ mirror::Object* ImageWriter::TryAssignBinSlot(WorkStack& work_stack,
       // live.
       if (as_klass->ShouldHaveImt()) {
         ImTable* imt = as_klass->GetImt(target_ptr_size_);
-        for (size_t i = 0; i < ImTable::kSize; ++i) {
-          ArtMethod* imt_method = imt->Get(i, target_ptr_size_);
-          DCHECK(imt_method != nullptr);
-          if (imt_method->IsRuntimeMethod() &&
-              !IsInBootImage(imt_method) &&
-              !NativeRelocationAssigned(imt_method)) {
-            AssignMethodOffset(imt_method, kNativeObjectRelocationTypeRuntimeMethod, oat_index);
+        if (TryAssignImTableOffset(imt, oat_index)) {
+          // Since imt's can be shared only do this the first time to not double count imt method
+          // fixups.
+          for (size_t i = 0; i < ImTable::kSize; ++i) {
+            ArtMethod* imt_method = imt->Get(i, target_ptr_size_);
+            DCHECK(imt_method != nullptr);
+            if (imt_method->IsRuntimeMethod() &&
+                !IsInBootImage(imt_method) &&
+                !NativeRelocationAssigned(imt_method)) {
+              AssignMethodOffset(imt_method, kNativeObjectRelocationTypeRuntimeMethod, oat_index);
+            }
           }
         }
       }
-
-      if (as_klass->ShouldHaveImt()) {
-        ImTable* imt = as_klass->GetImt(target_ptr_size_);
-        TryAssignImTableOffset(imt, oat_index);
-      }
     } else if (obj->IsClassLoader()) {
       // Register the class loader if it has a class table.
       // The fake boot class loader should not get registered and we should end up with only one
@@ -1386,10 +1385,10 @@ bool ImageWriter::NativeRelocationAssigned(void* ptr) const {
   return native_object_relocations_.find(ptr) != native_object_relocations_.end();
 }
 
-void ImageWriter::TryAssignImTableOffset(ImTable* imt, size_t oat_index) {
+bool ImageWriter::TryAssignImTableOffset(ImTable* imt, size_t oat_index) {
   // No offset, or already assigned.
   if (imt == nullptr || IsInBootImage(imt) || NativeRelocationAssigned(imt)) {
-    return;
+    return false;
   }
   // If the method is a conflict method we also want to assign the conflict table offset.
   ImageInfo& image_info = GetImageInfo(oat_index);
@@ -1401,6 +1400,7 @@ void ImageWriter::TryAssignImTableOffset(ImTable* imt, size_t oat_index) {
       image_info.bin_slot_sizes_[kBinImTable],
       kNativeObjectRelocationTypeIMTable});
   image_info.bin_slot_sizes_[kBinImTable] += size;
+  return true;
 }
 
 void ImageWriter::TryAssignConflictTableOffset(ImtConflictTable* table, size_t oat_index) {
@@ -1499,8 +1499,7 @@ class ImageWriter::VisitReferencesVisitor {
   ALWAYS_INLINE void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
                                  ObjPtr<mirror::Reference> ref) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
-    ref->SetReferent</*kTransactionActive*/false>(
-        VisitReference(ref->GetReferent<kWithoutReadBarrier>()));
+    operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
   }
 
  private:
@@ -1658,7 +1657,7 @@ void ImageWriter::CalculateNewObjectOffsets() {
   // Calculate size of the dex cache arrays slot and prepare offsets.
   PrepareDexCacheArraySlots();
 
-  // Calculate the sizes of the intern tables and class tables.
+  // Calculate the sizes of the intern tables, class tables, and fixup tables.
   for (ImageInfo& image_info : image_infos_) {
     // Calculate how big the intern table will be after being serialized.
     InternTable* const intern_table = image_info.intern_table_.get();
@@ -1666,6 +1665,7 @@
     if (intern_table->StrongSize() != 0u) {
       image_info.intern_table_bytes_ = intern_table->WriteToMemory(nullptr);
     }
+    // Calculate the size of the class table.
     ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
     DCHECK_EQ(image_info.class_table_->NumReferencedZygoteClasses(), 0u);
@@ -1718,8 +1718,6 @@
   // Transform each object's bin slot into an offset which will be used to do the final copy.
   heap->VisitObjects(UnbinObjectsIntoOffsetCallback, this);
 
-  // DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);
-
   size_t i = 0;
   for (ImageInfo& image_info : image_infos_) {
     image_info.image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots[i].Get()));
@@ -1733,8 +1731,6 @@
     ImageInfo& image_info = GetImageInfo(relocation.oat_index);
     relocation.offset += image_info.bin_slot_offsets_[bin_type];
   }
-
-  // Note that image_info.image_end_ is left at end of used mirror object section.
 }
 
 size_t ImageWriter::ImageInfo::CreateImageSections(ImageSection* out_sections) const {
@@ -1776,7 +1772,6 @@ size_t ImageWriter::ImageInfo::CreateImageSections(ImageSection* out_sections) c
   ImageSection* dex_cache_arrays_section = &out_sections[ImageHeader::kSectionDexCacheArrays];
   *dex_cache_arrays_section = ImageSection(bin_slot_offsets_[kBinDexCacheArray],
                                            bin_slot_sizes_[kBinDexCacheArray]);
-
   // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
   size_t cur_pos = RoundUp(dex_cache_arrays_section->End(), sizeof(uint64_t));
   // Calculate the size of the interned strings.
@@ -1868,18 +1863,18 @@ class ImageWriter::FixupRootVisitor : public RootVisitor {
   explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {
   }
 
-  void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
+  void VisitRoots(mirror::Object*** roots ATTRIBUTE_UNUSED,
+                  size_t count ATTRIBUTE_UNUSED,
+                  const RootInfo& info ATTRIBUTE_UNUSED)
       OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
-    for (size_t i = 0; i < count; ++i) {
-      *roots[i] = image_writer_->GetImageAddress(*roots[i]);
-    }
+    LOG(FATAL) << "Unsupported";
   }
 
   void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                   const RootInfo& info ATTRIBUTE_UNUSED)
       OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
     for (size_t i = 0; i < count; ++i) {
-      roots[i]->Assign(image_writer_->GetImageAddress(roots[i]->AsMirrorPtr()));
+      image_writer_->CopyReference(roots[i], roots[i]->AsMirrorPtr());
     }
   }
@@ -1890,7 +1885,9 @@ class ImageWriter::FixupRootVisitor : public RootVisitor {
 void ImageWriter::CopyAndFixupImTable(ImTable* orig, ImTable* copy) {
   for (size_t i = 0; i < ImTable::kSize; ++i) {
     ArtMethod* method = orig->Get(i, target_ptr_size_);
-    copy->Set(i, NativeLocationInImage(method), target_ptr_size_);
+    void** address = reinterpret_cast<void**>(copy->AddressOfElement(i, target_ptr_size_));
+    CopyAndFixupPointer(address, method);
+    DCHECK_EQ(copy->Get(i, target_ptr_size_), NativeLocationInImage(method));
   }
 }
@@ -1899,10 +1896,13 @@ void ImageWriter::CopyAndFixupImtConflictTable(ImtConflictTable* orig, ImtConfli
   for (size_t i = 0; i < count; ++i) {
     ArtMethod* interface_method = orig->GetInterfaceMethod(i, target_ptr_size_);
     ArtMethod* implementation_method = orig->GetImplementationMethod(i, target_ptr_size_);
-    copy->SetInterfaceMethod(i, target_ptr_size_, NativeLocationInImage(interface_method));
-    copy->SetImplementationMethod(i,
-                                  target_ptr_size_,
-                                  NativeLocationInImage(implementation_method));
+    CopyAndFixupPointer(copy->AddressOfInterfaceMethod(i, target_ptr_size_), interface_method);
+    CopyAndFixupPointer(copy->AddressOfImplementationMethod(i, target_ptr_size_),
+                        implementation_method);
+    DCHECK_EQ(copy->GetInterfaceMethod(i, target_ptr_size_),
+              NativeLocationInImage(interface_method));
+    DCHECK_EQ(copy->GetImplementationMethod(i, target_ptr_size_),
+              NativeLocationInImage(implementation_method));
   }
 }
@@ -1921,8 +1921,9 @@ void ImageWriter::CopyAndFixupNativeData(size_t oat_index) {
     switch (relocation.type) {
       case kNativeObjectRelocationTypeArtField: {
         memcpy(dest, pair.first, sizeof(ArtField));
-        reinterpret_cast<ArtField*>(dest)->SetDeclaringClass(
-            GetImageAddress(reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass().Ptr()));
+        CopyReference(
+            reinterpret_cast<ArtField*>(dest)->GetDeclaringClassAddressWithoutBarrier(),
+            reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass().Ptr());
         break;
       }
       case kNativeObjectRelocationTypeRuntimeMethod:
@@ -2039,8 +2040,10 @@ void ImageWriter::CopyAndFixupObjectsCallback(Object* obj, void* arg) {
   reinterpret_cast<ImageWriter*>(arg)->CopyAndFixupObject(obj);
 }
 
-void ImageWriter::FixupPointerArray(mirror::Object* dst, mirror::PointerArray* arr,
-                                    mirror::Class* klass, Bin array_type) {
+void ImageWriter::FixupPointerArray(mirror::Object* dst,
+                                    mirror::PointerArray* arr,
+                                    mirror::Class* klass,
+                                    Bin array_type) {
   CHECK(klass->IsArrayClass());
   CHECK(arr->IsIntArray() || arr->IsLongArray()) << klass->PrettyClass() << " " << arr;
   // Fixup int and long pointers for the ArtMethod or ArtField arrays.
@@ -2049,7 +2052,7 @@ void ImageWriter::FixupPointerArray(mirror::Object* dst, mirror::PointerArray* a
   auto* dest_array = down_cast<mirror::PointerArray*>(dst);
   for (size_t i = 0, count = num_elements; i < count; ++i) {
     void* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
-    if (elem != nullptr && !IsInBootImage(elem)) {
+    if (kIsDebugBuild && elem != nullptr && !IsInBootImage(elem)) {
       auto it = native_object_relocations_.find(elem);
       if (UNLIKELY(it == native_object_relocations_.end())) {
         if (it->second.IsArtMethodRelocation()) {
@@ -2065,12 +2068,9 @@
                      << Class::PrettyClass(field->GetDeclaringClass());
         }
         UNREACHABLE();
-      } else {
-        ImageInfo& image_info = GetImageInfo(it->second.oat_index);
-        elem = image_info.image_begin_ + it->second.offset;
       }
     }
-    dest_array->SetElementPtrSize<false, true>(i, elem, target_ptr_size_);
+    CopyAndFixupPointer(dest_array->ElementAddress(i, target_ptr_size_), elem);
   }
 }
@@ -2118,22 +2118,19 @@ class ImageWriter::FixupVisitor {
   void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
-      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
+      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
     ObjPtr<Object> ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
-    // Use SetFieldObjectWithoutWriteBarrier to avoid card marking since we are writing to the
-    // image.
-    copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
-        offset,
-        image_writer_->GetImageAddress(ref.Ptr()));
+    // Copy the reference and record the fixup if necessary.
+    image_writer_->CopyReference(
+        copy_->GetFieldObjectReferenceAddr<kVerifyNone>(offset),
+        ref.Ptr());
   }
 
   // java.lang.ref.Reference visitor.
   void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
                   ObjPtr<mirror::Reference> ref) const
       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
-    copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
-        mirror::Reference::ReferentOffset(),
-        image_writer_->GetImageAddress(ref->GetReferent()));
+    operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
   }
 
  protected:
@@ -2211,7 +2208,10 @@ class ImageWriter::NativeLocationVisitor {
   explicit NativeLocationVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}
 
   template <typename T>
-  T* operator()(T* ptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
+  T* operator()(T* ptr, void** dest_addr = nullptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
+    if (dest_addr != nullptr) {
+      image_writer_->CopyAndFixupPointer(dest_addr, ptr);
+    }
     return image_writer_->NativeLocationInImage(ptr);
   }
@@ -2274,10 +2274,10 @@ void ImageWriter::FixupObject(Object* orig, Object* copy) {
     }
   }
 }
 
-
-class ImageAddressVisitor {
+class ImageWriter::ImageAddressVisitorForDexCacheArray {
  public:
-  explicit ImageAddressVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}
+  explicit ImageAddressVisitorForDexCacheArray(ImageWriter* image_writer)
+      : image_writer_(image_writer) {}
 
   template <typename T>
   T* operator()(T* ptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
@@ -2288,9 +2288,9 @@ class ImageAddressVisitor {
   ImageWriter* const image_writer_;
 };
 
-
 void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
                                 mirror::DexCache* copy_dex_cache) {
+  ImageAddressVisitorForDexCacheArray fixup_visitor(this);
   // Though the DexCache array fields are usually treated as native pointers, we set the full
   // 64-bit values here, clearing the top 32 bits for 32-bit targets. The zero-extension is
   // done by casting to the unsigned type uintptr_t before casting to int64_t, i.e.
@@ -2300,8 +2300,7 @@ void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
     copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::StringsOffset(),
                                                NativeLocationInImage(orig_strings),
                                                PointerSize::k64);
-    orig_dex_cache->FixupStrings(NativeCopyLocation(orig_strings, orig_dex_cache),
-                                 ImageAddressVisitor(this));
+    orig_dex_cache->FixupStrings(NativeCopyLocation(orig_strings, orig_dex_cache), fixup_visitor);
   }
   mirror::TypeDexCacheType* orig_types = orig_dex_cache->GetResolvedTypes();
   if (orig_types != nullptr) {
@@ -2309,7 +2308,7 @@
                                                NativeLocationInImage(orig_types),
                                                PointerSize::k64);
     orig_dex_cache->FixupResolvedTypes(NativeCopyLocation(orig_types, orig_dex_cache),
-                                       ImageAddressVisitor(this));
+                                       fixup_visitor);
   }
   ArtMethod** orig_methods = orig_dex_cache->GetResolvedMethods();
   if (orig_methods != nullptr) {
@@ -2333,7 +2332,8 @@ void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
     for (size_t i = 0, num = orig_dex_cache->NumResolvedFields(); i != num; ++i) {
       mirror::FieldDexCachePair orig =
           mirror::DexCache::GetNativePairPtrSize(orig_fields, i, target_ptr_size_);
-      mirror::FieldDexCachePair copy(NativeLocationInImage(orig.object), orig.index);
+      mirror::FieldDexCachePair copy = orig;
+      copy.object = NativeLocationInImage(orig.object);
       mirror::DexCache::SetNativePairPtrSize(copy_fields, i, copy, target_ptr_size_);
     }
   }
@@ -2343,7 +2343,7 @@ void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
                                                NativeLocationInImage(orig_method_types),
                                                PointerSize::k64);
     orig_dex_cache->FixupResolvedMethodTypes(NativeCopyLocation(orig_method_types, orig_dex_cache),
-                                             ImageAddressVisitor(this));
+                                             fixup_visitor);
   }
   GcRoot<mirror::CallSite>* orig_call_sites = orig_dex_cache->GetResolvedCallSites();
   if (orig_call_sites != nullptr) {
@@ -2351,7 +2351,7 @@ void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
                                                NativeLocationInImage(orig_call_sites),
                                                PointerSize::k64);
     orig_dex_cache->FixupResolvedCallSites(NativeCopyLocation(orig_call_sites, orig_dex_cache),
-                                           ImageAddressVisitor(this));
+                                           fixup_visitor);
   }
 
   // Remove the DexFile pointers. They will be fixed up when the runtime loads the oat file. Leaving
@@ -2459,7 +2459,8 @@ void ImageWriter::CopyAndFixupMethod(ArtMethod* orig,
   memcpy(copy, orig, ArtMethod::Size(target_ptr_size_));
 
-  copy->SetDeclaringClass(GetImageAddress(orig->GetDeclaringClassUnchecked()));
+  CopyReference(copy->GetDeclaringClassAddressWithoutBarrier(), orig->GetDeclaringClassUnchecked());
+
   ArtMethod** orig_resolved_methods = orig->GetDexCacheResolvedMethods(target_ptr_size_);
   copy->SetDexCacheResolvedMethods(NativeLocationInImage(orig_resolved_methods), target_ptr_size_);
@@ -2571,7 +2572,7 @@ size_t ImageWriter::GetOatIndex(mirror::Object* obj) const {
     return GetDefaultOatIndex();
   }
   auto it = oat_index_map_.find(obj);
-  DCHECK(it != oat_index_map_.end());
+  DCHECK(it != oat_index_map_.end()) << obj;
   return it->second;
 }
@@ -2672,4 +2673,31 @@ ImageWriter::ImageInfo::ImageInfo()
     : intern_table_(new InternTable),
       class_table_(new ClassTable) {}
 
+void ImageWriter::CopyReference(mirror::HeapReference<mirror::Object>* dest,
+                                ObjPtr<mirror::Object> src) {
+  dest->Assign(GetImageAddress(src.Ptr()));
+}
+
+void ImageWriter::CopyReference(mirror::CompressedReference<mirror::Object>* dest,
+                                ObjPtr<mirror::Object> src) {
+  dest->Assign(GetImageAddress(src.Ptr()));
+}
+
+void ImageWriter::CopyAndFixupPointer(void** target, void* value) {
+  void* new_value = value;
+  if (value != nullptr && !IsInBootImage(value)) {
+    auto it = native_object_relocations_.find(value);
+    CHECK(it != native_object_relocations_.end()) << value;
+    const NativeObjectRelocation& relocation = it->second;
+    ImageInfo& image_info = GetImageInfo(relocation.oat_index);
+    new_value = reinterpret_cast<void*>(image_info.image_begin_ + relocation.offset);
+  }
+  if (target_ptr_size_ == PointerSize::k32) {
+    *reinterpret_cast<uint32_t*>(target) = PointerToLowMemUInt32(new_value);
+  } else {
+    *reinterpret_cast<uint64_t*>(target) = reinterpret_cast<uintptr_t>(new_value);
+  }
+}
+
+
 }  // namespace art
diff --git a/compiler/image_writer.h b/compiler/image_writer.h
index 16aff61dab..39113c8143 100644
--- a/compiler/image_writer.h
+++ b/compiler/image_writer.h
@@ -38,8 +38,9 @@
 #include "image.h"
 #include "lock_word.h"
 #include "mem_map.h"
-#include "oat_file.h"
 #include "mirror/dex_cache.h"
+#include "obj_ptr.h"
+#include "oat_file.h"
 #include "os.h"
 #include "safe_map.h"
 #include "utils.h"
@@ -317,6 +318,12 @@ class ImageWriter FINAL {
     // Number of image class table bytes.
     size_t class_table_bytes_ = 0;
 
+    // Number of object fixup bytes.
+    size_t object_fixup_bytes_ = 0;
+
+    // Number of pointer fixup bytes.
+    size_t pointer_fixup_bytes_ = 0;
+
     // Intern table associated with this image for serialization.
     std::unique_ptr<InternTable> intern_table_;
@@ -464,7 +471,8 @@ class ImageWriter FINAL {
                           size_t oat_index)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  void TryAssignImTableOffset(ImTable* imt, size_t oat_index) REQUIRES_SHARED(Locks::mutator_lock_);
+  // Return true if imt was newly inserted.
+  bool TryAssignImTableOffset(ImTable* imt, size_t oat_index) REQUIRES_SHARED(Locks::mutator_lock_);
 
   // Assign the offset for an IMT conflict table. Does nothing if the table already has a native
   // relocation.
@@ -534,6 +542,14 @@ class ImageWriter FINAL {
   // Return true if there already exists a native allocation for an object.
   bool NativeRelocationAssigned(void* ptr) const;
 
+  void CopyReference(mirror::HeapReference<mirror::Object>* dest, ObjPtr<mirror::Object> src)
+      REQUIRES_SHARED(Locks::mutator_lock_);
+
+  void CopyReference(mirror::CompressedReference<mirror::Object>* dest, ObjPtr<mirror::Object> src)
+      REQUIRES_SHARED(Locks::mutator_lock_);
+
+  void CopyAndFixupPointer(void** target, void* value);
+
   const CompilerDriver& compiler_driver_;
 
   // Beginning target image address for the first image.
@@ -608,9 +624,11 @@ class ImageWriter FINAL {
   class FixupRootVisitor;
   class FixupVisitor;
   class GetRootsVisitor;
+  class ImageAddressVisitorForDexCacheArray;
   class NativeLocationVisitor;
   class PruneClassesVisitor;
   class PruneClassLoaderClassesVisitor;
+  class RegisterBootClassPathClassesVisitor;
   class VisitReferencesVisitor;
 
   DISALLOW_COPY_AND_ASSIGN(ImageWriter);
diff --git a/patchoat/patchoat.cc b/patchoat/patchoat.cc
index dfaae7d864..0c2717f207 100644
--- a/patchoat/patchoat.cc
+++ b/patchoat/patchoat.cc
@@ -485,7 +485,7 @@ class PatchOat::RelocatedPointerVisitor {
   explicit RelocatedPointerVisitor(PatchOat* patch_oat) : patch_oat_(patch_oat) {}
 
   template <typename T>
-  T* operator()(T* ptr) const {
+  T* operator()(T* ptr, void** dest_addr ATTRIBUTE_UNUSED = 0) const {
     return patch_oat_->RelocatedAddressOfPointer(ptr);
   }
diff --git a/runtime/art_field.h b/runtime/art_field.h
index 75dd981136..666ed8a868 100644
--- a/runtime/art_field.h
+++ b/runtime/art_field.h
@@ -47,6 +47,10 @@ class ArtField FINAL {
   void SetDeclaringClass(ObjPtr<mirror::Class> new_declaring_class)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
+  mirror::CompressedReference<mirror::Object>* GetDeclaringClassAddressWithoutBarrier() {
+    return declaring_class_.AddressWithoutBarrier();
+  }
+
   uint32_t GetAccessFlags() REQUIRES_SHARED(Locks::mutator_lock_);
 
   void SetAccessFlags(uint32_t new_access_flags) REQUIRES_SHARED(Locks::mutator_lock_) {
diff --git a/runtime/art_method.h b/runtime/art_method.h
index 8f09cc6d03..51b65760a1 100644
--- a/runtime/art_method.h
+++ b/runtime/art_method.h
@@ -73,6 +73,10 @@ class ArtMethod FINAL {
   ALWAYS_INLINE mirror::Class* GetDeclaringClassUnchecked()
       REQUIRES_SHARED(Locks::mutator_lock_);
 
+  mirror::CompressedReference<mirror::Object>* GetDeclaringClassAddressWithoutBarrier() {
+    return declaring_class_.AddressWithoutBarrier();
+  }
+
   void SetDeclaringClass(ObjPtr<mirror::Class> new_declaring_class)
       REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/class_table.h b/runtime/class_table.h
index 79f5aea399..430edbba4e 100644
--- a/runtime/class_table.h
+++ b/runtime/class_table.h
@@ -256,6 +256,7 @@ class ClassTable {
   }
 
  private:
+  // Only copies classes.
   void CopyWithoutLocks(const ClassTable& source_table) NO_THREAD_SAFETY_ANALYSIS;
   void InsertWithoutLocks(ObjPtr<mirror::Class> klass) NO_THREAD_SAFETY_ANALYSIS;
diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc
index 568f8d62a8..662efe2c8d 100644
--- a/runtime/gc/space/image_space.cc
+++ b/runtime/gc/space/image_space.cc
@@ -886,7 +886,7 @@ class ImageSpaceLoader {
   explicit FixupObjectAdapter(Args... args) : FixupVisitor(args...) {}
 
   template <typename T>
-  T* operator()(T* obj) const {
+  T* operator()(T* obj, void** dest_addr ATTRIBUTE_UNUSED = nullptr) const {
     return ForwardObject(obj);
   }
 };
@@ -976,7 +976,8 @@ class ImageSpaceLoader {
                    ForwardObject(obj));
   }
 
-  void operator()(mirror::Object* obj) const NO_THREAD_SAFETY_ANALYSIS {
+  void operator()(mirror::Object* obj) const
+      NO_THREAD_SAFETY_ANALYSIS {
     if (visited_->Test(obj)) {
       // Already visited.
       return;
diff --git a/runtime/imt_conflict_table.h b/runtime/imt_conflict_table.h
index fdd10fefc4..35868642e1 100644
--- a/runtime/imt_conflict_table.h
+++ b/runtime/imt_conflict_table.h
@@ -81,6 +81,14 @@ class ImtConflictTable {
     return GetMethod(index * kMethodCount + kMethodImplementation, pointer_size);
   }
 
+  void** AddressOfInterfaceMethod(size_t index, PointerSize pointer_size) {
+    return AddressOfMethod(index * kMethodCount + kMethodInterface, pointer_size);
+  }
+
+  void** AddressOfImplementationMethod(size_t index, PointerSize pointer_size) {
+    return AddressOfMethod(index * kMethodCount + kMethodImplementation, pointer_size);
+  }
+
   // Return true if two conflict tables are the same.
   bool Equals(ImtConflictTable* other, PointerSize pointer_size) const {
     size_t num = NumEntries(pointer_size);
@@ -169,6 +177,14 @@ class ImtConflictTable {
   }
 
  private:
+  void** AddressOfMethod(size_t index, PointerSize pointer_size) {
+    if (pointer_size == PointerSize::k64) {
+      return reinterpret_cast<void**>(&data64_[index]);
+    } else {
+      return reinterpret_cast<void**>(&data32_[index]);
+    }
+  }
+
   ArtMethod* GetMethod(size_t index, PointerSize pointer_size) const {
     if (pointer_size == PointerSize::k64) {
       return reinterpret_cast<ArtMethod*>(static_cast<uintptr_t>(data64_[index]));
diff --git a/runtime/imtable.h b/runtime/imtable.h
index b7066bd521..aa0a5043b5 100644
--- a/runtime/imtable.h
+++ b/runtime/imtable.h
@@ -37,9 +37,13 @@ class ImTable {
   // (non-marker) interfaces.
   static constexpr size_t kSize = IMT_SIZE;
 
+  uint8_t* AddressOfElement(size_t index, PointerSize pointer_size) {
+    return reinterpret_cast<uint8_t*>(this) + OffsetOfElement(index, pointer_size);
+  }
+
   ArtMethod* Get(size_t index, PointerSize pointer_size) {
     DCHECK_LT(index, kSize);
-    uint8_t* ptr = reinterpret_cast<uint8_t*>(this) + OffsetOfElement(index, pointer_size);
+    uint8_t* ptr = AddressOfElement(index, pointer_size);
     if (pointer_size == PointerSize::k32) {
       uint32_t value = *reinterpret_cast<uint32_t*>(ptr);
       return reinterpret_cast<ArtMethod*>(value);
@@ -51,7 +55,7 @@ class ImTable {
 
   void Set(size_t index, ArtMethod* method, PointerSize pointer_size) {
     DCHECK_LT(index, kSize);
-    uint8_t* ptr = reinterpret_cast<uint8_t*>(this) + OffsetOfElement(index, pointer_size);
+    uint8_t* ptr = AddressOfElement(index, pointer_size);
     if (pointer_size == PointerSize::k32) {
       uintptr_t value = reinterpret_cast<uintptr_t>(method);
       DCHECK_EQ(static_cast<uint32_t>(value), value);  // Check that we dont lose any non 0 bits.
diff --git a/runtime/mirror/array.h b/runtime/mirror/array.h
index 16cf30f1e2..51d9d24619 100644
--- a/runtime/mirror/array.h
+++ b/runtime/mirror/array.h
@@ -198,6 +198,13 @@ class PointerArray : public Array {
   T GetElementPtrSize(uint32_t idx, PointerSize ptr_size)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
+  void** ElementAddress(size_t index, PointerSize ptr_size) REQUIRES_SHARED(Locks::mutator_lock_) {
+    DCHECK_LT(index, static_cast<size_t>(GetLength()));
+    return reinterpret_cast<void**>(reinterpret_cast<uint8_t*>(this) +
+                                    Array::DataOffset(static_cast<size_t>(ptr_size)).Uint32Value() +
+                                    static_cast<size_t>(ptr_size) * index);
+  }
+
   template<bool kTransactionActive = false, bool kUnchecked = false>
   void SetElementPtrSize(uint32_t idx, uint64_t element, PointerSize ptr_size)
       REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h
index 003b03b2f9..be3b937f3e 100644
--- a/runtime/mirror/class-inl.h
+++ b/runtime/mirror/class-inl.h
@@ -1097,7 +1097,9 @@ inline void Class::FixupNativePointers(Class* dest,
   if (!IsTemp() && ShouldHaveEmbeddedVTable<kVerifyNone, kReadBarrierOption>()) {
     for (int32_t i = 0, count = GetEmbeddedVTableLength(); i < count; ++i) {
       ArtMethod* method = GetEmbeddedVTableEntry(i, pointer_size);
-      ArtMethod* new_method = visitor(method);
+      void** dest_addr = reinterpret_cast<void**>(reinterpret_cast<uintptr_t>(dest) +
+          EmbeddedVTableEntryOffset(i, pointer_size).Uint32Value());
+      ArtMethod* new_method = visitor(method, dest_addr);
       if (method != new_method) {
         dest->SetEmbeddedVTableEntryUnchecked(i, new_method, pointer_size);
       }
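
Note: the pattern most of these hunks converge on — translate a host-side native pointer through the relocation map, then store it at the target's pointer width — is easier to see outside diff syntax. The sketch below is illustrative only; RelocationMap, TargetPointerSize, and this standalone CopyAndFixupPointer are hypothetical stand-ins for ImageWriter's native_object_relocations_, target_ptr_size_, and the member function added above, not ART code.

#include <cassert>
#include <cstdint>
#include <cstring>
#include <iostream>
#include <unordered_map>

// Hypothetical stand-in for native_object_relocations_: maps a host pointer
// to its assigned offset within the target image.
using RelocationMap = std::unordered_map<void*, uint64_t>;

enum class TargetPointerSize { k32 = 4, k64 = 8 };

// Mirrors the shape of ImageWriter::CopyAndFixupPointer in the diff: translate
// `value` to its address in the image, then write it into `target` as a 32-bit
// or 64-bit slot depending on the target ABI, which may differ from the host's.
void CopyAndFixupPointer(const RelocationMap& relocations,
                         uint64_t image_begin,
                         TargetPointerSize ptr_size,
                         void* target,
                         void* value) {
  uint64_t new_value = 0;
  if (value != nullptr) {
    auto it = relocations.find(value);
    assert(it != relocations.end() && "every native pointer needs a relocation");
    new_value = image_begin + it->second;
  }
  if (ptr_size == TargetPointerSize::k32) {
    uint32_t value32 = static_cast<uint32_t>(new_value);
    assert(value32 == new_value && "32-bit image address must fit in 4 GiB");
    std::memcpy(target, &value32, sizeof(value32));  // Write only 4 bytes.
  } else {
    std::memcpy(target, &new_value, sizeof(new_value));  // Write all 8 bytes.
  }
}

int main() {
  int host_object = 0;  // Stand-in for an ArtMethod/ArtField on the host heap.
  RelocationMap relocations{{&host_object, 0x1000}};
  uint64_t slot = 0;    // One slot of a PointerArray/ImTable in the image copy.
  CopyAndFixupPointer(relocations, 0x70000000, TargetPointerSize::k32,
                      &slot, &host_object);
  // On a little-endian host this prints 70001000 (image_begin + offset).
  std::cout << std::hex << slot << std::endl;
  return 0;
}

The memcpy branches make the write width explicit in the sketch; the real implementation in the diff instead writes through uint32_t*/uint64_t* casts, with PointerToLowMemUInt32 enforcing that 32-bit image addresses fit in the low 4 GiB.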