Diffstat (limited to 'compiler/image_writer.cc')
-rw-r--r--   compiler/image_writer.cc   50
1 file changed, 33 insertions, 17 deletions
diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc
index a03ff755ab..c4fe2cbeac 100644
--- a/compiler/image_writer.cc
+++ b/compiler/image_writer.cc
@@ -244,8 +244,8 @@ void ImageWriter::AssignImageOffset(mirror::Object* object, ImageWriter::BinSlot
   DCHECK(object != nullptr);
   DCHECK_NE(image_objects_offset_begin_, 0u);
-  size_t previous_bin_sizes = bin_slot_previous_sizes_[bin_slot.GetBin()];
-  size_t new_offset = image_objects_offset_begin_ + previous_bin_sizes + bin_slot.GetIndex();
+  size_t bin_slot_offset = bin_slot_offsets_[bin_slot.GetBin()];
+  size_t new_offset = bin_slot_offset + bin_slot.GetIndex();
   DCHECK_ALIGNED(new_offset, kObjectAlignment);
   SetImageOffset(object, new_offset);
@@ -866,8 +866,10 @@ void ImageWriter::WalkFieldsInOrder(mirror::Object* obj) {
       }
       bool any_dirty = false;
       size_t count = 0;
+      const size_t method_alignment = ArtMethod::ObjectAlignment(target_ptr_size_);
       const size_t method_size = ArtMethod::ObjectSize(target_ptr_size_);
-      auto iteration_range = MakeIterationRangeFromLengthPrefixedArray(array, method_size);
+      auto iteration_range =
+          MakeIterationRangeFromLengthPrefixedArray(array, method_size, method_alignment);
       for (auto& m : iteration_range) {
         any_dirty = any_dirty || WillMethodBeDirty(&m);
         ++count;
@@ -876,7 +878,9 @@ void ImageWriter::WalkFieldsInOrder(mirror::Object* obj) {
           kNativeObjectRelocationTypeArtMethodClean;
       Bin bin_type = BinTypeForNativeRelocationType(type);
       // Forward the entire array at once, but header first.
-      const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0, method_size);
+      const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0,
+                                                                             method_size,
+                                                                             method_alignment);
       auto it = native_object_relocations_.find(array);
       CHECK(it == native_object_relocations_.end()) << "Method array " << array
           << " already forwarded";
@@ -972,9 +976,10 @@ void ImageWriter::CalculateNewObjectOffsets() {
   size_t& offset = bin_slot_sizes_[BinTypeForNativeRelocationType(image_method_type)];
   native_object_relocations_.emplace(&image_method_array_,
                                      NativeObjectRelocation { offset, image_method_type });
+  size_t method_alignment = ArtMethod::ObjectAlignment(target_ptr_size_);
   const size_t array_size = LengthPrefixedArray<ArtMethod>::ComputeSize(
-      0, ArtMethod::ObjectSize(target_ptr_size_));
-  CHECK_ALIGNED(array_size, 8u);
+      0, ArtMethod::ObjectSize(target_ptr_size_), method_alignment);
+  CHECK_ALIGNED_PARAM(array_size, method_alignment);
   offset += array_size;
   for (auto* m : image_methods_) {
     CHECK(m != nullptr);
@@ -982,13 +987,21 @@ void ImageWriter::CalculateNewObjectOffsets() {
     AssignMethodOffset(m, kNativeObjectRelocationTypeArtMethodClean);
   }

-  // Calculate cumulative bin slot sizes.
-  size_t previous_sizes = 0u;
+  // Calculate bin slot offsets.
+  size_t bin_offset = image_objects_offset_begin_;
   for (size_t i = 0; i != kBinSize; ++i) {
-    bin_slot_previous_sizes_[i] = previous_sizes;
-    previous_sizes += bin_slot_sizes_[i];
+    bin_slot_offsets_[i] = bin_offset;
+    bin_offset += bin_slot_sizes_[i];
+    if (i == kBinArtField) {
+      static_assert(kBinArtField + 1 == kBinArtMethodClean, "Methods follow fields.");
+      static_assert(alignof(ArtField) == 4u, "ArtField alignment is 4.");
+      DCHECK_ALIGNED(bin_offset, 4u);
+      DCHECK(method_alignment == 4u || method_alignment == 8u);
+      bin_offset = RoundUp(bin_offset, method_alignment);
+    }
   }
-  DCHECK_EQ(previous_sizes, GetBinSizeSum());
+  // NOTE: There may be additional padding between the bin slots and the intern table.
+  DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);

   // Transform each object's bin slot into an offset which will be used to do the final copy.
@@ -1002,7 +1015,7 @@ void ImageWriter::CalculateNewObjectOffsets() {
   for (auto& pair : native_object_relocations_) {
     NativeObjectRelocation& relocation = pair.second;
     Bin bin_type = BinTypeForNativeRelocationType(relocation.type);
-    relocation.offset += image_objects_offset_begin_ + bin_slot_previous_sizes_[bin_type];
+    relocation.offset += bin_slot_offsets_[bin_type];
   }

   // Calculate how big the intern table will be after being serialized.
@@ -1029,15 +1042,15 @@ void ImageWriter::CreateHeader(size_t oat_loaded_size, size_t oat_data_offset) {
   // Add field section.
   auto* field_section = &sections[ImageHeader::kSectionArtFields];
   *field_section = ImageSection(cur_pos, bin_slot_sizes_[kBinArtField]);
-  CHECK_EQ(image_objects_offset_begin_ + bin_slot_previous_sizes_[kBinArtField],
-           field_section->Offset());
+  CHECK_EQ(bin_slot_offsets_[kBinArtField], field_section->Offset());
   cur_pos = field_section->End();
+  // Round up to the alignment the required by the method section.
+  cur_pos = RoundUp(cur_pos, ArtMethod::ObjectAlignment(target_ptr_size_));
   // Add method section.
   auto* methods_section = &sections[ImageHeader::kSectionArtMethods];
   *methods_section = ImageSection(cur_pos, bin_slot_sizes_[kBinArtMethodClean] +
                                   bin_slot_sizes_[kBinArtMethodDirty]);
-  CHECK_EQ(image_objects_offset_begin_ + bin_slot_previous_sizes_[kBinArtMethodClean],
-           methods_section->Offset());
+  CHECK_EQ(bin_slot_offsets_[kBinArtMethodClean], methods_section->Offset());
   cur_pos = methods_section->End();
   // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
   cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
@@ -1135,7 +1148,10 @@ void ImageWriter::CopyAndFixupNativeData() {
       }
      case kNativeObjectRelocationTypeArtMethodArrayClean:
      case kNativeObjectRelocationTypeArtMethodArrayDirty: {
-        memcpy(dest, pair.first, LengthPrefixedArray<ArtMethod>::ComputeSize(0));
+        memcpy(dest, pair.first, LengthPrefixedArray<ArtMethod>::ComputeSize(
+            0,
+            ArtMethod::ObjectSize(target_ptr_size_),
+            ArtMethod::ObjectAlignment(target_ptr_size_)));
         break;
       }
     }
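
The heart of the change is in CalculateNewObjectOffsets(): instead of keeping cumulative "previous bin sizes" and adding image_objects_offset_begin_ at every use, the writer now records an absolute start offset per bin and rounds the start of the ArtMethod bins up to the method alignment for the target pointer size (4 or 8 bytes), which can leave padding after the 4-byte-aligned ArtField bin. The sketch below is a standalone illustration of that layout step, not ART code; the bin list, the byte sizes, and the RoundUp helper are simplified stand-ins for the real ones.

// Standalone sketch of the bin layout logic in the patch (not ART code).
// All names and numbers here are illustrative stand-ins.
#include <array>
#include <cstddef>
#include <iostream>

namespace {

// Round value up to the next multiple of alignment (alignment is a power of two).
constexpr std::size_t RoundUp(std::size_t value, std::size_t alignment) {
  return (value + alignment - 1) & ~(alignment - 1);
}

// Reduced bin enumeration: the ArtField bin followed by the two ArtMethod bins.
enum Bin : std::size_t { kBinArtField, kBinArtMethodClean, kBinArtMethodDirty, kBinSize };

}  // namespace

int main() {
  // Hypothetical inputs: where the mirror objects end, the per-bin byte sizes,
  // and the method alignment implied by the target pointer size (4 or 8).
  const std::size_t image_objects_offset_begin = 4096;
  const std::array<std::size_t, kBinSize> bin_slot_sizes = {100, 240, 120};
  const std::size_t method_alignment = 8;  // e.g. a 64-bit target

  std::array<std::size_t, kBinSize> bin_slot_offsets = {};
  std::size_t bin_offset = image_objects_offset_begin;
  for (std::size_t i = 0; i != kBinSize; ++i) {
    bin_slot_offsets[i] = bin_offset;  // absolute start offset of bin i
    bin_offset += bin_slot_sizes[i];
    if (i == kBinArtField) {
      // ArtField entries are 4-byte aligned; pad so that the method bins start
      // at the stricter method alignment, as the patched loop does.
      bin_offset = RoundUp(bin_offset, method_alignment);
    }
  }

  for (std::size_t i = 0; i != kBinSize; ++i) {
    std::cout << "bin " << i << " starts at offset " << bin_slot_offsets[i] << "\n";
  }
  std::cout << "bins end (including padding) at " << bin_offset << "\n";
}

With these hypothetical numbers the field bin ends at offset 4196 and the clean-method bin starts at 4200, so four bytes of padding sit between the two bins; with method_alignment == 4 (a 32-bit target) no padding would be inserted.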