Clean up ImageHeader section getters.

Add named convenience accessors (GetObjectsSection, GetFieldsSection,
GetImTablesSection, GetIMTConflictTablesSection, GetDexCacheArraysSection,
GetInternedStringsSection, GetClassTableSection, GetImageBitmapSection)
and convert callers from GetImageSection(kSection...) to them. Move
GetImageSection() from image.cc into image.h as an inline function,
downgrading its bounds check from CHECK_LT to DCHECK_LT.
Test: m art-test-host-gtest
Test: testrunner.py --host
Change-Id: Iaea7ce6f9bc3ff3c6d9bb6fb598aa62333ebf30c
diff --git a/compiler/image_test.h b/compiler/image_test.h
index daa4b11..15f79cb 100644
--- a/compiler/image_test.h
+++ b/compiler/image_test.h
@@ -398,7 +398,7 @@
ImageHeader image_header;
ASSERT_EQ(file->ReadFully(&image_header, sizeof(image_header)), true);
ASSERT_TRUE(image_header.IsValid());
- const auto& bitmap_section = image_header.GetImageSection(ImageHeader::kSectionImageBitmap);
+ const auto& bitmap_section = image_header.GetImageBitmapSection();
ASSERT_GE(bitmap_section.Offset(), sizeof(image_header));
ASSERT_NE(0U, bitmap_section.Size());
diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc
index 6ee9cc6..12f477a 100644
--- a/compiler/image_writer.cc
+++ b/compiler/image_writer.cc
@@ -298,8 +298,7 @@
// Write out the image bitmap at the page aligned start of the image end, also uncompressed for
// convenience.
- const ImageSection& bitmap_section = image_header->GetImageSection(
- ImageHeader::kSectionImageBitmap);
+ const ImageSection& bitmap_section = image_header->GetImageBitmapSection();
// Align up since data size may be unaligned if the image is compressed.
size_t bitmap_position_in_file = RoundUp(sizeof(ImageHeader) + data_size, kPageSize);
if (!is_compressed) {
@@ -2114,8 +2113,7 @@
// Write the intern table into the image.
if (image_info.intern_table_bytes_ > 0) {
- const ImageSection& intern_table_section = image_header->GetImageSection(
- ImageHeader::kSectionInternedStrings);
+ const ImageSection& intern_table_section = image_header->GetInternedStringsSection();
InternTable* const intern_table = image_info.intern_table_.get();
uint8_t* const intern_table_memory_ptr =
image_info.image_->Begin() + intern_table_section.Offset();
@@ -2134,8 +2132,7 @@
// Write the class table(s) into the image. class_table_bytes_ may be 0 if there are multiple
// class loaders. Writing multiple class tables into the image is currently unsupported.
if (image_info.class_table_bytes_ > 0u) {
- const ImageSection& class_table_section = image_header->GetImageSection(
- ImageHeader::kSectionClassTable);
+ const ImageSection& class_table_section = image_header->GetClassTableSection();
uint8_t* const class_table_memory_ptr =
image_info.image_->Begin() + class_table_section.Offset();
ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
diff --git a/dex2oat/dex2oat_test.cc b/dex2oat/dex2oat_test.cc
index db70e78..e10b171 100644
--- a/dex2oat/dex2oat_test.cc
+++ b/dex2oat/dex2oat_test.cc
@@ -840,7 +840,7 @@
bool success = file->ReadFully(&image_header, sizeof(image_header));
ASSERT_TRUE(success);
ASSERT_TRUE(image_header.IsValid());
- EXPECT_GT(image_header.GetImageSection(ImageHeader::kSectionObjects).Size(), 0u);
+ EXPECT_GT(image_header.GetObjectsSection().Size(), 0u);
}
}
diff --git a/imgdiag/imgdiag.cc b/imgdiag/imgdiag.cc
index 9ffc414..00e2a89 100644
--- a/imgdiag/imgdiag.cc
+++ b/imgdiag/imgdiag.cc
@@ -1209,7 +1209,7 @@
const uint8_t* image_begin_unaligned = image_header_.GetImageBegin();
const uint8_t* image_mirror_end_unaligned = image_begin_unaligned +
- image_header_.GetImageSection(ImageHeader::kSectionObjects).Size();
+ image_header_.GetObjectsSection().Size();
const uint8_t* image_end_unaligned = image_begin_unaligned + image_header_.GetImageSize();
// Adjust range to nearest page
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index 6f833c6..6e4c68b 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -1972,16 +1972,13 @@
stats_.file_bytes += uncompressed_size - data_size;
}
size_t header_bytes = sizeof(ImageHeader);
- const auto& object_section = image_header_.GetImageSection(ImageHeader::kSectionObjects);
- const auto& field_section = image_header_.GetImageSection(ImageHeader::kSectionArtFields);
+ const auto& object_section = image_header_.GetObjectsSection();
+ const auto& field_section = image_header_.GetFieldsSection();
const auto& method_section = image_header_.GetMethodsSection();
- const auto& dex_cache_arrays_section = image_header_.GetImageSection(
- ImageHeader::kSectionDexCacheArrays);
- const auto& intern_section = image_header_.GetImageSection(
- ImageHeader::kSectionInternedStrings);
- const auto& class_table_section = image_header_.GetImageSection(
- ImageHeader::kSectionClassTable);
- const auto& bitmap_section = image_header_.GetImageSection(ImageHeader::kSectionImageBitmap);
+ const auto& dex_cache_arrays_section = image_header_.GetDexCacheArraysSection();
+ const auto& intern_section = image_header_.GetInternedStringsSection();
+ const auto& class_table_section = image_header_.GetClassTableSection();
+ const auto& bitmap_section = image_header_.GetImageBitmapSection();
stats_.header_bytes = header_bytes;
@@ -2235,8 +2232,7 @@
auto it = dex_caches_.find(obj);
if (it != dex_caches_.end()) {
auto* dex_cache = down_cast<mirror::DexCache*>(obj);
- const auto& field_section = image_header_.GetImageSection(
- ImageHeader::kSectionArtFields);
+ const auto& field_section = image_header_.GetFieldsSection();
const auto& method_section = image_header_.GetMethodsSection();
size_t num_methods = dex_cache->NumResolvedMethods();
if (num_methods != 0u) {
diff --git a/patchoat/patchoat.cc b/patchoat/patchoat.cc
index efa2969..f54e81f 100644
--- a/patchoat/patchoat.cc
+++ b/patchoat/patchoat.cc
@@ -468,7 +468,7 @@
};
void PatchOat::PatchInternedStrings(const ImageHeader* image_header) {
- const auto& section = image_header->GetImageSection(ImageHeader::kSectionInternedStrings);
+ const auto& section = image_header->GetInternedStringsSection();
InternTable temp_table;
// Note that we require that ReadFromMemory does not make an internal copy of the elements.
// This also relies on visit roots not doing any verification which could fail after we update
@@ -479,7 +479,7 @@
}
void PatchOat::PatchClassTable(const ImageHeader* image_header) {
- const auto& section = image_header->GetImageSection(ImageHeader::kSectionClassTable);
+ const auto& section = image_header->GetClassTableSection();
if (section.Size() == 0) {
return;
}
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 93b6dd1..d22482f 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1454,8 +1454,7 @@
const auto& image_header = space->GetImageHeader();
const auto bitmap = space->GetMarkBitmap(); // bitmap of objects
const uint8_t* target_base = space->GetMemMap()->Begin();
- const ImageSection& objects_section =
- image_header.GetImageSection(ImageHeader::kSectionObjects);
+ const ImageSection& objects_section = image_header.GetObjectsSection();
uintptr_t objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
uintptr_t objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());
@@ -1664,8 +1663,7 @@
bool contains = false;
for (auto space : spaces_) {
auto offset = reinterpret_cast<uint8_t*>(arr) - space->Begin();
- if (space->GetImageHeader().GetImageSection(
- ImageHeader::kSectionDexCacheArrays).Contains(offset)) {
+ if (space->GetImageHeader().GetDexCacheArraysSection().Contains(offset)) {
contains = true;
break;
}
@@ -1980,8 +1978,7 @@
// In this case, madvise away the dex cache arrays section of the image to reduce RAM usage and
// mark as PROT_NONE to catch any invalid accesses.
if (forward_dex_cache_arrays) {
- const ImageSection& dex_cache_section = header.GetImageSection(
- ImageHeader::kSectionDexCacheArrays);
+ const ImageSection& dex_cache_section = header.GetDexCacheArraysSection();
uint8_t* section_begin = AlignUp(space->Begin() + dex_cache_section.Offset(), kPageSize);
uint8_t* section_end = AlignDown(space->Begin() + dex_cache_section.End(), kPageSize);
if (section_begin < section_end) {
diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc
index 1a48b46..732c707 100644
--- a/runtime/gc/space/image_space.cc
+++ b/runtime/gc/space/image_space.cc
@@ -579,7 +579,7 @@
}
}
- const auto& bitmap_section = image_header->GetImageSection(ImageHeader::kSectionImageBitmap);
+ const auto& bitmap_section = image_header->GetImageBitmapSection();
// The location we want to map from is the first aligned page after the end of the stored
// (possibly compressed) data.
const size_t image_bitmap_offset = RoundUp(sizeof(ImageHeader) + image_header->GetDataSize(),
@@ -644,7 +644,7 @@
image_filename,
bitmap_index));
// Bitmap only needs to cover until the end of the mirror objects section.
- const ImageSection& image_objects = image_header->GetImageSection(ImageHeader::kSectionObjects);
+ const ImageSection& image_objects = image_header->GetObjectsSection();
// We only want the mirror object, not the ArtFields and ArtMethods.
uint8_t* const image_end = map->Begin() + image_objects.End();
std::unique_ptr<accounting::ContinuousSpaceBitmap> bitmap;
@@ -1229,7 +1229,7 @@
}
ScopedDebugDisallowReadBarriers sddrb(Thread::Current());
// Need to update the image to be at the target base.
- const ImageSection& objects_section = image_header.GetImageSection(ImageHeader::kSectionObjects);
+ const ImageSection& objects_section = image_header.GetObjectsSection();
uintptr_t objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
uintptr_t objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());
FixupObjectAdapter fixup_adapter(boot_image, boot_oat, app_image, app_oat);
@@ -1358,7 +1358,7 @@
}
// In the app image case, the image methods are actually in the boot image.
image_header.RelocateImageMethods(boot_image.Delta());
- const auto& class_table_section = image_header.GetImageSection(ImageHeader::kSectionClassTable);
+ const auto& class_table_section = image_header.GetClassTableSection();
if (class_table_section.Size() > 0u) {
// Note that we require that ReadFromMemory does not make an internal copy of the elements.
// This also relies on visit roots not doing any verification which could fail after we update
diff --git a/runtime/image-inl.h b/runtime/image-inl.h
index da18ae5..935a1b6 100644
--- a/runtime/image-inl.h
+++ b/runtime/image-inl.h
@@ -51,7 +51,7 @@
inline void ImageHeader::VisitPackedImTables(const Visitor& visitor,
uint8_t* base,
PointerSize pointer_size) const {
- const ImageSection& section = GetImageSection(kSectionImTables);
+ const ImageSection& section = GetImTablesSection();
for (size_t pos = 0; pos < section.Size();) {
ImTable* imt = reinterpret_cast<ImTable*>(base + section.Offset() + pos);
for (size_t i = 0; i < ImTable::kSize; ++i) {
@@ -69,7 +69,7 @@
inline void ImageHeader::VisitPackedImtConflictTables(const Visitor& visitor,
uint8_t* base,
PointerSize pointer_size) const {
- const ImageSection& section = GetImageSection(kSectionIMTConflictTables);
+ const ImageSection& section = GetIMTConflictTablesSection();
for (size_t pos = 0; pos < section.Size(); ) {
auto* table = reinterpret_cast<ImtConflictTable*>(base + section.Offset() + pos);
table->Visit([&visitor](const std::pair<ArtMethod*, ArtMethod*>& methods) {
diff --git a/runtime/image.cc b/runtime/image.cc
index 1f7e0f3..8debc71 100644
--- a/runtime/image.cc
+++ b/runtime/image.cc
@@ -140,11 +140,6 @@
image_methods_[index] = reinterpret_cast<uint64_t>(method);
}
-const ImageSection& ImageHeader::GetImageSection(ImageSections index) const {
- CHECK_LT(static_cast<size_t>(index), kSectionCount);
- return sections_[index];
-}
-
std::ostream& operator<<(std::ostream& os, const ImageSection& section) {
return os << "size=" << section.Size() << " range=" << section.Offset() << "-" << section.End();
}
diff --git a/runtime/image.h b/runtime/image.h
index 7bb796c..42abffc 100644
--- a/runtime/image.h
+++ b/runtime/image.h
@@ -216,7 +216,18 @@
ArtMethod* GetImageMethod(ImageMethod index) const;
void SetImageMethod(ImageMethod index, ArtMethod* method);
- const ImageSection& GetImageSection(ImageSections index) const;
+ const ImageSection& GetImageSection(ImageSections index) const {
+ DCHECK_LT(static_cast<size_t>(index), kSectionCount);
+ return sections_[index];
+ }
+
+ const ImageSection& GetObjectsSection() const {
+ return GetImageSection(kSectionObjects);
+ }
+
+ const ImageSection& GetFieldsSection() const {
+ return GetImageSection(ImageHeader::kSectionArtFields);
+ }
const ImageSection& GetMethodsSection() const {
return GetImageSection(kSectionArtMethods);
@@ -226,8 +237,28 @@
return GetImageSection(kSectionRuntimeMethods);
}
- const ImageSection& GetFieldsSection() const {
- return GetImageSection(ImageHeader::kSectionArtFields);
+ const ImageSection& GetImTablesSection() const {
+ return GetImageSection(kSectionImTables);
+ }
+
+ const ImageSection& GetIMTConflictTablesSection() const {
+ return GetImageSection(kSectionIMTConflictTables);
+ }
+
+ const ImageSection& GetDexCacheArraysSection() const {
+ return GetImageSection(kSectionDexCacheArrays);
+ }
+
+ const ImageSection& GetInternedStringsSection() const {
+ return GetImageSection(kSectionInternedStrings);
+ }
+
+ const ImageSection& GetClassTableSection() const {
+ return GetImageSection(kSectionClassTable);
+ }
+
+ const ImageSection& GetImageBitmapSection() const {
+ return GetImageSection(kSectionImageBitmap);
}
template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
diff --git a/runtime/intern_table.cc b/runtime/intern_table.cc
index f4da5a4..5b93d3b 100644
--- a/runtime/intern_table.cc
+++ b/runtime/intern_table.cc
@@ -182,7 +182,7 @@
for (gc::space::ImageSpace* image_space : image_spaces) {
const ImageHeader* const header = &image_space->GetImageHeader();
// Check if we have the interned strings section.
- const ImageSection& section = header->GetImageSection(ImageHeader::kSectionInternedStrings);
+ const ImageSection& section = header->GetInternedStringsSection();
if (section.Size() > 0) {
AddTableFromMemoryLocked(image_space->Begin() + section.Offset());
}