Diffstat (limited to 'compiler/image_writer.cc')
-rw-r--r--  compiler/image_writer.cc  290
1 file changed, 225 insertions, 65 deletions
diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc
index 3d9e7e7cda..9545c83eaf 100644
--- a/compiler/image_writer.cc
+++ b/compiler/image_writer.cc
@@ -17,6 +17,7 @@
#include "image_writer.h"
#include <sys/stat.h>
+#include <lz4.h>
#include <memory>
#include <numeric>
@@ -225,27 +226,72 @@ bool ImageWriter::Write(int image_fd,
return EXIT_FAILURE;
}
- // Write out the image + fields + methods.
+ std::unique_ptr<char[]> compressed_data;
+ // Image data size excludes the bitmap and the header.
ImageHeader* const image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
- const auto write_count = image_header->GetImageSize();
- if (!image_file->WriteFully(image_->Begin(), write_count)) {
- PLOG(ERROR) << "Failed to write image file " << image_filename;
+ const size_t image_data_size = image_header->GetImageSize() - sizeof(ImageHeader);
+ char* image_data = reinterpret_cast<char*>(image_->Begin()) + sizeof(ImageHeader);
+ size_t data_size;
+ const char* image_data_to_write;
+
+ CHECK_EQ(image_header->storage_mode_, image_storage_mode_);
+ switch (image_storage_mode_) {
+ case ImageHeader::kStorageModeLZ4: {
+ size_t compressed_max_size = LZ4_compressBound(image_data_size);
+ compressed_data.reset(new char[compressed_max_size]);
+ data_size = LZ4_compress(
+ reinterpret_cast<char*>(image_->Begin()) + sizeof(ImageHeader),
+ &compressed_data[0],
+ image_data_size);
+ image_data_to_write = &compressed_data[0];
+ VLOG(compiler) << "Compressed from " << image_data_size << " to " << data_size;
+ break;
+ }
+ case ImageHeader::kStorageModeUncompressed: {
+ data_size = image_data_size;
+ image_data_to_write = image_data;
+ break;
+ }
+ default: {
+ LOG(FATAL) << "Unsupported";
+ UNREACHABLE();
+ }
+ }
+
+ // Write header first, as uncompressed.
+ image_header->data_size_ = data_size;
+ if (!image_file->WriteFully(image_->Begin(), sizeof(ImageHeader))) {
+ PLOG(ERROR) << "Failed to write image file header " << image_filename;
image_file->Erase();
return false;
}
- // Write out the image bitmap at the page aligned start of the image end.
+ // Write out the image + fields + methods.
+ const bool is_compressed = compressed_data != nullptr;
+ if (!image_file->WriteFully(image_data_to_write, data_size)) {
+ PLOG(ERROR) << "Failed to write image file data " << image_filename;
+ image_file->Erase();
+ return false;
+ }
+
+ // Write out the image bitmap at the page aligned start of the image end, also uncompressed for
+ // convenience.
const ImageSection& bitmap_section = image_header->GetImageSection(
ImageHeader::kSectionImageBitmap);
- CHECK_ALIGNED(bitmap_section.Offset(), kPageSize);
+ // Align up since data size may be unaligned if the image is compressed.
+ size_t bitmap_position_in_file = RoundUp(sizeof(ImageHeader) + data_size, kPageSize);
+ if (!is_compressed) {
+ CHECK_EQ(bitmap_position_in_file, bitmap_section.Offset());
+ }
if (!image_file->Write(reinterpret_cast<char*>(image_bitmap_->Begin()),
- bitmap_section.Size(), bitmap_section.Offset())) {
+ bitmap_section.Size(),
+ bitmap_position_in_file)) {
PLOG(ERROR) << "Failed to write image file " << image_filename;
image_file->Erase();
return false;
}
-
- CHECK_EQ(bitmap_section.End(), static_cast<size_t>(image_file->GetLength()));
+ CHECK_EQ(bitmap_position_in_file + bitmap_section.Size(),
+ static_cast<size_t>(image_file->GetLength()));
if (image_file->FlushCloseOrErase() != 0) {
PLOG(ERROR) << "Failed to flush and close image file " << image_filename;
return false;
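
The hunk above introduces the LZ4 path: only the image data after the header is compressed, the header itself stays uncompressed so the loader can read data_size_ (and the uncompressed image size) before decompressing, and the bitmap is then written page-aligned past the compressed payload. Below is a minimal, self-contained sketch of that LZ4 block round trip; it is not ART code, and it uses LZ4_compress_default, the current replacement for the deprecated LZ4_compress called in the hunk:

```cpp
// Minimal LZ4 block round trip. Build with: g++ lz4_demo.cc -llz4
#include <lz4.h>
#include <cassert>
#include <cstring>
#include <string>
#include <vector>

int main() {
  const std::string image_data(4096, 'A');  // Stand-in for the image pages.
  // Worst-case output size, even for incompressible input.
  const int max_dst = LZ4_compressBound(static_cast<int>(image_data.size()));
  std::vector<char> compressed(max_dst);
  const int compressed_size = LZ4_compress_default(
      image_data.data(), compressed.data(),
      static_cast<int>(image_data.size()), max_dst);
  assert(compressed_size > 0);
  // The reader must know the uncompressed size up front; the hunk stores it
  // in the uncompressed ImageHeader for exactly that reason.
  std::vector<char> decompressed(image_data.size());
  const int decompressed_size = LZ4_decompress_safe(
      compressed.data(), decompressed.data(),
      compressed_size, static_cast<int>(decompressed.size()));
  assert(decompressed_size == static_cast<int>(image_data.size()));
  assert(std::memcmp(decompressed.data(), image_data.data(),
                     image_data.size()) == 0);
  return 0;
}
```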
@@ -330,10 +376,20 @@ void ImageWriter::SetImageBinSlot(mirror::Object* object, BinSlot bin_slot) {
}
void ImageWriter::PrepareDexCacheArraySlots() {
+ // Prepare dex cache array starts based on the ordering specified in the CompilerDriver.
+ uint32_t size = 0u;
+ for (const DexFile* dex_file : compiler_driver_.GetDexFilesForOatFile()) {
+ dex_cache_array_starts_.Put(dex_file, size);
+ DexCacheArraysLayout layout(target_ptr_size_, dex_file);
+ size += layout.Size();
+ }
+ // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned()
+ // when AssignImageBinSlot() assigns their indexes out of order.
+ bin_slot_sizes_[kBinDexCacheArray] = size;
+
ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
Thread* const self = Thread::Current();
ReaderMutexLock mu(self, *class_linker->DexLock());
- uint32_t size = 0u;
for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
mirror::DexCache* dex_cache =
down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
@@ -341,22 +397,18 @@ void ImageWriter::PrepareDexCacheArraySlots() {
continue;
}
const DexFile* dex_file = dex_cache->GetDexFile();
- dex_cache_array_starts_.Put(dex_file, size);
DexCacheArraysLayout layout(target_ptr_size_, dex_file);
DCHECK(layout.Valid());
+ uint32_t start = dex_cache_array_starts_.Get(dex_file);
DCHECK_EQ(dex_file->NumTypeIds() != 0u, dex_cache->GetResolvedTypes() != nullptr);
- AddDexCacheArrayRelocation(dex_cache->GetResolvedTypes(), size + layout.TypesOffset());
+ AddDexCacheArrayRelocation(dex_cache->GetResolvedTypes(), start + layout.TypesOffset());
DCHECK_EQ(dex_file->NumMethodIds() != 0u, dex_cache->GetResolvedMethods() != nullptr);
- AddDexCacheArrayRelocation(dex_cache->GetResolvedMethods(), size + layout.MethodsOffset());
+ AddDexCacheArrayRelocation(dex_cache->GetResolvedMethods(), start + layout.MethodsOffset());
DCHECK_EQ(dex_file->NumFieldIds() != 0u, dex_cache->GetResolvedFields() != nullptr);
- AddDexCacheArrayRelocation(dex_cache->GetResolvedFields(), size + layout.FieldsOffset());
+ AddDexCacheArrayRelocation(dex_cache->GetResolvedFields(), start + layout.FieldsOffset());
DCHECK_EQ(dex_file->NumStringIds() != 0u, dex_cache->GetStrings() != nullptr);
- AddDexCacheArrayRelocation(dex_cache->GetStrings(), size + layout.StringsOffset());
- size += layout.Size();
+ AddDexCacheArrayRelocation(dex_cache->GetStrings(), start + layout.StringsOffset());
}
- // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned()
- // when AssignImageBinSlot() assigns their indexes out or order.
- bin_slot_sizes_[kBinDexCacheArray] = size;
}
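
The reworked PrepareDexCacheArraySlots() splits offset assignment into two passes: start offsets are laid out first, in the deterministic order the CompilerDriver reports, and the dex-cache walk then only looks them up, so the resulting offsets no longer depend on the class linker's iteration order. A small sketch of that pattern, with illustrative types rather than ART's DexFile and DexCacheArraysLayout:

```cpp
// Two-pass layout: pass 1 fixes each file's start offset in a deterministic
// order; pass 2 may visit the caches in any order (e.g. class-linker hash
// order) and still produce stable offsets.
#include <cstdint>
#include <map>
#include <string>
#include <vector>

struct DexFileLike {
  std::string name;
  uint32_t arrays_size;  // DexCacheArraysLayout::Size() analogue.
};

int main() {
  std::vector<DexFileLike> files = {{"core.dex", 0x400u}, {"app.dex", 0x200u}};
  // Pass 1: assign start offsets in the compiler-specified order.
  std::map<const DexFileLike*, uint32_t> starts;
  uint32_t size = 0u;
  for (const DexFileLike& f : files) {
    starts.emplace(&f, size);
    size += f.arrays_size;
  }
  // Pass 2: look up the start regardless of visitation order; the real code
  // adds layout.TypesOffset(), MethodsOffset(), etc. on top of it.
  for (const DexFileLike& f : files) {
    uint32_t start = starts.at(&f);
    static_cast<void>(start);
  }
  return 0;
}
```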
void ImageWriter::AddDexCacheArrayRelocation(void* array, size_t offset) {
@@ -534,7 +586,10 @@ ImageWriter::BinSlot ImageWriter::GetImageBinSlot(mirror::Object* object) const
}
bool ImageWriter::AllocMemory() {
- const size_t length = RoundUp(image_objects_offset_begin_ + GetBinSizeSum() + intern_table_bytes_,
+ const size_t length = RoundUp(image_objects_offset_begin_ +
+ GetBinSizeSum() +
+ intern_table_bytes_ +
+ class_table_bytes_,
kPageSize);
std::string error_msg;
image_.reset(MemMap::MapAnonymous("image writer image",
@@ -586,6 +641,17 @@ bool ImageWriter::IsBootClassLoaderNonImageClass(mirror::Class* klass) {
}
bool ImageWriter::ContainsBootClassLoaderNonImageClass(mirror::Class* klass) {
+ bool early_exit = false;
+ std::unordered_set<mirror::Class*> visited;
+ return ContainsBootClassLoaderNonImageClassInternal(klass, &early_exit, &visited);
+}
+
+bool ImageWriter::ContainsBootClassLoaderNonImageClassInternal(
+ mirror::Class* klass,
+ bool* early_exit,
+ std::unordered_set<mirror::Class*>* visited) {
+ DCHECK(early_exit != nullptr);
+ DCHECK(visited != nullptr);
if (klass == nullptr) {
return false;
}
@@ -594,14 +660,22 @@ bool ImageWriter::ContainsBootClassLoaderNonImageClass(mirror::Class* klass) {
// Already computed, return the found value.
return found->second;
}
- // Place holder value to prevent infinite recursion.
- prune_class_memo_.emplace(klass, false);
+ // Circular dependency; return false but do not store the result in the memoization table.
+ if (visited->find(klass) != visited->end()) {
+ *early_exit = true;
+ return false;
+ }
+ visited->emplace(klass);
bool result = IsBootClassLoaderNonImageClass(klass);
+ bool my_early_exit = false; // Only for ourselves, ignore caller.
if (!result) {
// Check interfaces since these won't be visited through VisitReferences.
mirror::IfTable* if_table = klass->GetIfTable();
for (size_t i = 0, num_interfaces = klass->GetIfTableCount(); i < num_interfaces; ++i) {
- result = result || ContainsBootClassLoaderNonImageClass(if_table->GetInterface(i));
+ result = result || ContainsBootClassLoaderNonImageClassInternal(
+ if_table->GetInterface(i),
+ &my_early_exit,
+ visited);
}
}
// Check static fields and their classes.
@@ -615,16 +689,38 @@ bool ImageWriter::ContainsBootClassLoaderNonImageClass(mirror::Class* klass) {
mirror::Object* ref = klass->GetFieldObject<mirror::Object>(field_offset);
if (ref != nullptr) {
if (ref->IsClass()) {
- result = result || ContainsBootClassLoaderNonImageClass(ref->AsClass());
+ result = result ||
+ ContainsBootClassLoaderNonImageClassInternal(
+ ref->AsClass(),
+ &my_early_exit,
+ visited);
}
- result = result || ContainsBootClassLoaderNonImageClass(ref->GetClass());
+ result = result ||
+ ContainsBootClassLoaderNonImageClassInternal(
+ ref->GetClass(),
+ &my_early_exit,
+ visited);
}
field_offset = MemberOffset(field_offset.Uint32Value() +
sizeof(mirror::HeapReference<mirror::Object>));
}
}
- result = result || ContainsBootClassLoaderNonImageClass(klass->GetSuperClass());
- prune_class_memo_[klass] = result;
+ result = result ||
+ ContainsBootClassLoaderNonImageClassInternal(
+ klass->GetSuperClass(),
+ &my_early_exit,
+ visited);
+ // Erase the element we stored earlier since we are exiting the function.
+ auto it = visited->find(klass);
+ DCHECK(it != visited->end());
+ visited->erase(it);
+ // Only store result if it is true or none of the calls early exited due to circular
+ // dependencies. If visited is empty then we are the root caller; in that case the cycle was in
+ // a child call and we can remember the result.
+ if (result == true || !my_early_exit || visited->empty()) {
+ prune_class_memo_[klass] = result;
+ }
+ *early_exit |= my_early_exit;
return result;
}
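
The rewritten pruning check is a depth-first search with memoization that must stay sound in the presence of class reference cycles: a value computed while a cycle was being cut short may be wrong for callers outside the cycle, so it is cached only when it is definitely true, when no early exit happened, or when the caller is the root of the search. A compilable sketch of exactly that caching rule, with a plain Node graph standing in for mirror::Class and its superclass/interface/field edges:

```cpp
#include <cassert>
#include <unordered_map>
#include <unordered_set>
#include <vector>

struct Node {
  bool bad = false;          // IsBootClassLoaderNonImageClass() analogue.
  std::vector<Node*> edges;  // Superclass, interfaces, field classes.
};

bool ContainsBad(Node* n, bool* early_exit,
                 std::unordered_set<Node*>* visited,
                 std::unordered_map<Node*, bool>* memo) {
  if (n == nullptr) return false;
  auto found = memo->find(n);
  if (found != memo->end()) return found->second;  // Already computed.
  if (visited->count(n) != 0) {                    // Back edge: a cycle.
    *early_exit = true;
    return false;                                  // Do not memoize.
  }
  visited->insert(n);
  bool result = n->bad;
  bool my_early_exit = false;  // Only for ourselves, ignore caller.
  for (Node* e : n->edges) {
    result = result || ContainsBad(e, &my_early_exit, visited, memo);
  }
  visited->erase(n);
  // Safe to cache when the answer is definitely true, when no cycle was cut
  // short, or when we are the root caller (the whole cycle is below us).
  if (result || !my_early_exit || visited->empty()) {
    (*memo)[n] = result;
  }
  *early_exit |= my_early_exit;
  return result;
}

int main() {
  Node a, b;  // a <-> b cycle, neither marked bad.
  a.edges = {&b};
  b.edges = {&a};
  bool early = false;
  std::unordered_set<Node*> visited;
  std::unordered_map<Node*, bool> memo;
  assert(!ContainsBad(&a, &early, &visited, &memo));
  return 0;
}
```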
@@ -934,44 +1030,42 @@ void ImageWriter::WalkFieldsInOrder(mirror::Object* obj) {
}
}
// Visit and assign offsets for methods.
- LengthPrefixedArray<ArtMethod>* method_arrays[] = {
- as_klass->GetDirectMethodsPtr(), as_klass->GetVirtualMethodsPtr(),
- };
- for (LengthPrefixedArray<ArtMethod>* array : method_arrays) {
- if (array == nullptr) {
- continue;
- }
+ size_t num_methods = as_klass->NumMethods();
+ if (num_methods != 0) {
bool any_dirty = false;
- size_t count = 0;
- const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
- const size_t method_size = ArtMethod::Size(target_ptr_size_);
- auto iteration_range =
- MakeIterationRangeFromLengthPrefixedArray(array, method_size, method_alignment);
- for (auto& m : iteration_range) {
- any_dirty = any_dirty || WillMethodBeDirty(&m);
- ++count;
+ for (auto& m : as_klass->GetMethods(target_ptr_size_)) {
+ if (WillMethodBeDirty(&m)) {
+ any_dirty = true;
+ break;
+ }
}
NativeObjectRelocationType type = any_dirty
? kNativeObjectRelocationTypeArtMethodDirty
: kNativeObjectRelocationTypeArtMethodClean;
Bin bin_type = BinTypeForNativeRelocationType(type);
// Forward the entire array at once, but header first.
+ const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
+ const size_t method_size = ArtMethod::Size(target_ptr_size_);
const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0,
method_size,
method_alignment);
+ LengthPrefixedArray<ArtMethod>* array = as_klass->GetMethodsPtr();
auto it = native_object_relocations_.find(array);
- CHECK(it == native_object_relocations_.end()) << "Method array " << array
- << " already forwarded";
+ CHECK(it == native_object_relocations_.end())
+ << "Method array " << array << " already forwarded";
size_t& offset = bin_slot_sizes_[bin_type];
DCHECK(!IsInBootImage(array));
- native_object_relocations_.emplace(array, NativeObjectRelocation { offset,
- any_dirty ? kNativeObjectRelocationTypeArtMethodArrayDirty :
- kNativeObjectRelocationTypeArtMethodArrayClean });
+ native_object_relocations_.emplace(
+ array, NativeObjectRelocation {
+ offset,
+ any_dirty ? kNativeObjectRelocationTypeArtMethodArrayDirty
+ : kNativeObjectRelocationTypeArtMethodArrayClean
+ });
offset += header_size;
- for (auto& m : iteration_range) {
+ for (auto& m : as_klass->GetMethods(target_ptr_size_)) {
AssignMethodOffset(&m, type);
}
- (any_dirty ? dirty_methods_ : clean_methods_) += count;
+ (any_dirty ? dirty_methods_ : clean_methods_) += num_methods;
}
} else if (h_obj->IsObjectArray()) {
// Walk elements of an object array.
@@ -983,6 +1077,14 @@ void ImageWriter::WalkFieldsInOrder(mirror::Object* obj) {
WalkFieldsInOrder(value);
}
}
+ } else if (h_obj->IsClassLoader()) {
+ // Register the class loader if it has a class table.
+ // The fake boot class loader should not get registered and we should end up with only one
+ // class loader.
+ mirror::ClassLoader* class_loader = h_obj->AsClassLoader();
+ if (class_loader->GetClassTable() != nullptr) {
+ class_loaders_.insert(class_loader);
+ }
}
}
}
@@ -1107,10 +1209,29 @@ void ImageWriter::CalculateNewObjectOffsets() {
}
// Calculate how big the intern table will be after being serialized.
- auto* const intern_table = Runtime::Current()->GetInternTable();
+ InternTable* const intern_table = runtime->GetInternTable();
CHECK_EQ(intern_table->WeakSize(), 0u) << " should have strong interned all the strings";
intern_table_bytes_ = intern_table->WriteToMemory(nullptr);
+ // Write out the class table.
+ ClassLinker* class_linker = runtime->GetClassLinker();
+ if (boot_image_space_ == nullptr) {
+ // Compiling the boot image, add null class loader.
+ class_loaders_.insert(nullptr);
+ }
+ // class_loaders_ usually will not be empty, but may be empty if we attempt to create an image
+ // with no classes.
+ if (class_loaders_.size() == 1u) {
+ // Only write the class table if we have exactly one class loader. There may be cases where
+ // there are multiple class loaders if a class path is passed to dex2oat.
+ ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
+ for (mirror::ClassLoader* loader : class_loaders_) {
+ ClassTable* table = class_linker->ClassTableForClassLoader(loader);
+ CHECK(table != nullptr);
+ class_table_bytes_ += table->WriteToMemory(nullptr);
+ }
+ }
+
// Note that image_end_ is left at end of used mirror object section.
}
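
Both the intern table and the new class table use the same serialization convention: WriteToMemory(nullptr) is a sizing pass that returns the byte count without writing anything, and the same call with a real pointer performs the write. A minimal sketch of that convention (SerializableTable is an illustrative stand-in, not an ART class):

```cpp
#include <cassert>
#include <cstdint>
#include <cstring>
#include <vector>

class SerializableTable {
 public:
  // Returns the number of bytes required; writes only if ptr != nullptr.
  size_t WriteToMemory(uint8_t* ptr) const {
    if (ptr != nullptr) {
      std::memcpy(ptr, data_.data(), data_.size());
    }
    return data_.size();
  }

 private:
  std::vector<uint8_t> data_ = {1, 2, 3, 4};
};

int main() {
  SerializableTable table;
  const size_t needed = table.WriteToMemory(nullptr);        // Sizing pass.
  std::vector<uint8_t> image(needed);
  const size_t written = table.WriteToMemory(image.data());  // Writing pass.
  assert(written == needed);  // Mirrors CHECK_EQ(class_table_bytes, ...).
  return 0;
}
```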
@@ -1152,6 +1273,14 @@ void ImageWriter::CreateHeader(size_t oat_loaded_size, size_t oat_data_offset) {
auto* interned_strings_section = &sections[ImageHeader::kSectionInternedStrings];
*interned_strings_section = ImageSection(cur_pos, intern_table_bytes_);
cur_pos = interned_strings_section->End();
+ // Round up to the alignment the class table expects. See HashSet::WriteToMemory.
+ cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
+ // Calculate the size of the class table section.
+ auto* class_table_section = &sections[ImageHeader::kSectionClassTable];
+ *class_table_section = ImageSection(cur_pos, class_table_bytes_);
+ cur_pos = class_table_section->End();
+ // Image end goes right before the start of the image bitmap.
+ const size_t image_end = static_cast<uint32_t>(cur_pos);
// Finally bitmap section.
const size_t bitmap_bytes = image_bitmap_->Size();
auto* bitmap_section = &sections[ImageHeader::kSectionImageBitmap];
@@ -1165,10 +1294,10 @@ void ImageWriter::CreateHeader(size_t oat_loaded_size, size_t oat_data_offset) {
}
LOG(INFO) << "Methods: clean=" << clean_methods_ << " dirty=" << dirty_methods_;
}
- const size_t image_end = static_cast<uint32_t>(interned_strings_section->End());
CHECK_EQ(AlignUp(image_begin_ + image_end, kPageSize), oat_file_begin) <<
"Oat file should be right after the image.";
- // Create the header.
+ // Create the header, leave 0 for data size since we will fill this in as we are writing the
+ // image.
new (image_->Begin()) ImageHeader(PointerToLowMemUInt32(image_begin_),
image_end,
sections,
@@ -1179,7 +1308,9 @@ void ImageWriter::CreateHeader(size_t oat_loaded_size, size_t oat_data_offset) {
PointerToLowMemUInt32(oat_data_end),
PointerToLowMemUInt32(oat_file_end),
target_ptr_size_,
- compile_pic_);
+ compile_pic_,
+ image_storage_mode_,
+ /*data_size*/0u);
}
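
CreateHeader() lays the sections out by advancing a cursor: each section starts at the suitably aligned end of the previous one, image_end now lands after the class table rather than after the interned strings, and the bitmap is rounded up to a page boundary. A toy version of that arithmetic, with made-up sizes:

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>

constexpr size_t RoundUp(size_t x, size_t n) {  // n must be a power of two.
  return (x + n - 1) & ~(n - 1);
}

int main() {
  constexpr size_t kPageSize = 4096;
  size_t cur_pos = 0x1000;  // End of the interned strings section.
  // The class table expects 8-byte alignment (see HashSet::WriteToMemory).
  cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
  const size_t class_table_offset = cur_pos;
  cur_pos += 0x230;  // class_table_bytes_ stand-in.
  // Image end is taken right before the bitmap, which is page aligned.
  const size_t image_end = cur_pos;
  const size_t bitmap_offset = RoundUp(cur_pos, kPageSize);
  assert(class_table_offset % sizeof(uint64_t) == 0);
  assert(bitmap_offset >= image_end);
  return 0;
}
```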
ArtMethod* ImageWriter::GetImageMethodAddress(ArtMethod* method) {
@@ -1276,23 +1407,52 @@ void ImageWriter::CopyAndFixupNativeData() {
}
image_header->SetImageMethod(static_cast<ImageHeader::ImageMethod>(i), method);
}
+ FixupRootVisitor root_visitor(this);
+
// Write the intern table into the image.
const ImageSection& intern_table_section = image_header->GetImageSection(
ImageHeader::kSectionInternedStrings);
- InternTable* const intern_table = Runtime::Current()->GetInternTable();
- uint8_t* const memory_ptr = image_->Begin() + intern_table_section.Offset();
- const size_t intern_table_bytes = intern_table->WriteToMemory(memory_ptr);
+ Runtime* const runtime = Runtime::Current();
+ InternTable* const intern_table = runtime->GetInternTable();
+ uint8_t* const intern_table_memory_ptr = image_->Begin() + intern_table_section.Offset();
+ const size_t intern_table_bytes = intern_table->WriteToMemory(intern_table_memory_ptr);
+ CHECK_EQ(intern_table_bytes, intern_table_bytes_);
// Fixup the pointers in the newly written intern table to contain image addresses.
- InternTable temp_table;
+ InternTable temp_intern_table;
// Note that we require that ReadFromMemory does not make an internal copy of the elements so that
// the VisitRoots() will update the memory directly rather than the copies.
// This also relies on visit roots not doing any verification which could fail after we update
// the roots to be the image addresses.
- temp_table.ReadFromMemory(memory_ptr);
- CHECK_EQ(temp_table.Size(), intern_table->Size());
- FixupRootVisitor visitor(this);
- temp_table.VisitRoots(&visitor, kVisitRootFlagAllRoots);
- CHECK_EQ(intern_table_bytes, intern_table_bytes_);
+ temp_intern_table.ReadFromMemory(intern_table_memory_ptr);
+ CHECK_EQ(temp_intern_table.Size(), intern_table->Size());
+ temp_intern_table.VisitRoots(&root_visitor, kVisitRootFlagAllRoots);
+
+ // Write the class table(s) into the image. class_table_bytes_ may be 0 if there are multiple
+ // class loaders. Writing multiple class tables into the image is currently unsupported.
+ if (class_table_bytes_ > 0u) {
+ ClassLinker* const class_linker = runtime->GetClassLinker();
+ const ImageSection& class_table_section = image_header->GetImageSection(
+ ImageHeader::kSectionClassTable);
+ uint8_t* const class_table_memory_ptr = image_->Begin() + class_table_section.Offset();
+ ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
+ size_t class_table_bytes = 0;
+ for (mirror::ClassLoader* loader : class_loaders_) {
+ ClassTable* table = class_linker->ClassTableForClassLoader(loader);
+ CHECK(table != nullptr);
+ uint8_t* memory_ptr = class_table_memory_ptr + class_table_bytes;
+ class_table_bytes += table->WriteToMemory(memory_ptr);
+ // Fixup the pointers in the newly written class table to contain image addresses. See
+ // above comment for intern tables.
+ ClassTable temp_class_table;
+ temp_class_table.ReadFromMemory(memory_ptr);
+ CHECK_EQ(temp_class_table.NumZygoteClasses(), table->NumNonZygoteClasses() +
+ table->NumZygoteClasses());
+ BufferedRootVisitor<kDefaultBufferedRootCount> buffered_visitor(&root_visitor,
+ RootInfo(kRootUnknown));
+ temp_class_table.VisitRoots(buffered_visitor);
+ }
+ CHECK_EQ(class_table_bytes, class_table_bytes_);
+ }
}
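
The table fixup above relies on ReadFromMemory() producing a view over the bytes just written rather than an internal copy, so visiting the roots of the read-back table rewrites the pointers inside the image buffer, in place, to image addresses. A sketch of that in-place fixup idea, with PointerTable standing in for InternTable/ClassTable:

```cpp
#include <cassert>
#include <cstdint>
#include <cstring>
#include <vector>

struct PointerTable {
  uintptr_t* slots = nullptr;
  size_t count = 0;
  void ReadFromMemory(uint8_t* ptr) {  // A view over ptr, not a copy.
    std::memcpy(&count, ptr, sizeof(count));
    slots = reinterpret_cast<uintptr_t*>(ptr + sizeof(size_t));
  }
  template <typename Visitor>
  void VisitRoots(Visitor&& v) {
    for (size_t i = 0; i < count; ++i) v(&slots[i]);
  }
};

int main() {
  // "Write" a one-entry table holding a runtime address into the image buffer.
  std::vector<uint8_t> image(sizeof(size_t) + sizeof(uintptr_t));
  const size_t count = 1;
  const uintptr_t runtime_addr = 0x7000;
  std::memcpy(image.data(), &count, sizeof(count));
  std::memcpy(image.data() + sizeof(count), &runtime_addr,
              sizeof(runtime_addr));
  // Read it back as a view and rewrite the root to an image address in place.
  PointerTable view;
  view.ReadFromMemory(image.data());
  view.VisitRoots([](uintptr_t* slot) { *slot = 0x1000; });
  uintptr_t fixed;
  std::memcpy(&fixed, image.data() + sizeof(size_t), sizeof(fixed));
  assert(fixed == 0x1000);  // The buffer itself was updated.
  return 0;
}
```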
void ImageWriter::CopyAndFixupObjects() {
@@ -1506,8 +1666,7 @@ void ImageWriter::FixupObject(Object* orig, Object* copy) {
ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
if (klass == class_linker->GetClassRoot(ClassLinker::kJavaLangDexCache)) {
FixupDexCache(down_cast<mirror::DexCache*>(orig), down_cast<mirror::DexCache*>(copy));
- } else if (klass->IsSubClass(down_cast<mirror::Class*>(
- class_linker->GetClassRoot(ClassLinker::kJavaLangClassLoader)))) {
+ } else if (klass->IsClassLoaderClass()) {
// If src is a ClassLoader, set the class table to null so that it gets recreated by the
// ClassLoader.
down_cast<mirror::ClassLoader*>(copy)->SetClassTable(nullptr);
@@ -1793,7 +1952,8 @@ uint8_t* ImageWriter::GetOatFileBegin() const {
bin_slot_sizes_[kBinArtMethodDirty] +
bin_slot_sizes_[kBinArtMethodClean] +
bin_slot_sizes_[kBinDexCacheArray] +
- intern_table_bytes_;
+ intern_table_bytes_ +
+ class_table_bytes_;
return image_begin_ + RoundUp(image_end_ + native_sections_size, kPageSize);
}