summary refs log tree commit diff
path: root/runtime/image.cc
diff options
context:
space:
mode:
Diffstat (limited to 'runtime/image.cc')
-rw-r--r--  runtime/image.cc  262
1 file changed, 244 insertions, 18 deletions
diff --git a/runtime/image.cc b/runtime/image.cc
index b39ffe5690..bb1701f8fe 100644
--- a/runtime/image.cc
+++ b/runtime/image.cc
@@ -17,7 +17,12 @@
#include "image.h"
#include <lz4.h>
+#include <lz4hc.h>
#include <sstream>
+#include <sys/stat.h>
+#include <zlib.h>
+
+#include "android-base/stringprintf.h"
#include "base/bit_utils.h"
#include "base/length_prefixed_array.h"
@@ -29,8 +34,8 @@
namespace art {
const uint8_t ImageHeader::kImageMagic[] = { 'a', 'r', 't', '\n' };
-// Last change: Math.fma(double, double, double) intrinsic.
-const uint8_t ImageHeader::kImageVersion[] = { '1', '0', '6', '\0' };
+// Last change: Add DexCacheSection.
+const uint8_t ImageHeader::kImageVersion[] = { '1', '0', '8', '\0' };
ImageHeader::ImageHeader(uint32_t image_reservation_size,
uint32_t component_count,
@@ -65,12 +70,14 @@ ImageHeader::ImageHeader(uint32_t image_reservation_size,
image_roots_(image_roots),
pointer_size_(pointer_size) {
CHECK_EQ(image_begin, RoundUp(image_begin, kPageSize));
- CHECK_EQ(oat_file_begin, RoundUp(oat_file_begin, kPageSize));
- CHECK_EQ(oat_data_begin, RoundUp(oat_data_begin, kPageSize));
- CHECK_LT(image_roots, oat_file_begin);
- CHECK_LE(oat_file_begin, oat_data_begin);
- CHECK_LT(oat_data_begin, oat_data_end);
- CHECK_LE(oat_data_end, oat_file_end);
+ if (oat_checksum != 0u) {
+ CHECK_EQ(oat_file_begin, RoundUp(oat_file_begin, kPageSize));
+ CHECK_EQ(oat_data_begin, RoundUp(oat_data_begin, kPageSize));
+ CHECK_LT(image_roots, oat_file_begin);
+ CHECK_LE(oat_file_begin, oat_data_begin);
+ CHECK_LT(oat_data_begin, oat_data_end);
+ CHECK_LE(oat_data_end, oat_file_end);
+ }
CHECK(ValidPointerSize(pointer_size_)) << pointer_size_;
memcpy(magic_, kImageMagic, sizeof(kImageMagic));
memcpy(version_, kImageVersion, sizeof(kImageVersion));
@@ -127,14 +134,16 @@ bool ImageHeader::IsValid() const {
if (image_begin_ >= image_begin_ + image_size_) {
return false;
}
- if (oat_file_begin_ > oat_file_end_) {
- return false;
- }
- if (oat_data_begin_ > oat_data_end_) {
- return false;
- }
- if (oat_file_begin_ >= oat_data_begin_) {
- return false;
+ if (oat_checksum_ != 0u) {
+ if (oat_file_begin_ > oat_file_end_) {
+ return false;
+ }
+ if (oat_data_begin_ > oat_data_end_) {
+ return false;
+ }
+ if (oat_file_begin_ >= oat_data_begin_) {
+ return false;
+ }
}
return true;
}
@@ -170,6 +179,23 @@ PointerSize ImageHeader::GetPointerSize() const {
return ConvertToPointerSize(pointer_size_);
}
+bool LZ4_decompress_safe_checked(const char* source,
+ char* dest,
+ int compressed_size,
+ int max_decompressed_size,
+ /*out*/ size_t* decompressed_size_checked,
+ /*out*/ std::string* error_msg) {
+ int decompressed_size = LZ4_decompress_safe(source, dest, compressed_size, max_decompressed_size);
+ if (UNLIKELY(decompressed_size < 0)) {
+ *error_msg = android::base::StringPrintf("LZ4_decompress_safe() returned negative size: %d",
+ decompressed_size);
+ return false;
+ } else {
+ *decompressed_size_checked = static_cast<size_t>(decompressed_size);
+ return true;
+ }
+}
+
bool ImageHeader::Block::Decompress(uint8_t* out_ptr,
const uint8_t* in_ptr,
std::string* error_msg) const {
@@ -182,11 +208,17 @@ bool ImageHeader::Block::Decompress(uint8_t* out_ptr,
case kStorageModeLZ4:
case kStorageModeLZ4HC: {
// LZ4HC and LZ4 have same internal format, both use LZ4_decompress.
- const size_t decompressed_size = LZ4_decompress_safe(
+ size_t decompressed_size;
+ bool ok = LZ4_decompress_safe_checked(
reinterpret_cast<const char*>(in_ptr) + data_offset_,
reinterpret_cast<char*>(out_ptr) + image_offset_,
data_size_,
- image_size_);
+ image_size_,
+ &decompressed_size,
+ error_msg);
+ if (!ok) {
+ return false;
+ }
CHECK_EQ(decompressed_size, image_size_);
break;
}
@@ -211,10 +243,204 @@ const char* ImageHeader::GetImageSectionName(ImageSections index) {
case kSectionInternedStrings: return "InternedStrings";
case kSectionClassTable: return "ClassTable";
case kSectionStringReferenceOffsets: return "StringReferenceOffsets";
+ case kSectionDexCacheArrays: return "DexCacheArrays";
case kSectionMetadata: return "Metadata";
case kSectionImageBitmap: return "ImageBitmap";
case kSectionCount: return nullptr;
}
}
+// If `image_storage_mode` is compressed, compress data from `source`
+// into `storage`, and return an array pointing to the compressed data.
+// If the mode is uncompressed, just return an array pointing to `source`.
+static ArrayRef<const uint8_t> MaybeCompressData(ArrayRef<const uint8_t> source,
+ ImageHeader::StorageMode image_storage_mode,
+ /*out*/ dchecked_vector<uint8_t>* storage) {
+ const uint64_t compress_start_time = NanoTime();
+
+ switch (image_storage_mode) {
+ case ImageHeader::kStorageModeLZ4: {
+ storage->resize(LZ4_compressBound(source.size()));
+ size_t data_size = LZ4_compress_default(
+ reinterpret_cast<char*>(const_cast<uint8_t*>(source.data())),
+ reinterpret_cast<char*>(storage->data()),
+ source.size(),
+ storage->size());
+ storage->resize(data_size);
+ break;
+ }
+ case ImageHeader::kStorageModeLZ4HC: {
+ // Bound is same as non HC.
+ storage->resize(LZ4_compressBound(source.size()));
+ size_t data_size = LZ4_compress_HC(
+ reinterpret_cast<const char*>(const_cast<uint8_t*>(source.data())),
+ reinterpret_cast<char*>(storage->data()),
+ source.size(),
+ storage->size(),
+ LZ4HC_CLEVEL_MAX);
+ storage->resize(data_size);
+ break;
+ }
+ case ImageHeader::kStorageModeUncompressed: {
+ return source;
+ }
+ default: {
+ LOG(FATAL) << "Unsupported";
+ UNREACHABLE();
+ }
+ }
+
+ DCHECK(image_storage_mode == ImageHeader::kStorageModeLZ4 ||
+ image_storage_mode == ImageHeader::kStorageModeLZ4HC);
+ VLOG(image) << "Compressed from " << source.size() << " to " << storage->size() << " in "
+ << PrettyDuration(NanoTime() - compress_start_time);
+ if (kIsDebugBuild) {
+ dchecked_vector<uint8_t> decompressed(source.size());
+ size_t decompressed_size;
+ std::string error_msg;
+ bool ok = LZ4_decompress_safe_checked(
+ reinterpret_cast<char*>(storage->data()),
+ reinterpret_cast<char*>(decompressed.data()),
+ storage->size(),
+ decompressed.size(),
+ &decompressed_size,
+ &error_msg);
+ if (!ok) {
+ LOG(FATAL) << error_msg;
+ UNREACHABLE();
+ }
+ CHECK_EQ(decompressed_size, decompressed.size());
+ CHECK_EQ(memcmp(source.data(), decompressed.data(), source.size()), 0) << image_storage_mode;
+ }
+ return ArrayRef<const uint8_t>(*storage);
+}
+
+bool ImageHeader::WriteData(const ImageFileGuard& image_file,
+ const uint8_t* data,
+ const uint8_t* bitmap_data,
+ ImageHeader::StorageMode image_storage_mode,
+ uint32_t max_image_block_size,
+ bool update_checksum,
+ std::string* error_msg) {
+ const bool is_compressed = image_storage_mode != ImageHeader::kStorageModeUncompressed;
+ dchecked_vector<std::pair<uint32_t, uint32_t>> block_sources;
+ dchecked_vector<ImageHeader::Block> blocks;
+
+ // Add a set of solid blocks such that no block is larger than the maximum size. A solid block
+ // is a block that must be decompressed all at once.
+ auto add_blocks = [&](uint32_t offset, uint32_t size) {
+ while (size != 0u) {
+ const uint32_t cur_size = std::min(size, max_image_block_size);
+ block_sources.emplace_back(offset, cur_size);
+ offset += cur_size;
+ size -= cur_size;
+ }
+ };
+
+ add_blocks(sizeof(ImageHeader), this->GetImageSize() - sizeof(ImageHeader));
+
+ // Checksum of compressed image data and header.
+ uint32_t image_checksum = 0u;
+ if (update_checksum) {
+ image_checksum = adler32(0L, Z_NULL, 0);
+ image_checksum = adler32(image_checksum,
+ reinterpret_cast<const uint8_t*>(this),
+ sizeof(ImageHeader));
+ }
+
+ // Copy and compress blocks.
+ uint32_t out_offset = sizeof(ImageHeader);
+ for (const std::pair<uint32_t, uint32_t> block : block_sources) {
+ ArrayRef<const uint8_t> raw_image_data(data + block.first, block.second);
+ dchecked_vector<uint8_t> compressed_data;
+ ArrayRef<const uint8_t> image_data =
+ MaybeCompressData(raw_image_data, image_storage_mode, &compressed_data);
+
+ if (!is_compressed) {
+ // For uncompressed, preserve alignment since the image will be directly mapped.
+ out_offset = block.first;
+ }
+
+ // Fill in the compressed location of the block.
+ blocks.emplace_back(ImageHeader::Block(
+ image_storage_mode,
+ /*data_offset=*/ out_offset,
+ /*data_size=*/ image_data.size(),
+ /*image_offset=*/ block.first,
+ /*image_size=*/ block.second));
+
+ if (!image_file->PwriteFully(image_data.data(), image_data.size(), out_offset)) {
+ *error_msg = "Failed to write image file data " +
+ image_file->GetPath() + ": " + std::string(strerror(errno));
+ return false;
+ }
+ out_offset += image_data.size();
+ if (update_checksum) {
+ image_checksum = adler32(image_checksum, image_data.data(), image_data.size());
+ }
+ }
+
+ if (is_compressed) {
+ // Align up since the compressed data is not necessarily aligned.
+ out_offset = RoundUp(out_offset, alignof(ImageHeader::Block));
+ CHECK(!blocks.empty());
+ const size_t blocks_bytes = blocks.size() * sizeof(blocks[0]);
+ if (!image_file->PwriteFully(&blocks[0], blocks_bytes, out_offset)) {
+ *error_msg = "Failed to write image blocks " +
+ image_file->GetPath() + ": " + std::string(strerror(errno));
+ return false;
+ }
+ this->blocks_offset_ = out_offset;
+ this->blocks_count_ = blocks.size();
+ out_offset += blocks_bytes;
+ }
+
+ // Data size includes everything except the bitmap.
+ this->data_size_ = out_offset - sizeof(ImageHeader);
+
+ // Update and write the bitmap section. Note that the bitmap section is relative to the
+ // possibly compressed image.
+ ImageSection& bitmap_section = GetImageSection(ImageHeader::kSectionImageBitmap);
+ // Align up since data size may be unaligned if the image is compressed.
+ out_offset = RoundUp(out_offset, kPageSize);
+ bitmap_section = ImageSection(out_offset, bitmap_section.Size());
+
+ if (!image_file->PwriteFully(bitmap_data,
+ bitmap_section.Size(),
+ bitmap_section.Offset())) {
+ *error_msg = "Failed to write image file bitmap " +
+ image_file->GetPath() + ": " + std::string(strerror(errno));
+ return false;
+ }
+
+ int err = image_file->Flush();
+ if (err < 0) {
+ *error_msg = "Failed to flush image file " + image_file->GetPath() + ": " + std::to_string(err);
+ return false;
+ }
+
+ if (update_checksum) {
+ // Calculate the image checksum of the remaining data.
+ image_checksum = adler32(GetImageChecksum(),
+ reinterpret_cast<const uint8_t*>(bitmap_data),
+ bitmap_section.Size());
+ this->SetImageChecksum(image_checksum);
+ }
+
+ if (VLOG_IS_ON(image)) {
+ const size_t separately_written_section_size = bitmap_section.Size();
+ const size_t total_uncompressed_size = image_size_ + separately_written_section_size;
+ const size_t total_compressed_size = out_offset + separately_written_section_size;
+
+ VLOG(compiler) << "UncompressedImageSize = " << total_uncompressed_size;
+ if (total_uncompressed_size != total_compressed_size) {
+ VLOG(compiler) << "CompressedImageSize = " << total_compressed_size;
+ }
+ }
+
+ DCHECK_EQ(bitmap_section.End(), static_cast<size_t>(image_file->GetLength()))
+ << "Bitmap should be at the end of the file";
+ return true;
+}
+
} // namespace art