Diffstat (limited to 'oatdump/oatdump.cc')
-rw-r--r--  oatdump/oatdump.cc | 221
 1 file changed, 199 insertions(+), 22 deletions(-)
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index 148ee88669..69901c13cd 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -529,6 +529,12 @@ class OatDumper {
}
}
+ {
+ os << "OAT FILE STATS:\n";
+ VariableIndentationOutputStream vios(&os);
+ stats_.Dump(vios);
+ }
+
os << std::flush;
return success;
}
@@ -574,6 +580,116 @@ class OatDumper {
return nullptr;
}
+ struct Stats {
+ enum ByteKind {
+ kByteKindCode,
+ kByteKindQuickMethodHeader,
+ kByteKindCodeInfoLocationCatalog,
+ kByteKindCodeInfoDexRegisterMap,
+ kByteKindCodeInfoInlineInfo,
+ kByteKindCodeInfoEncoding,
+ kByteKindCodeInfoOther,
+ kByteKindStackMapNativePc,
+ kByteKindStackMapDexPc,
+ kByteKindStackMapDexRegisterMap,
+ kByteKindStackMapInlineInfo,
+ kByteKindStackMapRegisterMask,
+ kByteKindStackMapMask,
+ kByteKindStackMapOther,
+ kByteKindCount,
+ kByteKindStackMapFirst = kByteKindCodeInfoOther,
+ kByteKindStackMapLast = kByteKindStackMapOther,
+ };
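The kByteKindStackMapFirst/kByteKindStackMapLast aliases let Dump() total all per-stack-map kinds as one contiguous slice of the bits array. A minimal standalone sketch of that sentinel pattern, using hypothetical names rather than the oatdump ones:

#include <cstdint>
#include <numeric>

enum Kind {
  kCode,
  kSubA,
  kSubB,
  kCount,
  kSubFirst = kSubA,  // aliases bracketing the contiguous sub-range
  kSubLast = kSubB,
};

int64_t SumSubRange(const int64_t (&bits)[kCount]) {
  // Enum values double as array indices, so the sub-range is just a pointer slice.
  return std::accumulate(bits + kSubFirst, bits + kSubLast + 1, int64_t{0});
}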
+ int64_t bits[kByteKindCount] = {};
+ // Since compiled code is deduplicated, `seen` records pointers that have already been
+ // counted so that deduplicated code and tables are not counted twice.
+ std::unordered_set<const void*> seen;
+
+ // Returns true if it was newly added.
+ bool AddBitsIfUnique(ByteKind kind, int64_t count, const void* address) {
+ // insert().second is true only when the address was not already in the set.
+ if (seen.insert(address).second) {
+ AddBits(kind, count);
+ return true;
+ }
+ return false;
+ }
+
+ void AddBits(ByteKind kind, int64_t count) {
+ bits[kind] += count;
+ }
+
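AddBitsIfUnique() leans on std::unordered_set::insert() returning a pair whose second member is true only for a newly inserted key; that is what makes deduplicated code and tables count once. A standalone sketch of the same count-each-pointer-once idiom, with hypothetical names:

#include <cstdint>
#include <unordered_set>

struct UniqueByteCounter {
  std::unordered_set<const void*> seen;
  int64_t total_bits = 0;

  // Adds `bits` only the first time `address` shows up; later duplicates are ignored.
  bool AddIfUnique(const void* address, int64_t bits) {
    if (seen.insert(address).second) {  // true => address was not in the set yet
      total_bits += bits;
      return true;
    }
    return false;
  }
};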
+ void Dump(VariableIndentationOutputStream& os) {
+ const int64_t sum = std::accumulate(bits, bits + kByteKindCount, int64_t{0});
+ os.Stream() << "Dumping cumulative use of " << sum / kBitsPerByte << " accounted bytes\n";
+ if (sum > 0) {
+ const int64_t stack_map_bits = std::accumulate(bits + kByteKindStackMapFirst,
+ bits + kByteKindStackMapLast + 1,
+ int64_t{0});
+ Dump(os, "Code ", bits[kByteKindCode], sum);
+ Dump(os, "QuickMethodHeader ", bits[kByteKindQuickMethodHeader], sum);
+ Dump(os, "CodeInfoEncoding ", bits[kByteKindCodeInfoEncoding], sum);
+ Dump(os, "CodeInfoLocationCatalog ", bits[kByteKindCodeInfoLocationCatalog], sum);
+ Dump(os, "CodeInfoDexRegisterMap ", bits[kByteKindCodeInfoDexRegisterMap], sum);
+ Dump(os, "CodeInfoInlineInfo ", bits[kByteKindCodeInfoInlineInfo], sum);
+ Dump(os, "CodeInfoStackMap ", stack_map_bits, sum);
+ {
+ ScopedIndentation indent1(&os);
+ Dump(os,
+ "StackMapNativePc ",
+ bits[kByteKindStackMapNativePc],
+ stack_map_bits,
+ "stack map");
+ Dump(os,
+ "StackMapDexPcEncoding ",
+ bits[kByteKindStackMapDexPc],
+ stack_map_bits,
+ "stack map");
+ Dump(os,
+ "StackMapDexRegisterMap ",
+ bits[kByteKindStackMapDexRegisterMap],
+ stack_map_bits,
+ "stack map");
+ Dump(os,
+ "StackMapInlineInfo ",
+ bits[kByteKindStackMapInlineInfo],
+ stack_map_bits,
+ "stack map");
+ Dump(os,
+ "StackMapRegisterMaskEncoding ",
+ bits[kByteKindStackMapRegisterMask],
+ stack_map_bits,
+ "stack map");
+ Dump(os,
+ "StackMapMask ",
+ bits[kByteKindStackMapMask],
+ stack_map_bits,
+ "stack map");
+ Dump(os,
+ "StackMapOther ",
+ bits[kByteKindStackMapOther],
+ stack_map_bits,
+ "stack map");
+ }
+ }
+ os.Stream() << "\n" << std::flush;
+ }
+
+ private:
+ void Dump(VariableIndentationOutputStream& os,
+ const char* name,
+ int64_t size,
+ int64_t total,
+ const char* sum_of = "total") {
+ const double percent = (static_cast<double>(size) / static_cast<double>(total)) * 100;
+ os.Stream() << StringPrintf("%s = %8" PRId64 " (%2.0f%% of %s)\n",
+ name,
+ size / kBitsPerByte,
+ percent,
+ sum_of);
+ }
+ };
+
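As a rough worked example of the Dump() line format above (illustrative numbers, not real oatdump output): with size = 800 bits and total = 3200 bits, the printed line ends in "= 100 (25% of total)", since 800 / 8 = 100 bytes and 800 / 3200 = 25%. A self-contained sketch with plain printf standing in for ART's StringPrintf and a local kBitsPerByte constant:

#include <cinttypes>
#include <cstdint>
#include <cstdio>

constexpr int64_t kBitsPerByte = 8;  // local stand-in for ART's constant

void DumpLine(const char* name, int64_t size_bits, int64_t total_bits,
              const char* sum_of = "total") {
  const double percent = static_cast<double>(size_bits) / static_cast<double>(total_bits) * 100;
  printf("%s = %8" PRId64 " (%2.0f%% of %s)\n", name, size_bits / kBitsPerByte, percent, sum_of);
}

int main() {
  DumpLine("Code                          ", 800, 3200);  // prints "... =      100 (25% of total)"
  return 0;
}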
private:
void AddAllOffsets() {
// We don't know the length of the code for each method, but we need to know where to stop
@@ -1046,7 +1162,9 @@ class OatDumper {
vios->Stream() << "OatQuickMethodHeader ";
uint32_t method_header_offset = oat_method.GetOatQuickMethodHeaderOffset();
const OatQuickMethodHeader* method_header = oat_method.GetOatQuickMethodHeader();
-
+ stats_.AddBitsIfUnique(Stats::kByteKindQuickMethodHeader,
+ sizeof(*method_header) * kBitsPerByte,
+ method_header);
if (options_.absolute_addresses_) {
vios->Stream() << StringPrintf("%p ", method_header);
}
@@ -1118,6 +1236,7 @@ class OatDumper {
const void* code = oat_method.GetQuickCode();
uint32_t aligned_code_begin = AlignCodeOffset(code_offset);
uint64_t aligned_code_end = aligned_code_begin + code_size;
+ stats_.AddBitsIfUnique(Stats::kByteKindCode, code_size * kBitsPerByte, code);
if (options_.absolute_addresses_) {
vios->Stream() << StringPrintf("%p ", code);
@@ -1223,7 +1342,8 @@ class OatDumper {
code_info.Dump(vios,
oat_method.GetCodeOffset(),
code_item.registers_size_,
- options_.dump_code_info_stack_maps_);
+ options_.dump_code_info_stack_maps_,
+ instruction_set_);
}
void DumpVregLocations(std::ostream& os, const OatFile::OatMethod& oat_method,
@@ -1329,21 +1449,22 @@ class OatDumper {
// For identical native PCs, the order from the CodeInfo is preserved.
class StackMapsHelper {
public:
- explicit StackMapsHelper(const uint8_t* raw_code_info)
+ explicit StackMapsHelper(const uint8_t* raw_code_info, InstructionSet instruction_set)
: code_info_(raw_code_info),
encoding_(code_info_.ExtractEncoding()),
number_of_stack_maps_(code_info_.GetNumberOfStackMaps(encoding_)),
indexes_(),
- offset_(static_cast<size_t>(-1)),
- stack_map_index_(0u) {
+ offset_(static_cast<uint32_t>(-1)),
+ stack_map_index_(0u),
+ instruction_set_(instruction_set) {
if (number_of_stack_maps_ != 0u) {
// Check if native PCs are ordered.
bool ordered = true;
StackMap last = code_info_.GetStackMapAt(0u, encoding_);
for (size_t i = 1; i != number_of_stack_maps_; ++i) {
StackMap current = code_info_.GetStackMapAt(i, encoding_);
- if (last.GetNativePcOffset(encoding_.stack_map_encoding) >
- current.GetNativePcOffset(encoding_.stack_map_encoding)) {
+ if (last.GetNativePcOffset(encoding_.stack_map_encoding, instruction_set) >
+ current.GetNativePcOffset(encoding_.stack_map_encoding, instruction_set)) {
ordered = false;
break;
}
@@ -1359,14 +1480,17 @@ class OatDumper {
indexes_.end(),
[this](size_t lhs, size_t rhs) {
StackMap left = code_info_.GetStackMapAt(lhs, encoding_);
- uint32_t left_pc = left.GetNativePcOffset(encoding_.stack_map_encoding);
+ uint32_t left_pc = left.GetNativePcOffset(encoding_.stack_map_encoding,
+ instruction_set_);
StackMap right = code_info_.GetStackMapAt(rhs, encoding_);
- uint32_t right_pc = right.GetNativePcOffset(encoding_.stack_map_encoding);
+ uint32_t right_pc = right.GetNativePcOffset(encoding_.stack_map_encoding,
+ instruction_set_);
// If the PCs are the same, compare indexes to preserve the original order.
return (left_pc < right_pc) || (left_pc == right_pc && lhs < rhs);
});
}
- offset_ = GetStackMapAt(0).GetNativePcOffset(encoding_.stack_map_encoding);
+ offset_ = GetStackMapAt(0).GetNativePcOffset(encoding_.stack_map_encoding,
+ instruction_set_);
}
}
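The comparator above orders stack-map indexes by native PC and falls back to the index itself on ties, which is what preserves the CodeInfo order for identical PCs. A standalone sketch of that tie-breaking sort over made-up PC offsets:

#include <algorithm>
#include <cinttypes>
#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
  // Pretend native PC offsets for stack maps 0..4; note the tie at PC 16.
  std::vector<uint32_t> native_pcs = {32, 16, 16, 8, 24};
  std::vector<size_t> indexes(native_pcs.size());
  for (size_t i = 0; i != indexes.size(); ++i) indexes[i] = i;

  // Order by PC; on equal PCs keep the original (CodeInfo) order by comparing indexes.
  std::sort(indexes.begin(), indexes.end(), [&](size_t lhs, size_t rhs) {
    uint32_t left_pc = native_pcs[lhs];
    uint32_t right_pc = native_pcs[rhs];
    return (left_pc < right_pc) || (left_pc == right_pc && lhs < rhs);
  });

  for (size_t i : indexes) {
    printf("stack map %zu at pc %" PRIu32 "\n", i, native_pcs[i]);
  }
  // Prints maps 3, 1, 2, 4, 0 -- map 1 stays ahead of map 2 despite the equal PC.
  return 0;
}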
@@ -1378,7 +1502,7 @@ class OatDumper {
return encoding_;
}
- size_t GetOffset() const {
+ uint32_t GetOffset() const {
return offset_;
}
@@ -1389,8 +1513,9 @@ class OatDumper {
void Next() {
++stack_map_index_;
offset_ = (stack_map_index_ == number_of_stack_maps_)
- ? static_cast<size_t>(-1)
- : GetStackMapAt(stack_map_index_).GetNativePcOffset(encoding_.stack_map_encoding);
+ ? static_cast<uint32_t>(-1)
+ : GetStackMapAt(stack_map_index_).GetNativePcOffset(encoding_.stack_map_encoding,
+ instruction_set_);
}
private:
@@ -1406,8 +1531,9 @@ class OatDumper {
const CodeInfoEncoding encoding_;
const size_t number_of_stack_maps_;
dchecked_vector<size_t> indexes_; // Used if stack map native PCs are not ordered.
- size_t offset_;
+ uint32_t offset_;
size_t stack_map_index_;
+ const InstructionSet instruction_set_;
};
void DumpCode(VariableIndentationOutputStream* vios,
@@ -1423,7 +1549,61 @@ class OatDumper {
return;
} else if (!bad_input && IsMethodGeneratedByOptimizingCompiler(oat_method, code_item)) {
// The optimizing compiler outputs its CodeInfo data in the vmap table.
- StackMapsHelper helper(oat_method.GetVmapTable());
+ StackMapsHelper helper(oat_method.GetVmapTable(), instruction_set_);
+ {
+ CodeInfoEncoding encoding(helper.GetEncoding());
+ StackMapEncoding stack_map_encoding(encoding.stack_map_encoding);
+ const size_t num_stack_maps = encoding.number_of_stack_maps;
+ std::vector<uint8_t> size_vector;
+ encoding.Compress(&size_vector);
+ if (stats_.AddBitsIfUnique(Stats::kByteKindCodeInfoEncoding,
+ size_vector.size() * kBitsPerByte,
+ oat_method.GetVmapTable())) {
+ stats_.AddBits(
+ Stats::kByteKindStackMapNativePc,
+ stack_map_encoding.GetNativePcEncoding().BitSize() * num_stack_maps);
+ stats_.AddBits(
+ Stats::kByteKindStackMapDexPc,
+ stack_map_encoding.GetDexPcEncoding().BitSize() * num_stack_maps);
+ stats_.AddBits(
+ Stats::kByteKindStackMapDexRegisterMap,
+ stack_map_encoding.GetDexRegisterMapEncoding().BitSize() * num_stack_maps);
+ stats_.AddBits(
+ Stats::kByteKindStackMapInlineInfo,
+ stack_map_encoding.GetInlineInfoEncoding().BitSize() * num_stack_maps);
+ stats_.AddBits(
+ Stats::kByteKindStackMapRegisterMask,
+ stack_map_encoding.GetRegisterMaskEncoding().BitSize() * num_stack_maps);
+ const size_t stack_mask_bits = encoding.stack_map_size_in_bytes * kBitsPerByte -
+ stack_map_encoding.GetStackMaskBitOffset();
+ stats_.AddBits(
+ Stats::kByteKindStackMapMask,
+ stack_mask_bits * num_stack_maps);
+ const size_t stack_map_bits =
+ stack_map_encoding.GetStackMaskBitOffset() + stack_mask_bits;
+ stats_.AddBits(
+ Stats::kByteKindStackMapOther,
+ (encoding.stack_map_size_in_bytes * kBitsPerByte - stack_map_bits) * num_stack_maps);
+ const size_t stack_map_bytes = helper.GetCodeInfo().GetStackMapsSize(encoding);
+ const size_t location_catalog_bytes =
+ helper.GetCodeInfo().GetDexRegisterLocationCatalogSize(encoding);
+ stats_.AddBits(Stats::kByteKindCodeInfoLocationCatalog,
+ kBitsPerByte * location_catalog_bytes);
+ const size_t dex_register_bytes =
+ helper.GetCodeInfo().GetDexRegisterMapsSize(encoding, code_item->registers_size_);
+ stats_.AddBits(
+ Stats::kByteKindCodeInfoDexRegisterMap,
+ kBitsPerByte * dex_register_bytes);
+ const size_t inline_info_bytes =
+ encoding.non_header_size -
+ stack_map_bytes -
+ location_catalog_bytes -
+ dex_register_bytes;
+ stats_.AddBits(Stats::kByteKindCodeInfoInlineInfo,
+ inline_info_bytes * kBitsPerByte);
+ }
+ }
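The block above carves each stack map's bits into the named field encodings plus a stack mask that runs from GetStackMaskBitOffset() to the end of the encoded stack map. A rough arithmetic sketch of that per-stack-map bit budget, with made-up field widths rather than values from a real oat file:

#include <cstddef>
#include <cstdio>

int main() {
  // Illustrative encoding: each stack map occupies 12 bytes = 96 bits.
  const size_t kBitsPerByte = 8;
  const size_t stack_map_size_in_bytes = 12;
  const size_t total_bits = stack_map_size_in_bytes * kBitsPerByte;  // 96

  // Hypothetical bit widths of the named fields in one stack map.
  const size_t native_pc_bits = 16;
  const size_t dex_pc_bits = 12;
  const size_t dex_register_map_bits = 10;
  const size_t inline_info_bits = 8;
  const size_t register_mask_bits = 8;
  const size_t named_bits = native_pc_bits + dex_pc_bits + dex_register_map_bits +
                            inline_info_bits + register_mask_bits;        // 54

  // The stack mask starts at its bit offset and runs to the end of the stack map.
  const size_t stack_mask_bit_offset = 60;
  const size_t stack_mask_bits = total_bits - stack_mask_bit_offset;      // 36

  // Bits below the mask offset that no named field claims show up as slack.
  const size_t slack_bits = stack_mask_bit_offset - named_bits;           // 6
  printf("named %zu + slack %zu + mask %zu = %zu bits per stack map\n",
         named_bits, slack_bits, stack_mask_bits, total_bits);
  return 0;
}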
const uint8_t* quick_native_pc = reinterpret_cast<const uint8_t*>(quick_code);
size_t offset = 0;
while (offset < code_size) {
@@ -1436,7 +1616,8 @@ class OatDumper {
helper.GetCodeInfo(),
helper.GetEncoding(),
oat_method.GetCodeOffset(),
- code_item->registers_size_);
+ code_item->registers_size_,
+ instruction_set_);
do {
helper.Next();
// There may be multiple stack maps at a given PC. We display only the first one.
@@ -1460,6 +1641,7 @@ class OatDumper {
const InstructionSet instruction_set_;
std::set<uintptr_t> offsets_;
Disassembler* disassembler_;
+ Stats stats_;
};
class ImageDumper {
@@ -1776,7 +1958,7 @@ class ImageDumper {
os << StringPrintf("%d (0x%x)\n", field->GetShort(obj), field->GetShort(obj));
break;
case Primitive::kPrimBoolean:
- os << StringPrintf("%s (0x%x)\n", field->GetBoolean(obj)? "true" : "false",
+ os << StringPrintf("%s (0x%x)\n", field->GetBoolean(obj) ? "true" : "false",
field->GetBoolean(obj));
break;
case Primitive::kPrimByte:
@@ -2132,7 +2314,6 @@ class ImageDumper {
size_t managed_code_bytes;
size_t managed_code_bytes_ignoring_deduplication;
- size_t managed_to_native_code_bytes;
size_t native_to_managed_code_bytes;
size_t class_initializer_code_bytes;
size_t large_initializer_code_bytes;
@@ -2161,7 +2342,6 @@ class ImageDumper {
alignment_bytes(0),
managed_code_bytes(0),
managed_code_bytes_ignoring_deduplication(0),
- managed_to_native_code_bytes(0),
native_to_managed_code_bytes(0),
class_initializer_code_bytes(0),
large_initializer_code_bytes(0),
@@ -2359,7 +2539,6 @@ class ImageDumper {
os << StringPrintf("oat_file_bytes = %8zd\n"
"managed_code_bytes = %8zd (%2.0f%% of oat file bytes)\n"
- "managed_to_native_code_bytes = %8zd (%2.0f%% of oat file bytes)\n"
"native_to_managed_code_bytes = %8zd (%2.0f%% of oat file bytes)\n\n"
"class_initializer_code_bytes = %8zd (%2.0f%% of oat file bytes)\n"
"large_initializer_code_bytes = %8zd (%2.0f%% of oat file bytes)\n"
@@ -2367,8 +2546,6 @@ class ImageDumper {
oat_file_bytes,
managed_code_bytes,
PercentOfOatBytes(managed_code_bytes),
- managed_to_native_code_bytes,
- PercentOfOatBytes(managed_to_native_code_bytes),
native_to_managed_code_bytes,
PercentOfOatBytes(native_to_managed_code_bytes),
class_initializer_code_bytes,