Merge "Un-disable redefinition tests on JIT."
diff --git a/compiler/debug/elf_debug_line_writer.h b/compiler/debug/elf_debug_line_writer.h
index 18a9165..cdd1e53 100644
--- a/compiler/debug/elf_debug_line_writer.h
+++ b/compiler/debug/elf_debug_line_writer.h
@@ -104,10 +104,10 @@
for (uint32_t s = 0; s < code_info.GetNumberOfStackMaps(encoding); s++) {
StackMap stack_map = code_info.GetStackMapAt(s, encoding);
DCHECK(stack_map.IsValid());
- const uint32_t pc = stack_map.GetNativePcOffset(encoding.stack_map_encoding, isa);
- const int32_t dex = stack_map.GetDexPc(encoding.stack_map_encoding);
+ const uint32_t pc = stack_map.GetNativePcOffset(encoding.stack_map.encoding, isa);
+ const int32_t dex = stack_map.GetDexPc(encoding.stack_map.encoding);
pc2dex_map.push_back({pc, dex});
- if (stack_map.HasDexRegisterMap(encoding.stack_map_encoding)) {
+ if (stack_map.HasDexRegisterMap(encoding.stack_map.encoding)) {
// Guess that the first map with local variables is the end of prologue.
prologue_end = std::min(prologue_end, pc);
}
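
The change in this file (and in elf_debug_loc_writer.h just below) is mechanical: every flat encoding.stack_map_encoding access becomes encoding.stack_map.encoding, going through the per-table BitEncodingTable<StackMapEncoding> member that this patch adds to CodeInfoEncoding in runtime/stack_map.h. A minimal sketch of the resulting read pattern, mirroring the loop above (illustrative only, not part of the patch; the output container is a simplified stand-in for the writer's pc2dex_map):

  static void CollectPc2DexEntries(const CodeInfo& code_info,
                                   InstructionSet isa,
                                   std::vector<std::pair<uint32_t, int32_t>>* pc2dex_map) {
    CodeInfoEncoding encoding = code_info.ExtractEncoding();
    for (uint32_t s = 0; s < encoding.stack_map.num_entries; ++s) {
      StackMap stack_map = code_info.GetStackMapAt(s, encoding);
      // Per-field accessors now take the nested stack map encoding.
      const uint32_t pc = stack_map.GetNativePcOffset(encoding.stack_map.encoding, isa);
      const int32_t dex = stack_map.GetDexPc(encoding.stack_map.encoding);
      pc2dex_map->push_back({pc, dex});
    }
  }
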
diff --git a/compiler/debug/elf_debug_loc_writer.h b/compiler/debug/elf_debug_loc_writer.h
index bce5387..cbfdbdd 100644
--- a/compiler/debug/elf_debug_loc_writer.h
+++ b/compiler/debug/elf_debug_loc_writer.h
@@ -104,7 +104,7 @@
for (uint32_t s = 0; s < code_info.GetNumberOfStackMaps(encoding); s++) {
StackMap stack_map = code_info.GetStackMapAt(s, encoding);
DCHECK(stack_map.IsValid());
- if (!stack_map.HasDexRegisterMap(encoding.stack_map_encoding)) {
+ if (!stack_map.HasDexRegisterMap(encoding.stack_map.encoding)) {
// The compiler creates stackmaps without register maps at the start of
// basic blocks in order to keep instruction-accurate line number mapping.
// However, we never stop at those (breakpoint locations always have map).
@@ -112,7 +112,7 @@
// The main reason for this is to save space by avoiding undefined gaps.
continue;
}
- const uint32_t pc_offset = stack_map.GetNativePcOffset(encoding.stack_map_encoding, isa);
+ const uint32_t pc_offset = stack_map.GetNativePcOffset(encoding.stack_map.encoding, isa);
DCHECK_LE(pc_offset, method_info->code_size);
DCHECK_LE(compilation_unit_code_address, method_info->code_address);
const uint32_t low_pc = dchecked_integral_cast<uint32_t>(
@@ -136,7 +136,7 @@
}
// Check that the stack map is in the requested range.
- uint32_t dex_pc = stack_map.GetDexPc(encoding.stack_map_encoding);
+ uint32_t dex_pc = stack_map.GetDexPc(encoding.stack_map.encoding);
if (!(dex_pc_low <= dex_pc && dex_pc < dex_pc_high)) {
// The variable is not in scope at this PC. Therefore omit the entry.
// Note that this is different to None() entry which means in scope, but unknown location.
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index 10f5cab..f8e01b7 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -153,56 +153,35 @@
}
size_t StackMapStream::PrepareForFillIn() {
- const size_t stack_mask_size_in_bits = stack_mask_max_ + 1; // Need room for max element too.
- const size_t number_of_stack_masks = PrepareStackMasks(stack_mask_size_in_bits);
- const size_t register_mask_size_in_bits = MinimumBitsToStore(register_mask_max_);
- const size_t number_of_register_masks = PrepareRegisterMasks();
- dex_register_maps_size_ = ComputeDexRegisterMapsSize();
- ComputeInlineInfoEncoding(); // needs dex_register_maps_size_.
- inline_info_size_ = inline_infos_.size() * inline_info_encoding_.GetEntrySize();
+ CodeInfoEncoding encoding;
+ encoding.dex_register_map.num_entries = 0; // TODO: Remove this field.
+ encoding.dex_register_map.num_bytes = ComputeDexRegisterMapsSize();
+ encoding.location_catalog.num_entries = location_catalog_entries_.size();
+ encoding.location_catalog.num_bytes = ComputeDexRegisterLocationCatalogSize();
+ encoding.inline_info.num_entries = inline_infos_.size();
+ ComputeInlineInfoEncoding(&encoding.inline_info.encoding,
+ encoding.dex_register_map.num_bytes);
CodeOffset max_native_pc_offset = ComputeMaxNativePcCodeOffset();
- // The stack map contains compressed native PC offsets.
- const size_t stack_map_size = stack_map_encoding_.SetFromSizes(
+ // Prepare the CodeInfo variable-sized encoding.
+ encoding.stack_mask.encoding.num_bits = stack_mask_max_ + 1; // Need room for max element too.
+ encoding.stack_mask.num_entries = PrepareStackMasks(encoding.stack_mask.encoding.num_bits);
+ encoding.register_mask.encoding.num_bits = MinimumBitsToStore(register_mask_max_);
+ encoding.register_mask.num_entries = PrepareRegisterMasks();
+ encoding.stack_map.num_entries = stack_maps_.size();
+ encoding.stack_map.encoding.SetFromSizes(
+ // The stack map contains compressed native PC offsets.
max_native_pc_offset.CompressedValue(),
dex_pc_max_,
- dex_register_maps_size_,
- inline_info_size_,
- number_of_register_masks,
- number_of_stack_masks);
- stack_maps_size_ = RoundUp(stack_maps_.size() * stack_map_size, kBitsPerByte) / kBitsPerByte;
- dex_register_location_catalog_size_ = ComputeDexRegisterLocationCatalogSize();
- const size_t stack_masks_bits = number_of_stack_masks * stack_mask_size_in_bits;
- const size_t register_masks_bits = number_of_register_masks * register_mask_size_in_bits;
- // Register masks are last, stack masks are right before that last.
- // They are both bit packed / aligned.
- const size_t non_header_size =
- stack_maps_size_ +
- dex_register_location_catalog_size_ +
- dex_register_maps_size_ +
- inline_info_size_ +
- RoundUp(stack_masks_bits + register_masks_bits, kBitsPerByte) / kBitsPerByte;
-
- // Prepare the CodeInfo variable-sized encoding.
- CodeInfoEncoding code_info_encoding;
- code_info_encoding.non_header_size = non_header_size;
- code_info_encoding.number_of_stack_maps = stack_maps_.size();
- code_info_encoding.number_of_stack_masks = number_of_stack_masks;
- code_info_encoding.number_of_register_masks = number_of_register_masks;
- code_info_encoding.stack_mask_size_in_bits = stack_mask_size_in_bits;
- code_info_encoding.register_mask_size_in_bits = register_mask_size_in_bits;
- code_info_encoding.stack_map_encoding = stack_map_encoding_;
- code_info_encoding.inline_info_encoding = inline_info_encoding_;
- code_info_encoding.number_of_location_catalog_entries = location_catalog_entries_.size();
- code_info_encoding.Compress(&code_info_encoding_);
-
- // TODO: Move the catalog at the end. It is currently too expensive at runtime
- // to compute its size (note that we do not encode that size in the CodeInfo).
- dex_register_location_catalog_start_ = code_info_encoding_.size() + stack_maps_size_;
- dex_register_maps_start_ =
- dex_register_location_catalog_start_ + dex_register_location_catalog_size_;
- inline_infos_start_ = dex_register_maps_start_ + dex_register_maps_size_;
-
- needed_size_ = code_info_encoding_.size() + non_header_size;
+ encoding.dex_register_map.num_bytes,
+ encoding.inline_info.num_entries,
+ encoding.register_mask.num_entries,
+ encoding.stack_mask.num_entries);
+ DCHECK_EQ(code_info_encoding_.size(), 0u);
+ encoding.Compress(&code_info_encoding_);
+ encoding.ComputeTableOffsets();
+ // Compute table offsets so we can get the non header size.
+ DCHECK_EQ(encoding.HeaderSize(), code_info_encoding_.size());
+ needed_size_ = code_info_encoding_.size() + encoding.NonHeaderSize();
return needed_size_;
}
@@ -255,7 +234,8 @@
return size;
}
-void StackMapStream::ComputeInlineInfoEncoding() {
+void StackMapStream::ComputeInlineInfoEncoding(InlineInfoEncoding* encoding,
+ size_t dex_register_maps_bytes) {
uint32_t method_index_max = 0;
uint32_t dex_pc_max = DexFile::kDexNoIndex;
uint32_t extra_data_max = 0;
@@ -281,10 +261,7 @@
}
DCHECK_EQ(inline_info_index, inline_infos_.size());
- inline_info_encoding_.SetFromSizes(method_index_max,
- dex_pc_max,
- extra_data_max,
- dex_register_maps_size_);
+ encoding->SetFromSizes(method_index_max, dex_pc_max, extra_data_max, dex_register_maps_bytes);
}
void StackMapStream::FillIn(MemoryRegion region) {
@@ -299,19 +276,18 @@
// Write the CodeInfo header.
region.CopyFrom(0, MemoryRegion(code_info_encoding_.data(), code_info_encoding_.size()));
- MemoryRegion dex_register_locations_region = region.Subregion(
- dex_register_maps_start_, dex_register_maps_size_);
-
- MemoryRegion inline_infos_region = region.Subregion(
- inline_infos_start_, inline_info_size_);
-
CodeInfo code_info(region);
CodeInfoEncoding encoding = code_info.ExtractEncoding();
- DCHECK_EQ(code_info.GetStackMapsSize(encoding), stack_maps_size_);
+ DCHECK_EQ(encoding.stack_map.num_entries, stack_maps_.size());
+
+ MemoryRegion dex_register_locations_region = region.Subregion(
+ encoding.dex_register_map.byte_offset,
+ encoding.dex_register_map.num_bytes);
// Set the Dex register location catalog.
MemoryRegion dex_register_location_catalog_region = region.Subregion(
- dex_register_location_catalog_start_, dex_register_location_catalog_size_);
+ encoding.location_catalog.byte_offset,
+ encoding.location_catalog.num_bytes);
DexRegisterLocationCatalog dex_register_location_catalog(dex_register_location_catalog_region);
// Offset in `dex_register_location_catalog` where to store the next
// register location.
@@ -325,27 +301,27 @@
ArenaBitVector empty_bitmask(allocator_, 0, /* expandable */ false, kArenaAllocStackMapStream);
uintptr_t next_dex_register_map_offset = 0;
- uintptr_t next_inline_info_offset = 0;
+ uintptr_t next_inline_info_index = 0;
for (size_t i = 0, e = stack_maps_.size(); i < e; ++i) {
StackMap stack_map = code_info.GetStackMapAt(i, encoding);
StackMapEntry entry = stack_maps_[i];
- stack_map.SetDexPc(stack_map_encoding_, entry.dex_pc);
- stack_map.SetNativePcCodeOffset(stack_map_encoding_, entry.native_pc_code_offset);
- stack_map.SetRegisterMaskIndex(stack_map_encoding_, entry.register_mask_index);
- stack_map.SetStackMaskIndex(stack_map_encoding_, entry.stack_mask_index);
+ stack_map.SetDexPc(encoding.stack_map.encoding, entry.dex_pc);
+ stack_map.SetNativePcCodeOffset(encoding.stack_map.encoding, entry.native_pc_code_offset);
+ stack_map.SetRegisterMaskIndex(encoding.stack_map.encoding, entry.register_mask_index);
+ stack_map.SetStackMaskIndex(encoding.stack_map.encoding, entry.stack_mask_index);
if (entry.num_dex_registers == 0 || (entry.live_dex_registers_mask->NumSetBits() == 0)) {
// No dex map available.
- stack_map.SetDexRegisterMapOffset(stack_map_encoding_, StackMap::kNoDexRegisterMap);
+ stack_map.SetDexRegisterMapOffset(encoding.stack_map.encoding, StackMap::kNoDexRegisterMap);
} else {
// Search for an entry with the same dex map.
if (entry.same_dex_register_map_as_ != kNoSameDexMapFound) {
// If we have a hit reuse the offset.
stack_map.SetDexRegisterMapOffset(
- stack_map_encoding_,
+ encoding.stack_map.encoding,
code_info.GetStackMapAt(entry.same_dex_register_map_as_, encoding)
- .GetDexRegisterMapOffset(stack_map_encoding_));
+ .GetDexRegisterMapOffset(encoding.stack_map.encoding));
} else {
// New dex registers maps should be added to the stack map.
MemoryRegion register_region = dex_register_locations_region.Subregion(
@@ -354,7 +330,8 @@
next_dex_register_map_offset += register_region.size();
DexRegisterMap dex_register_map(register_region);
stack_map.SetDexRegisterMapOffset(
- stack_map_encoding_, register_region.begin() - dex_register_locations_region.begin());
+ encoding.stack_map.encoding,
+ register_region.begin() - dex_register_locations_region.begin());
// Set the dex register location.
FillInDexRegisterMap(dex_register_map,
@@ -366,37 +343,37 @@
// Set the inlining info.
if (entry.inlining_depth != 0) {
- MemoryRegion inline_region = inline_infos_region.Subregion(
- next_inline_info_offset,
- entry.inlining_depth * inline_info_encoding_.GetEntrySize());
- next_inline_info_offset += inline_region.size();
- InlineInfo inline_info(inline_region);
+ InlineInfo inline_info = code_info.GetInlineInfo(next_inline_info_index, encoding);
- // Currently relative to the dex register map.
- stack_map.SetInlineDescriptorOffset(
- stack_map_encoding_, inline_region.begin() - dex_register_locations_region.begin());
+ // Fill in the index.
+ stack_map.SetInlineInfoIndex(encoding.stack_map.encoding, next_inline_info_index);
+ DCHECK_EQ(next_inline_info_index, entry.inline_infos_start_index);
+ next_inline_info_index += entry.inlining_depth;
- inline_info.SetDepth(inline_info_encoding_, entry.inlining_depth);
+ inline_info.SetDepth(encoding.inline_info.encoding, entry.inlining_depth);
DCHECK_LE(entry.inline_infos_start_index + entry.inlining_depth, inline_infos_.size());
+
for (size_t depth = 0; depth < entry.inlining_depth; ++depth) {
InlineInfoEntry inline_entry = inline_infos_[depth + entry.inline_infos_start_index];
if (inline_entry.method != nullptr) {
inline_info.SetMethodIndexAtDepth(
- inline_info_encoding_,
+ encoding.inline_info.encoding,
depth,
High32Bits(reinterpret_cast<uintptr_t>(inline_entry.method)));
inline_info.SetExtraDataAtDepth(
- inline_info_encoding_,
+ encoding.inline_info.encoding,
depth,
Low32Bits(reinterpret_cast<uintptr_t>(inline_entry.method)));
} else {
- inline_info.SetMethodIndexAtDepth(inline_info_encoding_, depth, inline_entry.method_index);
- inline_info.SetExtraDataAtDepth(inline_info_encoding_, depth, 1);
+ inline_info.SetMethodIndexAtDepth(encoding.inline_info.encoding,
+ depth,
+ inline_entry.method_index);
+ inline_info.SetExtraDataAtDepth(encoding.inline_info.encoding, depth, 1);
}
- inline_info.SetDexPcAtDepth(inline_info_encoding_, depth, inline_entry.dex_pc);
+ inline_info.SetDexPcAtDepth(encoding.inline_info.encoding, depth, inline_entry.dex_pc);
if (inline_entry.num_dex_registers == 0) {
// No dex map available.
- inline_info.SetDexRegisterMapOffsetAtDepth(inline_info_encoding_,
+ inline_info.SetDexRegisterMapOffsetAtDepth(encoding.inline_info.encoding,
depth,
StackMap::kNoDexRegisterMap);
DCHECK(inline_entry.live_dex_registers_mask == nullptr);
@@ -408,8 +385,9 @@
next_dex_register_map_offset += register_region.size();
DexRegisterMap dex_register_map(register_region);
inline_info.SetDexRegisterMapOffsetAtDepth(
- inline_info_encoding_,
- depth, register_region.begin() - dex_register_locations_region.begin());
+ encoding.inline_info.encoding,
+ depth,
+ register_region.begin() - dex_register_locations_region.begin());
FillInDexRegisterMap(dex_register_map,
inline_entry.num_dex_registers,
@@ -417,30 +395,28 @@
inline_entry.dex_register_locations_start_index);
}
}
- } else {
- if (inline_info_size_ != 0) {
- stack_map.SetInlineDescriptorOffset(stack_map_encoding_, StackMap::kNoInlineInfo);
- }
+ } else if (encoding.stack_map.encoding.GetInlineInfoEncoding().BitSize() > 0) {
+ stack_map.SetInlineInfoIndex(encoding.stack_map.encoding, StackMap::kNoInlineInfo);
}
}
// Write stack masks table.
- size_t stack_mask_bits = encoding.stack_mask_size_in_bits;
+ const size_t stack_mask_bits = encoding.stack_mask.encoding.BitSize();
if (stack_mask_bits > 0) {
size_t stack_mask_bytes = RoundUp(stack_mask_bits, kBitsPerByte) / kBitsPerByte;
- for (size_t i = 0; i < encoding.number_of_stack_masks; ++i) {
+ for (size_t i = 0; i < encoding.stack_mask.num_entries; ++i) {
MemoryRegion source(&stack_masks_[i * stack_mask_bytes], stack_mask_bytes);
- BitMemoryRegion stack_mask = code_info.GetStackMask(encoding, i);
- for (size_t bit_index = 0; bit_index < encoding.stack_mask_size_in_bits; ++bit_index) {
+ BitMemoryRegion stack_mask = code_info.GetStackMask(i, encoding);
+ for (size_t bit_index = 0; bit_index < stack_mask_bits; ++bit_index) {
stack_mask.StoreBit(bit_index, source.LoadBit(bit_index));
}
}
}
// Write register masks table.
- for (size_t i = 0; i < encoding.number_of_register_masks; ++i) {
- BitMemoryRegion register_mask = code_info.GetRegisterMask(encoding, i);
- register_mask.StoreBits(0, register_masks_[i], encoding.register_mask_size_in_bits);
+ for (size_t i = 0; i < encoding.register_mask.num_entries; ++i) {
+ BitMemoryRegion register_mask = code_info.GetRegisterMask(i, encoding);
+ register_mask.StoreBits(0, register_masks_[i], encoding.register_mask.encoding.BitSize());
}
// Verify all written data in debug build.
@@ -542,7 +518,8 @@
}
// Compare to the seen location.
if (expected.GetKind() == DexRegisterLocation::Kind::kNone) {
- DCHECK(!dex_register_map.IsValid() || !dex_register_map.IsDexRegisterLive(reg));
+ DCHECK(!dex_register_map.IsValid() || !dex_register_map.IsDexRegisterLive(reg))
+ << dex_register_map.IsValid() << " " << dex_register_map.IsDexRegisterLive(reg);
} else {
DCHECK(dex_register_map.IsDexRegisterLive(reg));
DexRegisterLocation seen = dex_register_map.GetDexRegisterLocation(
@@ -595,7 +572,7 @@
DCHECK_EQ(code_info.GetNumberOfStackMaps(encoding), stack_maps_.size());
for (size_t s = 0; s < stack_maps_.size(); ++s) {
const StackMap stack_map = code_info.GetStackMapAt(s, encoding);
- const StackMapEncoding& stack_map_encoding = encoding.stack_map_encoding;
+ const StackMapEncoding& stack_map_encoding = encoding.stack_map.encoding;
StackMapEntry entry = stack_maps_[s];
// Check main stack map fields.
@@ -629,18 +606,18 @@
DCHECK_EQ(stack_map.HasInlineInfo(stack_map_encoding), (entry.inlining_depth != 0));
if (entry.inlining_depth != 0) {
InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding);
- DCHECK_EQ(inline_info.GetDepth(encoding.inline_info_encoding), entry.inlining_depth);
+ DCHECK_EQ(inline_info.GetDepth(encoding.inline_info.encoding), entry.inlining_depth);
for (size_t d = 0; d < entry.inlining_depth; ++d) {
size_t inline_info_index = entry.inline_infos_start_index + d;
DCHECK_LT(inline_info_index, inline_infos_.size());
InlineInfoEntry inline_entry = inline_infos_[inline_info_index];
- DCHECK_EQ(inline_info.GetDexPcAtDepth(encoding.inline_info_encoding, d),
+ DCHECK_EQ(inline_info.GetDexPcAtDepth(encoding.inline_info.encoding, d),
inline_entry.dex_pc);
- if (inline_info.EncodesArtMethodAtDepth(encoding.inline_info_encoding, d)) {
- DCHECK_EQ(inline_info.GetArtMethodAtDepth(encoding.inline_info_encoding, d),
+ if (inline_info.EncodesArtMethodAtDepth(encoding.inline_info.encoding, d)) {
+ DCHECK_EQ(inline_info.GetArtMethodAtDepth(encoding.inline_info.encoding, d),
inline_entry.method);
} else {
- DCHECK_EQ(inline_info.GetMethodIndexAtDepth(encoding.inline_info_encoding, d),
+ DCHECK_EQ(inline_info.GetMethodIndexAtDepth(encoding.inline_info.encoding, d),
inline_entry.method_index);
}
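
PrepareForFillIn() now describes the layout entirely through CodeInfoEncoding: it fills in each table's entry count and entry encoding, serializes that header with Compress(), and calls ComputeTableOffsets() so FillIn() and the runtime can locate every table from the header alone. FillIn() above indexes the region with dex_register_map.byte_offset and location_catalog.byte_offset, so the table offsets are measured from the start of the CodeInfo, i.e. the walk starts after the header. The bodies of Compress() and ComputeTableOffsets() are not visible in this hunk; the sketch below shows the expected shape, inferred from the Encode()/UpdateBitOffset() helpers added to the table structs in runtime/stack_map.h (the cache_non_header_size member, the header-size caching in Compress(), and the final rounding are assumptions):

  // Sketch only. Serialize the table headers in declaration order; byte-sized
  // tables come first, as noted on the CodeInfoEncoding declaration.
  template <typename Vector>
  void CodeInfoEncoding::Compress(Vector* dest) {
    dex_register_map.Encode(dest);
    location_catalog.Encode(dest);
    stack_map.Encode(dest);
    register_mask.Encode(dest);
    stack_mask.Encode(dest);
    if (stack_map.encoding.GetInlineInfoEncoding().BitSize() > 0) {
      inline_info.Encode(dest);  // Only present when a stack map can reference it.
    }
    cache_header_size = dchecked_integral_cast<uint32_t>(dest->size());  // Assumed caching.
  }

  // Sketch only. Walk the tables once, assigning each its offset past the header.
  void CodeInfoEncoding::ComputeTableOffsets() {
    size_t bit_offset = HeaderSize() * kBitsPerByte;  // Tables start after the header.
    dex_register_map.UpdateBitOffset(&bit_offset);    // Byte-aligned.
    location_catalog.UpdateBitOffset(&bit_offset);    // Byte-aligned.
    stack_map.UpdateBitOffset(&bit_offset);           // Bit-packed from here on.
    register_mask.UpdateBitOffset(&bit_offset);
    stack_mask.UpdateBitOffset(&bit_offset);
    inline_info.UpdateBitOffset(&bit_offset);
    // Assumed field: everything after the header, rounded up to whole bytes.
    cache_non_header_size = RoundUp(bit_offset, kBitsPerByte) / kBitsPerByte - HeaderSize();
  }
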
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index b1069a1..08c1d3e 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -79,13 +79,6 @@
current_entry_(),
current_inline_info_(),
code_info_encoding_(allocator->Adapter(kArenaAllocStackMapStream)),
- inline_info_size_(0),
- dex_register_maps_size_(0),
- stack_maps_size_(0),
- dex_register_location_catalog_size_(0),
- dex_register_location_catalog_start_(0),
- dex_register_maps_start_(0),
- inline_infos_start_(0),
needed_size_(0),
current_dex_register_(0),
in_inline_frame_(false) {
@@ -160,7 +153,8 @@
size_t ComputeDexRegisterMapSize(uint32_t num_dex_registers,
const BitVector* live_dex_registers_mask) const;
size_t ComputeDexRegisterMapsSize() const;
- void ComputeInlineInfoEncoding();
+ void ComputeInlineInfoEncoding(InlineInfoEncoding* encoding,
+ size_t dex_register_maps_bytes);
CodeOffset ComputeMaxNativePcCodeOffset() const;
@@ -214,16 +208,7 @@
StackMapEntry current_entry_;
InlineInfoEntry current_inline_info_;
- StackMapEncoding stack_map_encoding_;
- InlineInfoEncoding inline_info_encoding_;
ArenaVector<uint8_t> code_info_encoding_;
- size_t inline_info_size_;
- size_t dex_register_maps_size_;
- size_t stack_maps_size_;
- size_t dex_register_location_catalog_size_;
- size_t dex_register_location_catalog_start_;
- size_t dex_register_maps_start_;
- size_t inline_infos_start_;
size_t needed_size_;
uint32_t current_dex_register_;
bool in_inline_frame_;
diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc
index ce6d5c2..bd0aa6d 100644
--- a/compiler/optimizing/stack_map_test.cc
+++ b/compiler/optimizing/stack_map_test.cc
@@ -32,10 +32,10 @@
const StackMap& stack_map,
const BitVector& bit_vector) {
BitMemoryRegion stack_mask = code_info.GetStackMaskOf(encoding, stack_map);
- if (bit_vector.GetNumberOfBits() > encoding.stack_mask_size_in_bits) {
+ if (bit_vector.GetNumberOfBits() > encoding.stack_mask.encoding.BitSize()) {
return false;
}
- for (size_t i = 0; i < encoding.stack_mask_size_in_bits; ++i) {
+ for (size_t i = 0; i < encoding.stack_mask.encoding.BitSize(); ++i) {
if (stack_mask.LoadBit(i) != bit_vector.IsBitSet(i)) {
return false;
}
@@ -78,13 +78,13 @@
StackMap stack_map = code_info.GetStackMapAt(0, encoding);
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0, encoding)));
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64, encoding)));
- ASSERT_EQ(0u, stack_map.GetDexPc(encoding.stack_map_encoding));
- ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding.stack_map_encoding, kRuntimeISA));
+ ASSERT_EQ(0u, stack_map.GetDexPc(encoding.stack_map.encoding));
+ ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA));
ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(encoding, stack_map));
ASSERT_TRUE(CheckStackMask(code_info, encoding, stack_map, sp_mask));
- ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map_encoding));
+ ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map.encoding));
DexRegisterMap dex_register_map =
code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers);
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
@@ -123,7 +123,7 @@
ASSERT_EQ(0, location0.GetValue());
ASSERT_EQ(-2, location1.GetValue());
- ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map_encoding));
+ ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map.encoding));
}
TEST(StackMapTest, Test2) {
@@ -193,13 +193,13 @@
StackMap stack_map = code_info.GetStackMapAt(0, encoding);
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0, encoding)));
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64, encoding)));
- ASSERT_EQ(0u, stack_map.GetDexPc(encoding.stack_map_encoding));
- ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding.stack_map_encoding, kRuntimeISA));
+ ASSERT_EQ(0u, stack_map.GetDexPc(encoding.stack_map.encoding));
+ ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA));
ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(encoding, stack_map));
ASSERT_TRUE(CheckStackMask(code_info, encoding, stack_map, sp_mask1));
- ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map_encoding));
+ ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map.encoding));
DexRegisterMap dex_register_map =
code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers);
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
@@ -238,13 +238,13 @@
ASSERT_EQ(0, location0.GetValue());
ASSERT_EQ(-2, location1.GetValue());
- ASSERT_TRUE(stack_map.HasInlineInfo(encoding.stack_map_encoding));
+ ASSERT_TRUE(stack_map.HasInlineInfo(encoding.stack_map.encoding));
InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding);
- ASSERT_EQ(2u, inline_info.GetDepth(encoding.inline_info_encoding));
- ASSERT_EQ(3u, inline_info.GetDexPcAtDepth(encoding.inline_info_encoding, 0));
- ASSERT_EQ(2u, inline_info.GetDexPcAtDepth(encoding.inline_info_encoding, 1));
- ASSERT_TRUE(inline_info.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 0));
- ASSERT_TRUE(inline_info.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 1));
+ ASSERT_EQ(2u, inline_info.GetDepth(encoding.inline_info.encoding));
+ ASSERT_EQ(3u, inline_info.GetDexPcAtDepth(encoding.inline_info.encoding, 0));
+ ASSERT_EQ(2u, inline_info.GetDexPcAtDepth(encoding.inline_info.encoding, 1));
+ ASSERT_TRUE(inline_info.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 0));
+ ASSERT_TRUE(inline_info.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 1));
}
// Second stack map.
@@ -252,13 +252,13 @@
StackMap stack_map = code_info.GetStackMapAt(1, encoding);
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1u, encoding)));
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(128u, encoding)));
- ASSERT_EQ(1u, stack_map.GetDexPc(encoding.stack_map_encoding));
- ASSERT_EQ(128u, stack_map.GetNativePcOffset(encoding.stack_map_encoding, kRuntimeISA));
+ ASSERT_EQ(1u, stack_map.GetDexPc(encoding.stack_map.encoding));
+ ASSERT_EQ(128u, stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA));
ASSERT_EQ(0xFFu, code_info.GetRegisterMaskOf(encoding, stack_map));
ASSERT_TRUE(CheckStackMask(code_info, encoding, stack_map, sp_mask2));
- ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map_encoding));
+ ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map.encoding));
DexRegisterMap dex_register_map =
code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers);
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
@@ -298,7 +298,7 @@
ASSERT_EQ(18, location0.GetValue());
ASSERT_EQ(3, location1.GetValue());
- ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map_encoding));
+ ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map.encoding));
}
// Third stack map.
@@ -306,13 +306,13 @@
StackMap stack_map = code_info.GetStackMapAt(2, encoding);
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(2u, encoding)));
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(192u, encoding)));
- ASSERT_EQ(2u, stack_map.GetDexPc(encoding.stack_map_encoding));
- ASSERT_EQ(192u, stack_map.GetNativePcOffset(encoding.stack_map_encoding, kRuntimeISA));
+ ASSERT_EQ(2u, stack_map.GetDexPc(encoding.stack_map.encoding));
+ ASSERT_EQ(192u, stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA));
ASSERT_EQ(0xABu, code_info.GetRegisterMaskOf(encoding, stack_map));
ASSERT_TRUE(CheckStackMask(code_info, encoding, stack_map, sp_mask3));
- ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map_encoding));
+ ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map.encoding));
DexRegisterMap dex_register_map =
code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers);
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
@@ -352,7 +352,7 @@
ASSERT_EQ(6, location0.GetValue());
ASSERT_EQ(8, location1.GetValue());
- ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map_encoding));
+ ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map.encoding));
}
// Fourth stack map.
@@ -360,13 +360,13 @@
StackMap stack_map = code_info.GetStackMapAt(3, encoding);
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(3u, encoding)));
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(256u, encoding)));
- ASSERT_EQ(3u, stack_map.GetDexPc(encoding.stack_map_encoding));
- ASSERT_EQ(256u, stack_map.GetNativePcOffset(encoding.stack_map_encoding, kRuntimeISA));
+ ASSERT_EQ(3u, stack_map.GetDexPc(encoding.stack_map.encoding));
+ ASSERT_EQ(256u, stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA));
ASSERT_EQ(0xCDu, code_info.GetRegisterMaskOf(encoding, stack_map));
ASSERT_TRUE(CheckStackMask(code_info, encoding, stack_map, sp_mask4));
- ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map_encoding));
+ ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map.encoding));
DexRegisterMap dex_register_map =
code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers);
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
@@ -406,7 +406,7 @@
ASSERT_EQ(3, location0.GetValue());
ASSERT_EQ(1, location1.GetValue());
- ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map_encoding));
+ ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map.encoding));
}
}
@@ -442,11 +442,11 @@
StackMap stack_map = code_info.GetStackMapAt(0, encoding);
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0, encoding)));
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64, encoding)));
- ASSERT_EQ(0u, stack_map.GetDexPc(encoding.stack_map_encoding));
- ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding.stack_map_encoding, kRuntimeISA));
+ ASSERT_EQ(0u, stack_map.GetDexPc(encoding.stack_map.encoding));
+ ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA));
ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(encoding, stack_map));
- ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map_encoding));
+ ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map.encoding));
DexRegisterMap dex_register_map =
code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers);
ASSERT_FALSE(dex_register_map.IsDexRegisterLive(0));
@@ -483,7 +483,7 @@
ASSERT_EQ(0, location0.GetValue());
ASSERT_EQ(-2, location1.GetValue());
- ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map_encoding));
+ ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map.encoding));
}
// Generate a stack map whose dex register offset is
@@ -543,13 +543,13 @@
ASSERT_EQ(255u, dex_register_map0.Size());
StackMap stack_map1 = code_info.GetStackMapAt(1, encoding);
- ASSERT_TRUE(stack_map1.HasDexRegisterMap(encoding.stack_map_encoding));
+ ASSERT_TRUE(stack_map1.HasDexRegisterMap(encoding.stack_map.encoding));
// ...the offset of the second Dex register map (relative to the
// beginning of the Dex register maps region) is 255 (i.e.,
// kNoDexRegisterMapSmallEncoding).
- ASSERT_NE(stack_map1.GetDexRegisterMapOffset(encoding.stack_map_encoding),
+ ASSERT_NE(stack_map1.GetDexRegisterMapOffset(encoding.stack_map.encoding),
StackMap::kNoDexRegisterMap);
- ASSERT_EQ(stack_map1.GetDexRegisterMapOffset(encoding.stack_map_encoding), 0xFFu);
+ ASSERT_EQ(stack_map1.GetDexRegisterMapOffset(encoding.stack_map.encoding), 0xFFu);
}
TEST(StackMapTest, TestShareDexRegisterMap) {
@@ -602,12 +602,12 @@
ASSERT_EQ(-2, dex_registers2.GetConstant(1, number_of_dex_registers, ci, encoding));
// Verify dex register map offsets.
- ASSERT_EQ(sm0.GetDexRegisterMapOffset(encoding.stack_map_encoding),
- sm1.GetDexRegisterMapOffset(encoding.stack_map_encoding));
- ASSERT_NE(sm0.GetDexRegisterMapOffset(encoding.stack_map_encoding),
- sm2.GetDexRegisterMapOffset(encoding.stack_map_encoding));
- ASSERT_NE(sm1.GetDexRegisterMapOffset(encoding.stack_map_encoding),
- sm2.GetDexRegisterMapOffset(encoding.stack_map_encoding));
+ ASSERT_EQ(sm0.GetDexRegisterMapOffset(encoding.stack_map.encoding),
+ sm1.GetDexRegisterMapOffset(encoding.stack_map.encoding));
+ ASSERT_NE(sm0.GetDexRegisterMapOffset(encoding.stack_map.encoding),
+ sm2.GetDexRegisterMapOffset(encoding.stack_map.encoding));
+ ASSERT_NE(sm1.GetDexRegisterMapOffset(encoding.stack_map.encoding),
+ sm2.GetDexRegisterMapOffset(encoding.stack_map.encoding));
}
TEST(StackMapTest, TestNoDexRegisterMap) {
@@ -641,22 +641,22 @@
StackMap stack_map = code_info.GetStackMapAt(0, encoding);
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0, encoding)));
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64, encoding)));
- ASSERT_EQ(0u, stack_map.GetDexPc(encoding.stack_map_encoding));
- ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding.stack_map_encoding, kRuntimeISA));
+ ASSERT_EQ(0u, stack_map.GetDexPc(encoding.stack_map.encoding));
+ ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA));
ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(encoding, stack_map));
- ASSERT_FALSE(stack_map.HasDexRegisterMap(encoding.stack_map_encoding));
- ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map_encoding));
+ ASSERT_FALSE(stack_map.HasDexRegisterMap(encoding.stack_map.encoding));
+ ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map.encoding));
stack_map = code_info.GetStackMapAt(1, encoding);
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1, encoding)));
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(68, encoding)));
- ASSERT_EQ(1u, stack_map.GetDexPc(encoding.stack_map_encoding));
- ASSERT_EQ(68u, stack_map.GetNativePcOffset(encoding.stack_map_encoding, kRuntimeISA));
+ ASSERT_EQ(1u, stack_map.GetDexPc(encoding.stack_map.encoding));
+ ASSERT_EQ(68u, stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA));
ASSERT_EQ(0x4u, code_info.GetRegisterMaskOf(encoding, stack_map));
- ASSERT_FALSE(stack_map.HasDexRegisterMap(encoding.stack_map_encoding));
- ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map_encoding));
+ ASSERT_FALSE(stack_map.HasDexRegisterMap(encoding.stack_map.encoding));
+ ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map.encoding));
}
TEST(StackMapTest, InlineTest) {
@@ -743,11 +743,11 @@
ASSERT_EQ(4, dex_registers0.GetConstant(1, 2, ci, encoding));
InlineInfo if0 = ci.GetInlineInfoOf(sm0, encoding);
- ASSERT_EQ(2u, if0.GetDepth(encoding.inline_info_encoding));
- ASSERT_EQ(2u, if0.GetDexPcAtDepth(encoding.inline_info_encoding, 0));
- ASSERT_TRUE(if0.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 0));
- ASSERT_EQ(3u, if0.GetDexPcAtDepth(encoding.inline_info_encoding, 1));
- ASSERT_TRUE(if0.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 1));
+ ASSERT_EQ(2u, if0.GetDepth(encoding.inline_info.encoding));
+ ASSERT_EQ(2u, if0.GetDexPcAtDepth(encoding.inline_info.encoding, 0));
+ ASSERT_TRUE(if0.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 0));
+ ASSERT_EQ(3u, if0.GetDexPcAtDepth(encoding.inline_info.encoding, 1));
+ ASSERT_TRUE(if0.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 1));
DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(0, if0, encoding, 1);
ASSERT_EQ(8, dex_registers1.GetStackOffsetInBytes(0, 1, ci, encoding));
@@ -767,13 +767,13 @@
ASSERT_EQ(0, dex_registers0.GetConstant(1, 2, ci, encoding));
InlineInfo if1 = ci.GetInlineInfoOf(sm1, encoding);
- ASSERT_EQ(3u, if1.GetDepth(encoding.inline_info_encoding));
- ASSERT_EQ(2u, if1.GetDexPcAtDepth(encoding.inline_info_encoding, 0));
- ASSERT_TRUE(if1.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 0));
- ASSERT_EQ(3u, if1.GetDexPcAtDepth(encoding.inline_info_encoding, 1));
- ASSERT_TRUE(if1.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 1));
- ASSERT_EQ(5u, if1.GetDexPcAtDepth(encoding.inline_info_encoding, 2));
- ASSERT_TRUE(if1.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 2));
+ ASSERT_EQ(3u, if1.GetDepth(encoding.inline_info.encoding));
+ ASSERT_EQ(2u, if1.GetDexPcAtDepth(encoding.inline_info.encoding, 0));
+ ASSERT_TRUE(if1.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 0));
+ ASSERT_EQ(3u, if1.GetDexPcAtDepth(encoding.inline_info.encoding, 1));
+ ASSERT_TRUE(if1.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 1));
+ ASSERT_EQ(5u, if1.GetDexPcAtDepth(encoding.inline_info.encoding, 2));
+ ASSERT_TRUE(if1.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 2));
DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(0, if1, encoding, 1);
ASSERT_EQ(12, dex_registers1.GetStackOffsetInBytes(0, 1, ci, encoding));
@@ -783,7 +783,7 @@
ASSERT_EQ(10, dex_registers2.GetConstant(1, 3, ci, encoding));
ASSERT_EQ(5, dex_registers2.GetMachineRegister(2, 3, ci, encoding));
- ASSERT_FALSE(if1.HasDexRegisterMapAtDepth(encoding.inline_info_encoding, 2));
+ ASSERT_FALSE(if1.HasDexRegisterMapAtDepth(encoding.inline_info.encoding, 2));
}
{
@@ -793,7 +793,7 @@
DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm2, encoding, 2);
ASSERT_FALSE(dex_registers0.IsDexRegisterLive(0));
ASSERT_EQ(4, dex_registers0.GetConstant(1, 2, ci, encoding));
- ASSERT_FALSE(sm2.HasInlineInfo(encoding.stack_map_encoding));
+ ASSERT_FALSE(sm2.HasInlineInfo(encoding.stack_map.encoding));
}
{
@@ -805,15 +805,15 @@
ASSERT_EQ(0, dex_registers0.GetConstant(1, 2, ci, encoding));
InlineInfo if2 = ci.GetInlineInfoOf(sm3, encoding);
- ASSERT_EQ(3u, if2.GetDepth(encoding.inline_info_encoding));
- ASSERT_EQ(2u, if2.GetDexPcAtDepth(encoding.inline_info_encoding, 0));
- ASSERT_TRUE(if2.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 0));
- ASSERT_EQ(5u, if2.GetDexPcAtDepth(encoding.inline_info_encoding, 1));
- ASSERT_TRUE(if2.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 1));
- ASSERT_EQ(10u, if2.GetDexPcAtDepth(encoding.inline_info_encoding, 2));
- ASSERT_TRUE(if2.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 2));
+ ASSERT_EQ(3u, if2.GetDepth(encoding.inline_info.encoding));
+ ASSERT_EQ(2u, if2.GetDexPcAtDepth(encoding.inline_info.encoding, 0));
+ ASSERT_TRUE(if2.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 0));
+ ASSERT_EQ(5u, if2.GetDexPcAtDepth(encoding.inline_info.encoding, 1));
+ ASSERT_TRUE(if2.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 1));
+ ASSERT_EQ(10u, if2.GetDexPcAtDepth(encoding.inline_info.encoding, 2));
+ ASSERT_TRUE(if2.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 2));
- ASSERT_FALSE(if2.HasDexRegisterMapAtDepth(encoding.inline_info_encoding, 0));
+ ASSERT_FALSE(if2.HasDexRegisterMapAtDepth(encoding.inline_info.encoding, 0));
DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(1, if2, encoding, 1);
ASSERT_EQ(2, dex_registers1.GetMachineRegister(0, 1, ci, encoding));
@@ -865,8 +865,8 @@
StackMap stack_map1 = code_info.GetStackMapForNativePcOffset(4, encoding);
StackMap stack_map2 = code_info.GetStackMapForNativePcOffset(8, encoding);
- EXPECT_EQ(stack_map1.GetStackMaskIndex(encoding.stack_map_encoding),
- stack_map2.GetStackMaskIndex(encoding.stack_map_encoding));
+ EXPECT_EQ(stack_map1.GetStackMaskIndex(encoding.stack_map.encoding),
+ stack_map2.GetStackMaskIndex(encoding.stack_map.encoding));
}
} // namespace art
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index 0f02da7..d3192de 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -1461,8 +1461,8 @@
StackMap last = code_info_.GetStackMapAt(0u, encoding_);
for (size_t i = 1; i != number_of_stack_maps_; ++i) {
StackMap current = code_info_.GetStackMapAt(i, encoding_);
- if (last.GetNativePcOffset(encoding_.stack_map_encoding, instruction_set) >
- current.GetNativePcOffset(encoding_.stack_map_encoding, instruction_set)) {
+ if (last.GetNativePcOffset(encoding_.stack_map.encoding, instruction_set) >
+ current.GetNativePcOffset(encoding_.stack_map.encoding, instruction_set)) {
ordered = false;
break;
}
@@ -1478,16 +1478,16 @@
indexes_.end(),
[this](size_t lhs, size_t rhs) {
StackMap left = code_info_.GetStackMapAt(lhs, encoding_);
- uint32_t left_pc = left.GetNativePcOffset(encoding_.stack_map_encoding,
+ uint32_t left_pc = left.GetNativePcOffset(encoding_.stack_map.encoding,
instruction_set_);
StackMap right = code_info_.GetStackMapAt(rhs, encoding_);
- uint32_t right_pc = right.GetNativePcOffset(encoding_.stack_map_encoding,
+ uint32_t right_pc = right.GetNativePcOffset(encoding_.stack_map.encoding,
instruction_set_);
// If the PCs are the same, compare indexes to preserve the original order.
return (left_pc < right_pc) || (left_pc == right_pc && lhs < rhs);
});
}
- offset_ = GetStackMapAt(0).GetNativePcOffset(encoding_.stack_map_encoding,
+ offset_ = GetStackMapAt(0).GetNativePcOffset(encoding_.stack_map.encoding,
instruction_set_);
}
}
@@ -1512,7 +1512,7 @@
++stack_map_index_;
offset_ = (stack_map_index_ == number_of_stack_maps_)
? static_cast<uint32_t>(-1)
- : GetStackMapAt(stack_map_index_).GetNativePcOffset(encoding_.stack_map_encoding,
+ : GetStackMapAt(stack_map_index_).GetNativePcOffset(encoding_.stack_map.encoding,
instruction_set_);
}
@@ -1550,9 +1550,9 @@
StackMapsHelper helper(oat_method.GetVmapTable(), instruction_set_);
{
CodeInfoEncoding encoding(helper.GetEncoding());
- StackMapEncoding stack_map_encoding(encoding.stack_map_encoding);
+ StackMapEncoding stack_map_encoding(encoding.stack_map.encoding);
// helper.GetCodeInfo().GetStackMapAt(0, encoding).;
- const size_t num_stack_maps = encoding.number_of_stack_maps;
+ const size_t num_stack_maps = encoding.stack_map.num_entries;
std::vector<uint8_t> size_vector;
encoding.Compress(&size_vector);
if (stats_.AddBitsIfUnique(Stats::kByteKindCodeInfoEncoding,
@@ -1578,12 +1578,10 @@
stack_map_encoding.GetStackMaskIndexEncoding().BitSize() * num_stack_maps);
stats_.AddBits(
Stats::kByteKindCodeInfoStackMasks,
- helper.GetCodeInfo().GetNumberOfStackMaskBits(encoding) *
- encoding.number_of_stack_masks);
+ encoding.stack_mask.encoding.BitSize() * encoding.stack_mask.num_entries);
stats_.AddBits(
Stats::kByteKindCodeInfoRegisterMasks,
- encoding.register_mask_size_in_bits * encoding.number_of_stack_masks);
- const size_t stack_map_bytes = helper.GetCodeInfo().GetStackMapsSize(encoding);
+ encoding.register_mask.encoding.BitSize() * encoding.register_mask.num_entries);
const size_t location_catalog_bytes =
helper.GetCodeInfo().GetDexRegisterLocationCatalogSize(encoding);
stats_.AddBits(Stats::kByteKindCodeInfoLocationCatalog,
@@ -1593,13 +1591,9 @@
stats_.AddBits(
Stats::kByteKindCodeInfoDexRegisterMap,
kBitsPerByte * dex_register_bytes);
- const size_t inline_info_bytes =
- encoding.non_header_size -
- stack_map_bytes -
- location_catalog_bytes -
- dex_register_bytes;
stats_.AddBits(Stats::kByteKindCodeInfoInlineInfo,
- inline_info_bytes * kBitsPerByte);
+ encoding.inline_info.encoding.BitSize() *
+ encoding.inline_info.num_entries);
}
}
const uint8_t* quick_native_pc = reinterpret_cast<const uint8_t*>(quick_code);
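
With entry counts and entry bit sizes stored per table, oatdump attributes bits to each category directly (entries times entry size) instead of recovering the inline info bytes by subtracting every other table from non_header_size; it also corrects the register-mask bucket, which previously scaled by number_of_stack_masks. Roughly, the non-header payload decomposes as below (a sketch; the field names are those of the CodeInfoEncoding struct added in runtime/stack_map.h, and the sum ignores the final byte rounding):

  // Sketch: per-table bit totals that the Stats buckets above add up.
  size_t TotalTableBits(const CodeInfoEncoding& encoding) {
    return encoding.dex_register_map.num_bytes * kBitsPerByte +
           encoding.location_catalog.num_bytes * kBitsPerByte +
           encoding.stack_map.encoding.BitSize() * encoding.stack_map.num_entries +
           encoding.register_mask.encoding.BitSize() * encoding.register_mask.num_entries +
           encoding.stack_mask.encoding.BitSize() * encoding.stack_mask.num_entries +
           encoding.inline_info.encoding.BitSize() * encoding.inline_info.num_entries;
  }
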
diff --git a/runtime/bit_memory_region.h b/runtime/bit_memory_region.h
index c3b5be4..3a696f1 100644
--- a/runtime/bit_memory_region.h
+++ b/runtime/bit_memory_region.h
@@ -40,6 +40,10 @@
return region_.size_in_bits();
}
+ ALWAYS_INLINE BitMemoryRegion Subregion(size_t bit_offset, size_t bit_size) const {
+ return BitMemoryRegion(region_, bit_start_ + bit_offset, bit_size);
+ }
+
// Load a single bit in the region. The bit at offset 0 is the least
// significant bit in the first byte.
ALWAYS_INLINE bool LoadBit(uintptr_t bit_offset) const {
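
The new BitMemoryRegion::Subregion mirrors MemoryRegion::Subregion at bit granularity; InlineInfo below switches from MemoryRegion to BitMemoryRegion and uses it to slice out the entry for a given inlining depth. A minimal usage sketch (the helper name is illustrative):

  // Pick the bit-packed entry at 'index' out of a table whose entries are
  // 'entry_bit_size' bits wide -- the same pattern InlineInfo::GetRegionAtDepth()
  // uses later in this patch.
  BitMemoryRegion GetEntry(BitMemoryRegion table, size_t index, size_t entry_bit_size) {
    return table.Subregion(index * entry_bit_size, entry_bit_size);
  }
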
diff --git a/runtime/class_table.cc b/runtime/class_table.cc
index ff846a7..f1458b4 100644
--- a/runtime/class_table.cc
+++ b/runtime/class_table.cc
@@ -55,10 +55,6 @@
return nullptr;
}
-// Bug: http://b/31104323 Ignore -Wunreachable-code from the for loop below
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wunreachable-code"
-
mirror::Class* ClassTable::UpdateClass(const char* descriptor, mirror::Class* klass, size_t hash) {
WriterMutexLock mu(Thread::Current(), lock_);
// Should only be updating latest table.
@@ -84,8 +80,6 @@
return existing;
}
-#pragma clang diagnostic pop // http://b/31104323
-
size_t ClassTable::CountDefiningLoaderClasses(ObjPtr<mirror::ClassLoader> defining_loader,
const ClassSet& set) const {
size_t count = 0;
diff --git a/runtime/entrypoints/entrypoint_utils.cc b/runtime/entrypoints/entrypoint_utils.cc
index 06c11f5..fb8139b 100644
--- a/runtime/entrypoints/entrypoint_utils.cc
+++ b/runtime/entrypoints/entrypoint_utils.cc
@@ -204,12 +204,12 @@
CodeInfoEncoding encoding = code_info.ExtractEncoding();
StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset, encoding);
DCHECK(stack_map.IsValid());
- if (stack_map.HasInlineInfo(encoding.stack_map_encoding)) {
+ if (stack_map.HasInlineInfo(encoding.stack_map.encoding)) {
InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding);
caller = GetResolvedMethod(outer_method,
inline_info,
- encoding.inline_info_encoding,
- inline_info.GetDepth(encoding.inline_info_encoding) - 1);
+ encoding.inline_info.encoding,
+ inline_info.GetDepth(encoding.inline_info.encoding) - 1);
}
}
if (kIsDebugBuild && do_caller_check) {
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index bde9009..3ef47c4 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -346,12 +346,12 @@
CodeInfoEncoding encoding = code_info.ExtractEncoding();
StackMap stack_map = code_info.GetStackMapForNativePcOffset(outer_pc_offset, encoding);
DCHECK(stack_map.IsValid());
- if (stack_map.HasInlineInfo(encoding.stack_map_encoding)) {
+ if (stack_map.HasInlineInfo(encoding.stack_map.encoding)) {
InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding);
- return inline_info.GetDexPcAtDepth(encoding.inline_info_encoding,
- inline_info.GetDepth(encoding.inline_info_encoding)-1);
+ return inline_info.GetDexPcAtDepth(encoding.inline_info.encoding,
+ inline_info.GetDepth(encoding.inline_info.encoding)-1);
} else {
- return stack_map.GetDexPc(encoding.stack_map_encoding);
+ return stack_map.GetDexPc(encoding.stack_map.encoding);
}
} else {
return current_code->ToDexPc(*caller_sp, outer_pc);
diff --git a/runtime/jit/jit.cc b/runtime/jit/jit.cc
index fec3c4f..1ec4749 100644
--- a/runtime/jit/jit.cc
+++ b/runtime/jit/jit.cc
@@ -523,7 +523,7 @@
}
}
- native_pc = stack_map.GetNativePcOffset(encoding.stack_map_encoding, kRuntimeISA) +
+ native_pc = stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA) +
osr_method->GetEntryPoint();
VLOG(jit) << "Jumping to "
<< method_name
diff --git a/runtime/oat.h b/runtime/oat.h
index 532c968..e7e8328 100644
--- a/runtime/oat.h
+++ b/runtime/oat.h
@@ -32,7 +32,7 @@
class PACKED(4) OatHeader {
public:
static constexpr uint8_t kOatMagic[] = { 'o', 'a', 't', '\n' };
- static constexpr uint8_t kOatVersion[] = { '1', '0', '9', '\0' }; // Register mask change.
+ static constexpr uint8_t kOatVersion[] = { '1', '1', '0', '\0' }; // Clean up code info change.
static constexpr const char* kImageLocationKey = "image-location";
static constexpr const char* kDex2OatCmdLineKey = "dex2oat-cmdline";
diff --git a/runtime/oat_quick_method_header.cc b/runtime/oat_quick_method_header.cc
index fd84426..b4e4285 100644
--- a/runtime/oat_quick_method_header.cc
+++ b/runtime/oat_quick_method_header.cc
@@ -44,7 +44,7 @@
CodeInfoEncoding encoding = code_info.ExtractEncoding();
StackMap stack_map = code_info.GetStackMapForNativePcOffset(sought_offset, encoding);
if (stack_map.IsValid()) {
- return stack_map.GetDexPc(encoding.stack_map_encoding);
+ return stack_map.GetDexPc(encoding.stack_map.encoding);
}
} else {
DCHECK(method->IsNative());
@@ -80,7 +80,7 @@
: code_info.GetStackMapForDexPc(dex_pc, encoding);
if (stack_map.IsValid()) {
return reinterpret_cast<uintptr_t>(entry_point) +
- stack_map.GetNativePcOffset(encoding.stack_map_encoding, kRuntimeISA);
+ stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA);
}
if (abort_on_failure) {
ScopedObjectAccess soa(Thread::Current());
diff --git a/runtime/quick_exception_handler.cc b/runtime/quick_exception_handler.cc
index bf99509..72e0500 100644
--- a/runtime/quick_exception_handler.cc
+++ b/runtime/quick_exception_handler.cc
@@ -441,7 +441,7 @@
const uint8_t* addr = reinterpret_cast<const uint8_t*>(GetCurrentQuickFrame()) + offset;
value = *reinterpret_cast<const uint32_t*>(addr);
uint32_t bit = (offset >> 2);
- if (bit < encoding.stack_mask_size_in_bits && stack_mask.LoadBit(bit)) {
+ if (bit < encoding.stack_mask.encoding.BitSize() && stack_mask.LoadBit(bit)) {
is_reference = true;
}
break;
diff --git a/runtime/stack.cc b/runtime/stack.cc
index c737fe4..d7ba1d7 100644
--- a/runtime/stack.cc
+++ b/runtime/stack.cc
@@ -145,7 +145,7 @@
DCHECK(walk_kind_ != StackWalkKind::kSkipInlinedFrames);
return GetResolvedMethod(*GetCurrentQuickFrame(),
inline_info,
- encoding.inline_info_encoding,
+ encoding.inline_info.encoding,
depth_in_stack_map);
} else {
return *cur_quick_frame_;
@@ -162,7 +162,7 @@
size_t depth_in_stack_map = current_inlining_depth_ - 1;
const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
CodeInfoEncoding encoding = method_header->GetOptimizedCodeInfo().ExtractEncoding();
- return GetCurrentInlineInfo().GetDexPcAtDepth(encoding.inline_info_encoding,
+ return GetCurrentInlineInfo().GetDexPcAtDepth(encoding.inline_info.encoding,
depth_in_stack_map);
} else if (cur_oat_quick_method_header_ == nullptr) {
return DexFile::kDexNoIndex;
@@ -826,10 +826,10 @@
uint32_t native_pc_offset =
cur_oat_quick_method_header_->NativeQuickPcOffset(cur_quick_frame_pc_);
StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset, encoding);
- if (stack_map.IsValid() && stack_map.HasInlineInfo(encoding.stack_map_encoding)) {
+ if (stack_map.IsValid() && stack_map.HasInlineInfo(encoding.stack_map.encoding)) {
InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding);
DCHECK_EQ(current_inlining_depth_, 0u);
- for (current_inlining_depth_ = inline_info.GetDepth(encoding.inline_info_encoding);
+ for (current_inlining_depth_ = inline_info.GetDepth(encoding.inline_info.encoding);
current_inlining_depth_ != 0;
--current_inlining_depth_) {
bool should_continue = VisitFrame();
diff --git a/runtime/stack_map.cc b/runtime/stack_map.cc
index 4e7c3f4..d657311 100644
--- a/runtime/stack_map.cc
+++ b/runtime/stack_map.cc
@@ -126,9 +126,9 @@
<< ", number_of_stack_maps=" << number_of_stack_maps
<< ")\n";
ScopedIndentation indent1(vios);
- encoding.stack_map_encoding.Dump(vios);
+ encoding.stack_map.encoding.Dump(vios);
if (HasInlineInfo(encoding)) {
- encoding.inline_info_encoding.Dump(vios);
+ encoding.inline_info.encoding.Dump(vios);
}
// Display the Dex register location catalog.
GetDexRegisterLocationCatalog(encoding).Dump(vios, *this);
@@ -193,22 +193,22 @@
uint16_t number_of_dex_registers,
InstructionSet instruction_set,
const std::string& header_suffix) const {
- StackMapEncoding stack_map_encoding = encoding.stack_map_encoding;
+ StackMapEncoding stack_map_encoding = encoding.stack_map.encoding;
const uint32_t pc_offset = GetNativePcOffset(stack_map_encoding, instruction_set);
vios->Stream()
<< "StackMap" << header_suffix
<< std::hex
<< " [native_pc=0x" << code_offset + pc_offset << "]"
- << " [entry_size=0x" << encoding.stack_map_encoding.BitSize() << " bits]"
+ << " [entry_size=0x" << encoding.stack_map.encoding.BitSize() << " bits]"
<< " (dex_pc=0x" << GetDexPc(stack_map_encoding)
<< ", native_pc_offset=0x" << pc_offset
<< ", dex_register_map_offset=0x" << GetDexRegisterMapOffset(stack_map_encoding)
- << ", inline_info_offset=0x" << GetInlineDescriptorOffset(stack_map_encoding)
+ << ", inline_info_offset=0x" << GetInlineInfoIndex(stack_map_encoding)
<< ", register_mask=0x" << code_info.GetRegisterMaskOf(encoding, *this)
<< std::dec
<< ", stack_mask=0b";
BitMemoryRegion stack_mask = code_info.GetStackMaskOf(encoding, *this);
- for (size_t i = 0, e = encoding.stack_mask_size_in_bits; i < e; ++i) {
+ for (size_t i = 0, e = encoding.stack_mask.encoding.BitSize(); i < e; ++i) {
vios->Stream() << stack_mask.LoadBit(e - i - 1);
}
vios->Stream() << ")\n";
@@ -229,7 +229,7 @@
void InlineInfo::Dump(VariableIndentationOutputStream* vios,
const CodeInfo& code_info,
uint16_t number_of_dex_registers[]) const {
- InlineInfoEncoding inline_info_encoding = code_info.ExtractEncoding().inline_info_encoding;
+ InlineInfoEncoding inline_info_encoding = code_info.ExtractEncoding().inline_info.encoding;
vios->Stream() << "InlineInfo with depth "
<< static_cast<uint32_t>(GetDepth(inline_info_encoding))
<< "\n";
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index 062404d..61d6a58 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -693,7 +693,7 @@
size_t SetFromSizes(size_t native_pc_max,
size_t dex_pc_max,
size_t dex_register_map_size,
- size_t inline_info_size,
+ size_t number_of_inline_info,
size_t number_of_register_masks,
size_t number_of_stack_masks) {
total_bit_size_ = 0;
@@ -712,9 +712,7 @@
// greater than the offset we might try to encode, we already implicitly have it.
// If inline_info_size is zero, we can encode only kNoInlineInfo (in zero bits).
inline_info_bit_offset_ = total_bit_size_;
- if (inline_info_size != 0) {
- total_bit_size_ += MinimumBitsToStore(dex_register_map_size + inline_info_size);
- }
+ total_bit_size_ += MinimumBitsToStore(number_of_inline_info);
register_mask_index_bit_offset_ = total_bit_size_;
total_bit_size_ += MinimumBitsToStore(number_of_register_masks);
@@ -749,6 +747,18 @@
return total_bit_size_;
}
+ template<typename Vector>
+ void Encode(Vector* dest) const {
+ static_assert(alignof(StackMapEncoding) == 1, "Should not require alignment");
+ const uint8_t* ptr = reinterpret_cast<const uint8_t*>(this);
+ dest->insert(dest->end(), ptr, ptr + sizeof(*this));
+ }
+
+ void Decode(const uint8_t** ptr) {
+ *this = *reinterpret_cast<const StackMapEncoding*>(*ptr);
+ *ptr += sizeof(*this);
+ }
+
void Dump(VariableIndentationOutputStream* vios) const;
private:
@@ -771,7 +781,7 @@
*
* The information is of the form:
*
- * [native_pc_offset, dex_pc, dex_register_map_offset, inlining_info_offset, register_mask_index,
+ * [native_pc_offset, dex_pc, dex_register_map_offset, inlining_info_index, register_mask_index,
* stack_mask_index].
*/
class StackMap {
@@ -809,12 +819,12 @@
encoding.GetDexRegisterMapEncoding().Store(region_, offset);
}
- ALWAYS_INLINE uint32_t GetInlineDescriptorOffset(const StackMapEncoding& encoding) const {
+ ALWAYS_INLINE uint32_t GetInlineInfoIndex(const StackMapEncoding& encoding) const {
return encoding.GetInlineInfoEncoding().Load(region_);
}
- ALWAYS_INLINE void SetInlineDescriptorOffset(const StackMapEncoding& encoding, uint32_t offset) {
- encoding.GetInlineInfoEncoding().Store(region_, offset);
+ ALWAYS_INLINE void SetInlineInfoIndex(const StackMapEncoding& encoding, uint32_t index) {
+ encoding.GetInlineInfoEncoding().Store(region_, index);
}
ALWAYS_INLINE uint32_t GetRegisterMaskIndex(const StackMapEncoding& encoding) const {
@@ -838,7 +848,7 @@
}
ALWAYS_INLINE bool HasInlineInfo(const StackMapEncoding& encoding) const {
- return GetInlineDescriptorOffset(encoding) != kNoInlineInfo;
+ return GetInlineInfoIndex(encoding) != kNoInlineInfo;
}
ALWAYS_INLINE bool Equals(const StackMap& other) const {
@@ -908,12 +918,24 @@
ALWAYS_INLINE FieldEncoding GetDexRegisterMapEncoding() const {
return FieldEncoding(dex_register_map_bit_offset_, total_bit_size_, -1 /* min_value */);
}
- ALWAYS_INLINE size_t GetEntrySize() const {
- return RoundUp(total_bit_size_, kBitsPerByte) / kBitsPerByte;
+ ALWAYS_INLINE size_t BitSize() const {
+ return total_bit_size_;
}
void Dump(VariableIndentationOutputStream* vios) const;
+ template<typename Vector>
+ void Encode(Vector* dest) const {
+ static_assert(alignof(InlineInfoEncoding) == 1, "Should not require alignment");
+ const uint8_t* ptr = reinterpret_cast<const uint8_t*>(this);
+ dest->insert(dest->end(), ptr, ptr + sizeof(*this));
+ }
+
+ void Decode(const uint8_t** ptr) {
+ *this = *reinterpret_cast<const InlineInfoEncoding*>(*ptr);
+ *ptr += sizeof(*this);
+ }
+
private:
static constexpr uint8_t kIsLastBitOffset = 0;
static constexpr uint8_t kMethodIndexBitOffset = 1;
@@ -934,8 +956,7 @@
*/
class InlineInfo {
public:
- explicit InlineInfo(MemoryRegion region) : region_(region) {
- }
+ explicit InlineInfo(BitMemoryRegion region) : region_(region) {}
ALWAYS_INLINE uint32_t GetDepth(const InlineInfoEncoding& encoding) const {
size_t depth = 0;
@@ -1018,83 +1039,189 @@
uint16_t* number_of_dex_registers) const;
private:
- ALWAYS_INLINE MemoryRegion GetRegionAtDepth(const InlineInfoEncoding& encoding,
- uint32_t depth) const {
- size_t entry_size = encoding.GetEntrySize();
+ ALWAYS_INLINE BitMemoryRegion GetRegionAtDepth(const InlineInfoEncoding& encoding,
+ uint32_t depth) const {
+ size_t entry_size = encoding.BitSize();
DCHECK_GT(entry_size, 0u);
return region_.Subregion(depth * entry_size, entry_size);
}
- MemoryRegion region_;
+ BitMemoryRegion region_;
+};
+
+// Bit sized region encoding, may be more than 255 bits.
+class BitRegionEncoding {
+ public:
+ uint32_t num_bits = 0;
+
+ ALWAYS_INLINE size_t BitSize() const {
+ return num_bits;
+ }
+
+ template<typename Vector>
+ void Encode(Vector* dest) const {
+ EncodeUnsignedLeb128(dest, num_bits); // Use leb in case num_bits is greater than 255.
+ }
+
+ void Decode(const uint8_t** ptr) {
+ num_bits = DecodeUnsignedLeb128(ptr);
+ }
+};
+
+// A table of bit sized encodings.
+template <typename Encoding>
+struct BitEncodingTable {
+ static constexpr size_t kInvalidOffset = static_cast<size_t>(-1);
+ // How the encoding is laid out (serialized).
+ Encoding encoding;
+
+ // Number of entries in the table (serialized).
+ size_t num_entries;
+
+ // Bit offset for the base of the table (computed).
+ size_t bit_offset = kInvalidOffset;
+
+ template<typename Vector>
+ void Encode(Vector* dest) const {
+ EncodeUnsignedLeb128(dest, num_entries);
+ encoding.Encode(dest);
+ }
+
+ ALWAYS_INLINE void Decode(const uint8_t** ptr) {
+ num_entries = DecodeUnsignedLeb128(ptr);
+ encoding.Decode(ptr);
+ }
+
+ // Set the bit offset in the table and adds the space used by the table to offset.
+ void UpdateBitOffset(size_t* offset) {
+ DCHECK(offset != nullptr);
+ bit_offset = *offset;
+ *offset += encoding.BitSize() * num_entries;
+ }
+
+ // Return the bit region for the map at index i.
+ ALWAYS_INLINE BitMemoryRegion BitRegion(MemoryRegion region, size_t index) const {
+ DCHECK_NE(bit_offset, kInvalidOffset) << "Invalid table offset";
+ DCHECK_LT(index, num_entries);
+ const size_t map_size = encoding.BitSize();
+ return BitMemoryRegion(region, bit_offset + index * map_size, map_size);
+ }
+};
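The indexing arithmetic in BitEncodingTable is easy to lose inside the template, so here is a standalone sketch of just that arithmetic. BitTableSketch/BitRange and the 37-bit entry size are hypothetical, not ART types.

#include <cassert>
#include <cstddef>
#include <cstdio>

struct BitRange {
  size_t start_bit;
  size_t size_in_bits;
};

struct BitTableSketch {
  size_t entry_bits = 0;   // Plays the role of encoding.BitSize().
  size_t num_entries = 0;
  size_t bit_offset = 0;   // Filled in by UpdateBitOffset().

  void UpdateBitOffset(size_t* offset) {
    bit_offset = *offset;
    *offset += entry_bits * num_entries;  // Reserve the whole table.
  }

  // Entry i occupies bits [bit_offset + i * entry_bits, bit_offset + (i + 1) * entry_bits).
  BitRange BitRegion(size_t index) const {
    assert(index < num_entries);
    return BitRange{bit_offset + index * entry_bits, entry_bits};
  }
};

int main() {
  BitTableSketch stack_maps;
  stack_maps.entry_bits = 37;            // Hypothetical per-entry width in bits.
  stack_maps.num_entries = 4;
  size_t cursor = 64;                    // Pretend the header ends at bit 64.
  stack_maps.UpdateBitOffset(&cursor);   // Table occupies bits [64, 212).
  BitRange second = stack_maps.BitRegion(1);
  std::printf("next_free_bit=%zu entry1=[%zu,%zu)\n",
              cursor, second.start_bit, second.start_bit + second.size_in_bits);
  return 0;
}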
+
+// A byte-sized table of possibly variable-sized encodings.
+struct ByteSizedTable {
+ static constexpr size_t kInvalidOffset = static_cast<size_t>(-1);
+
+ // Number of entries in the table (serialized).
+ size_t num_entries = 0;
+
+ // Number of bytes of the table (serialized).
+ size_t num_bytes;
+
+ // Byte offset for the base of the table (computed).
+ size_t byte_offset = kInvalidOffset;
+
+ template<typename Vector>
+ void Encode(Vector* dest) const {
+ EncodeUnsignedLeb128(dest, num_entries);
+ EncodeUnsignedLeb128(dest, num_bytes);
+ }
+
+ ALWAYS_INLINE void Decode(const uint8_t** ptr) {
+ num_entries = DecodeUnsignedLeb128(ptr);
+ num_bytes = DecodeUnsignedLeb128(ptr);
+ }
+
+ // Set the byte offset of the table from the given bit offset and add the table's total bit size to *offset.
+ void UpdateBitOffset(size_t* offset) {
+ DCHECK(offset != nullptr);
+ DCHECK_ALIGNED(*offset, kBitsPerByte);
+ byte_offset = *offset / kBitsPerByte;
+ *offset += num_bytes * kBitsPerByte;
+ }
};
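A standalone sketch of the ByteSizedTable bookkeeping: the shared layout cursor stays in bits while the table itself records a byte offset, which only works if the cursor is byte aligned when the table is placed. ByteTableSketch and all sizes below are hypothetical.

#include <cassert>
#include <cstddef>
#include <cstdio>

constexpr size_t kBitsPerByte = 8;  // Stand-in for ART's constant.

struct ByteTableSketch {
  size_t num_bytes = 0;
  size_t byte_offset = 0;

  void UpdateBitOffset(size_t* offset) {
    assert(*offset % kBitsPerByte == 0);  // Byte tables only start on byte boundaries.
    byte_offset = *offset / kBitsPerByte;
    *offset += num_bytes * kBitsPerByte;  // The shared cursor still advances in bits.
  }
};

int main() {
  size_t cursor = 12 * kBitsPerByte;  // Hypothetical 12-byte header.
  ByteTableSketch dex_register_maps;
  dex_register_maps.num_bytes = 40;   // Hypothetical size.
  ByteTableSketch location_catalog;
  location_catalog.num_bytes = 6;     // Hypothetical size.
  dex_register_maps.UpdateBitOffset(&cursor);  // byte_offset = 12
  location_catalog.UpdateBitOffset(&cursor);   // byte_offset = 52
  std::printf("maps@%zuB catalog@%zuB next_bit=%zu\n",
              dex_register_maps.byte_offset, location_catalog.byte_offset, cursor);
  return 0;
}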
// Most of the fields are encoded as ULEB128 to save space.
struct CodeInfoEncoding {
- uint32_t non_header_size;
- uint32_t number_of_stack_maps;
- uint32_t number_of_stack_masks;
- uint32_t number_of_register_masks;
- uint32_t stack_mask_size_in_bits;
- uint32_t register_mask_size_in_bits;
- uint32_t number_of_location_catalog_entries;
- StackMapEncoding stack_map_encoding;
- InlineInfoEncoding inline_info_encoding;
- uint8_t header_size;
+ static constexpr uint32_t kInvalidSize = static_cast<uint32_t>(-1);
+ // Byte-sized tables go first to avoid unnecessary alignment bits.
+ ByteSizedTable dex_register_map;
+ ByteSizedTable location_catalog;
+ BitEncodingTable<StackMapEncoding> stack_map;
+ BitEncodingTable<BitRegionEncoding> register_mask;
+ BitEncodingTable<BitRegionEncoding> stack_mask;
+ BitEncodingTable<InlineInfoEncoding> inline_info;
- CodeInfoEncoding() { }
+ CodeInfoEncoding() {}
explicit CodeInfoEncoding(const void* data) {
const uint8_t* ptr = reinterpret_cast<const uint8_t*>(data);
- non_header_size = DecodeUnsignedLeb128(&ptr);
- number_of_stack_maps = DecodeUnsignedLeb128(&ptr);
- number_of_stack_masks = DecodeUnsignedLeb128(&ptr);
- number_of_register_masks = DecodeUnsignedLeb128(&ptr);
- stack_mask_size_in_bits = DecodeUnsignedLeb128(&ptr);
- register_mask_size_in_bits = DecodeUnsignedLeb128(&ptr);
- number_of_location_catalog_entries = DecodeUnsignedLeb128(&ptr);
- static_assert(alignof(StackMapEncoding) == 1,
- "StackMapEncoding should not require alignment");
- stack_map_encoding = *reinterpret_cast<const StackMapEncoding*>(ptr);
- ptr += sizeof(StackMapEncoding);
- if (stack_map_encoding.GetInlineInfoEncoding().BitSize() > 0) {
- static_assert(alignof(InlineInfoEncoding) == 1,
- "InlineInfoEncoding should not require alignment");
- inline_info_encoding = *reinterpret_cast<const InlineInfoEncoding*>(ptr);
- ptr += sizeof(InlineInfoEncoding);
+ dex_register_map.Decode(&ptr);
+ location_catalog.Decode(&ptr);
+ stack_map.Decode(&ptr);
+ register_mask.Decode(&ptr);
+ stack_mask.Decode(&ptr);
+ if (stack_map.encoding.GetInlineInfoEncoding().BitSize() > 0) {
+ inline_info.Decode(&ptr);
} else {
- inline_info_encoding = InlineInfoEncoding{}; // NOLINT.
+ inline_info = BitEncodingTable<InlineInfoEncoding>();
}
- header_size = dchecked_integral_cast<uint8_t>(ptr - reinterpret_cast<const uint8_t*>(data));
+ cache_header_size =
+ dchecked_integral_cast<uint32_t>(ptr - reinterpret_cast<const uint8_t*>(data));
+ ComputeTableOffsets();
}
template<typename Vector>
- void Compress(Vector* dest) const {
- EncodeUnsignedLeb128(dest, non_header_size);
- EncodeUnsignedLeb128(dest, number_of_stack_maps);
- EncodeUnsignedLeb128(dest, number_of_stack_masks);
- EncodeUnsignedLeb128(dest, number_of_register_masks);
- EncodeUnsignedLeb128(dest, stack_mask_size_in_bits);
- EncodeUnsignedLeb128(dest, register_mask_size_in_bits);
- EncodeUnsignedLeb128(dest, number_of_location_catalog_entries);
- const uint8_t* stack_map_ptr = reinterpret_cast<const uint8_t*>(&stack_map_encoding);
- dest->insert(dest->end(), stack_map_ptr, stack_map_ptr + sizeof(StackMapEncoding));
- if (stack_map_encoding.GetInlineInfoEncoding().BitSize() > 0) {
- const uint8_t* inline_info_ptr = reinterpret_cast<const uint8_t*>(&inline_info_encoding);
- dest->insert(dest->end(), inline_info_ptr, inline_info_ptr + sizeof(InlineInfoEncoding));
+ void Compress(Vector* dest) {
+ dex_register_map.Encode(dest);
+ location_catalog.Encode(dest);
+ stack_map.Encode(dest);
+ register_mask.Encode(dest);
+ stack_mask.Encode(dest);
+ if (stack_map.encoding.GetInlineInfoEncoding().BitSize() > 0) {
+ inline_info.Encode(dest);
}
+ cache_header_size = dest->size();
}
+
+ ALWAYS_INLINE void ComputeTableOffsets() {
+ // Skip the header.
+ size_t bit_offset = HeaderSize() * kBitsPerByte;
+ // The byte-sized tables must be byte aligned, so they go first.
+ dex_register_map.UpdateBitOffset(&bit_offset);
+ location_catalog.UpdateBitOffset(&bit_offset);
+ // Other tables don't require alignment.
+ stack_map.UpdateBitOffset(&bit_offset);
+ register_mask.UpdateBitOffset(&bit_offset);
+ stack_mask.UpdateBitOffset(&bit_offset);
+ inline_info.UpdateBitOffset(&bit_offset);
+ cache_non_header_size = RoundUp(bit_offset, kBitsPerByte) / kBitsPerByte - HeaderSize();
+ }
+
+ ALWAYS_INLINE size_t HeaderSize() const {
+ DCHECK_NE(cache_header_size, kInvalidSize) << "Uninitialized";
+ return cache_header_size;
+ }
+
+ ALWAYS_INLINE size_t NonHeaderSize() const {
+ DCHECK_NE(cache_non_header_size, kInvalidSize) << "Uninitialized";
+ return cache_non_header_size;
+ }
+
+ private:
+ // Computed fields (not serialized).
+ // Header size in bytes.
+ uint32_t cache_header_size = kInvalidSize;
+ // Non-header size in bytes.
+ uint32_t cache_non_header_size = kInvalidSize;
};
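For reference, a worked offset example mirroring what ComputeTableOffsets() does: the running cursor is in bits, the byte tables are placed first, and the non-header size is the cursor rounded up to whole bytes. The snippet is standalone rather than ART code and every size in it is invented.

#include <cstddef>
#include <cstdio>

int main() {
  constexpr size_t kBitsPerByte = 8;
  const size_t header_bytes = 14;            // Hypothetical serialized header size.
  size_t bit = header_bytes * kBitsPerByte;  // Skip the header.

  // Byte tables first; the cursor is still byte aligned, so no padding bits are needed.
  const size_t dex_register_map_byte_off = bit / kBitsPerByte;
  bit += /*num_bytes=*/30 * kBitsPerByte;
  const size_t location_catalog_byte_off = bit / kBitsPerByte;
  bit += /*num_bytes=*/5 * kBitsPerByte;

  // Bit-packed tables afterwards; each spans entry_bits * num_entries bits.
  const size_t stack_map_bit_off = bit;
  bit += /*entry_bits=*/37 * /*num_entries=*/6;
  const size_t register_mask_bit_off = bit;
  bit += /*entry_bits=*/9 * /*num_entries=*/3;
  const size_t stack_mask_bit_off = bit;
  bit += /*entry_bits=*/11 * /*num_entries=*/2;

  // Round the final cursor up to a whole byte and subtract the header.
  const size_t non_header_bytes = (bit + kBitsPerByte - 1) / kBitsPerByte - header_bytes;
  std::printf("dex_maps@%zuB catalog@%zuB stack_maps@%zub reg_masks@%zub "
              "stack_masks@%zub non_header=%zuB\n",
              dex_register_map_byte_off, location_catalog_byte_off, stack_map_bit_off,
              register_mask_bit_off, stack_mask_bit_off, non_header_bytes);
  return 0;
}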
/**
* Wrapper around all compiler information collected for a method.
* The information is of the form:
*
- * [CodeInfoEncoding, StackMap+, DexRegisterLocationCatalog+, DexRegisterMap+, InlineInfo*]
- *
- * where CodeInfoEncoding is of the form:
- *
- * [non_header_size, number_of_stack_maps, stack_map_size_in_bits,
- * number_of_location_catalog_entries, StackMapEncoding]
+ * [CodeInfoEncoding, DexRegisterMap+, DexLocationCatalog+, StackMap+, RegisterMask+, StackMask+,
+ *  InlineInfo*]
*/
class CodeInfo {
public:
@@ -1104,7 +1231,7 @@
explicit CodeInfo(const void* data) {
CodeInfoEncoding encoding = CodeInfoEncoding(data);
region_ = MemoryRegion(const_cast<void*>(data),
- encoding.header_size + encoding.non_header_size);
+ encoding.HeaderSize() + encoding.NonHeaderSize());
}
CodeInfoEncoding ExtractEncoding() const {
@@ -1114,99 +1241,67 @@
}
bool HasInlineInfo(const CodeInfoEncoding& encoding) const {
- return encoding.stack_map_encoding.GetInlineInfoEncoding().BitSize() > 0;
+ return encoding.stack_map.encoding.GetInlineInfoEncoding().BitSize() > 0;
}
DexRegisterLocationCatalog GetDexRegisterLocationCatalog(const CodeInfoEncoding& encoding) const {
- return DexRegisterLocationCatalog(region_.Subregion(
- GetDexRegisterLocationCatalogOffset(encoding),
- GetDexRegisterLocationCatalogSize(encoding)));
+ return DexRegisterLocationCatalog(region_.Subregion(encoding.location_catalog.byte_offset,
+ encoding.location_catalog.num_bytes));
}
ALWAYS_INLINE size_t GetNumberOfStackMaskBits(const CodeInfoEncoding& encoding) const {
- return encoding.stack_mask_size_in_bits;
+ return encoding.stack_mask.encoding.BitSize();
}
- ALWAYS_INLINE StackMap GetStackMapAt(size_t i, const CodeInfoEncoding& encoding) const {
- const size_t map_size = encoding.stack_map_encoding.BitSize();
- return StackMap(BitMemoryRegion(GetStackMaps(encoding), i * map_size, map_size));
+ ALWAYS_INLINE StackMap GetStackMapAt(size_t index, const CodeInfoEncoding& encoding) const {
+ return StackMap(encoding.stack_map.BitRegion(region_, index));
}
- BitMemoryRegion GetStackMask(const CodeInfoEncoding& encoding, size_t stack_mask_index) const {
- // All stack mask data is stored before register map data (which is at the very end).
- const size_t entry_size = GetNumberOfStackMaskBits(encoding);
- const size_t register_mask_bits =
- encoding.register_mask_size_in_bits * encoding.number_of_register_masks;
- return BitMemoryRegion(region_,
- region_.size_in_bits() - register_mask_bits -
- entry_size * (stack_mask_index + 1),
- entry_size);
+ BitMemoryRegion GetStackMask(size_t index, const CodeInfoEncoding& encoding) const {
+ return encoding.stack_mask.BitRegion(region_, index);
}
BitMemoryRegion GetStackMaskOf(const CodeInfoEncoding& encoding,
const StackMap& stack_map) const {
- return GetStackMask(encoding, stack_map.GetStackMaskIndex(encoding.stack_map_encoding));
+ return GetStackMask(stack_map.GetStackMaskIndex(encoding.stack_map.encoding), encoding);
}
- BitMemoryRegion GetRegisterMask(const CodeInfoEncoding& encoding, size_t index) const {
- const size_t entry_size = encoding.register_mask_size_in_bits;
- return BitMemoryRegion(region_,
- region_.size_in_bits() - entry_size * (index + 1),
- entry_size);
+ BitMemoryRegion GetRegisterMask(size_t index, const CodeInfoEncoding& encoding) const {
+ return encoding.register_mask.BitRegion(region_, index);
}
uint32_t GetRegisterMaskOf(const CodeInfoEncoding& encoding, const StackMap& stack_map) const {
- size_t index = stack_map.GetRegisterMaskIndex(encoding.stack_map_encoding);
- return GetRegisterMask(encoding, index).LoadBits(0u, encoding.register_mask_size_in_bits);
+ size_t index = stack_map.GetRegisterMaskIndex(encoding.stack_map.encoding);
+ return GetRegisterMask(index, encoding).LoadBits(0u, encoding.register_mask.encoding.BitSize());
}
uint32_t GetNumberOfLocationCatalogEntries(const CodeInfoEncoding& encoding) const {
- return encoding.number_of_location_catalog_entries;
+ return encoding.location_catalog.num_entries;
}
uint32_t GetDexRegisterLocationCatalogSize(const CodeInfoEncoding& encoding) const {
- return ComputeDexRegisterLocationCatalogSize(GetDexRegisterLocationCatalogOffset(encoding),
- GetNumberOfLocationCatalogEntries(encoding));
+ return encoding.location_catalog.num_bytes;
}
uint32_t GetNumberOfStackMaps(const CodeInfoEncoding& encoding) const {
- return encoding.number_of_stack_maps;
+ return encoding.stack_map.num_entries;
}
// Get the size of all the stack maps of this CodeInfo object, in bits. Not byte aligned.
ALWAYS_INLINE size_t GetStackMapsSizeInBits(const CodeInfoEncoding& encoding) const {
- return encoding.stack_map_encoding.BitSize() * GetNumberOfStackMaps(encoding);
- }
-
- // Get the size of all the stack maps of this CodeInfo object, in bytes.
- size_t GetStackMapsSize(const CodeInfoEncoding& encoding) const {
- return RoundUp(GetStackMapsSizeInBits(encoding), kBitsPerByte) / kBitsPerByte;
- }
-
- uint32_t GetDexRegisterLocationCatalogOffset(const CodeInfoEncoding& encoding) const {
- return GetStackMapsOffset(encoding) + GetStackMapsSize(encoding);
- }
-
- size_t GetDexRegisterMapsOffset(const CodeInfoEncoding& encoding) const {
- return GetDexRegisterLocationCatalogOffset(encoding)
- + GetDexRegisterLocationCatalogSize(encoding);
- }
-
- uint32_t GetStackMapsOffset(const CodeInfoEncoding& encoding) const {
- return encoding.header_size;
+ return encoding.stack_map.encoding.BitSize() * GetNumberOfStackMaps(encoding);
}
DexRegisterMap GetDexRegisterMapOf(StackMap stack_map,
const CodeInfoEncoding& encoding,
- uint32_t number_of_dex_registers) const {
- if (!stack_map.HasDexRegisterMap(encoding.stack_map_encoding)) {
+ size_t number_of_dex_registers) const {
+ if (!stack_map.HasDexRegisterMap(encoding.stack_map.encoding)) {
return DexRegisterMap();
- } else {
- uint32_t offset = GetDexRegisterMapsOffset(encoding)
- + stack_map.GetDexRegisterMapOffset(encoding.stack_map_encoding);
- size_t size = ComputeDexRegisterMapSizeOf(encoding, offset, number_of_dex_registers);
- return DexRegisterMap(region_.Subregion(offset, size));
}
+ const uint32_t offset = encoding.dex_register_map.byte_offset +
+ stack_map.GetDexRegisterMapOffset(encoding.stack_map.encoding);
+ size_t size = ComputeDexRegisterMapSizeOf(encoding, offset, number_of_dex_registers);
+ return DexRegisterMap(region_.Subregion(offset, size));
}
size_t GetDexRegisterMapsSize(const CodeInfoEncoding& encoding,
@@ -1225,27 +1320,34 @@
InlineInfo inline_info,
const CodeInfoEncoding& encoding,
uint32_t number_of_dex_registers) const {
- if (!inline_info.HasDexRegisterMapAtDepth(encoding.inline_info_encoding, depth)) {
+ if (!inline_info.HasDexRegisterMapAtDepth(encoding.inline_info.encoding, depth)) {
return DexRegisterMap();
} else {
- uint32_t offset = GetDexRegisterMapsOffset(encoding) +
- inline_info.GetDexRegisterMapOffsetAtDepth(encoding.inline_info_encoding, depth);
+ uint32_t offset = encoding.dex_register_map.byte_offset +
+ inline_info.GetDexRegisterMapOffsetAtDepth(encoding.inline_info.encoding, depth);
size_t size = ComputeDexRegisterMapSizeOf(encoding, offset, number_of_dex_registers);
return DexRegisterMap(region_.Subregion(offset, size));
}
}
+ InlineInfo GetInlineInfo(size_t index, const CodeInfoEncoding& encoding) const {
+ // Since we do not know the depth, we just return the whole remaining map.
+ // TODO: Clean this up.
+ const size_t bit_offset = encoding.inline_info.bit_offset +
+ index * encoding.inline_info.encoding.BitSize();
+ return InlineInfo(BitMemoryRegion(region_, bit_offset, region_.size_in_bits() - bit_offset));
+ }
+
InlineInfo GetInlineInfoOf(StackMap stack_map, const CodeInfoEncoding& encoding) const {
- DCHECK(stack_map.HasInlineInfo(encoding.stack_map_encoding));
- uint32_t offset = stack_map.GetInlineDescriptorOffset(encoding.stack_map_encoding)
- + GetDexRegisterMapsOffset(encoding);
- return InlineInfo(region_.Subregion(offset, region_.size() - offset));
+ DCHECK(stack_map.HasInlineInfo(encoding.stack_map.encoding));
+ uint32_t index = stack_map.GetInlineInfoIndex(encoding.stack_map.encoding);
+ return GetInlineInfo(index, encoding);
}
StackMap GetStackMapForDexPc(uint32_t dex_pc, const CodeInfoEncoding& encoding) const {
for (size_t i = 0, e = GetNumberOfStackMaps(encoding); i < e; ++i) {
StackMap stack_map = GetStackMapAt(i, encoding);
- if (stack_map.GetDexPc(encoding.stack_map_encoding) == dex_pc) {
+ if (stack_map.GetDexPc(encoding.stack_map.encoding) == dex_pc) {
return stack_map;
}
}
@@ -1257,7 +1359,7 @@
StackMap GetCatchStackMapForDexPc(uint32_t dex_pc, const CodeInfoEncoding& encoding) const {
for (size_t i = GetNumberOfStackMaps(encoding); i > 0; --i) {
StackMap stack_map = GetStackMapAt(i - 1, encoding);
- if (stack_map.GetDexPc(encoding.stack_map_encoding) == dex_pc) {
+ if (stack_map.GetDexPc(encoding.stack_map.encoding) == dex_pc) {
return stack_map;
}
}
@@ -1272,7 +1374,7 @@
}
// Walk over all stack maps. If two consecutive stack maps are identical, then we
// have found a stack map suitable for OSR.
- const StackMapEncoding& stack_map_encoding = encoding.stack_map_encoding;
+ const StackMapEncoding& stack_map_encoding = encoding.stack_map.encoding;
for (size_t i = 0; i < e - 1; ++i) {
StackMap stack_map = GetStackMapAt(i, encoding);
if (stack_map.GetDexPc(stack_map_encoding) == dex_pc) {
@@ -1303,7 +1405,7 @@
// we could do binary search.
for (size_t i = 0, e = GetNumberOfStackMaps(encoding); i < e; ++i) {
StackMap stack_map = GetStackMapAt(i, encoding);
- if (stack_map.GetNativePcOffset(encoding.stack_map_encoding, kRuntimeISA) ==
+ if (stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA) ==
native_pc_offset) {
return stack_map;
}
@@ -1324,23 +1426,17 @@
// Check that the code info has a valid stack map and abort if it does not.
void AssertValidStackMap(const CodeInfoEncoding& encoding) const {
- if (region_.size() != 0 && region_.size() < GetStackMapsSize(encoding)) {
+ if (region_.size() != 0 && region_.size_in_bits() < GetStackMapsSizeInBits(encoding)) {
LOG(FATAL) << region_.size() << "\n"
- << encoding.header_size << "\n"
- << encoding.non_header_size << "\n"
- << encoding.number_of_location_catalog_entries << "\n"
- << encoding.number_of_stack_maps << "\n"
- << encoding.stack_map_encoding.BitSize();
+ << encoding.HeaderSize() << "\n"
+ << encoding.NonHeaderSize() << "\n"
+ << encoding.location_catalog.num_entries << "\n"
+ << encoding.stack_map.num_entries << "\n"
+ << encoding.stack_map.encoding.BitSize();
}
}
private:
- ALWAYS_INLINE MemoryRegion GetStackMaps(const CodeInfoEncoding& encoding) const {
- return region_.size() == 0
- ? MemoryRegion()
- : region_.Subregion(GetStackMapsOffset(encoding), GetStackMapsSize(encoding));
- }
-
// Compute the size of the Dex register map associated with the stack map at
// `dex_register_map_offset_in_code_info`.
size_t ComputeDexRegisterMapSizeOf(const CodeInfoEncoding& encoding,
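
A hedged usage sketch of the new index-based accessors in stack_map.h. It only uses calls visible in this patch, but it assumes the ART tree (stack_map.h, the logging macros, kRuntimeISA) and a valid code_info_data pointer, so it is illustrative rather than buildable on its own.

#include "stack_map.h"

namespace art {

// Walk every stack map of a method and dump a few per-entry fields.
void DumpNativePcs(const void* code_info_data) {
  CodeInfo code_info(code_info_data);
  CodeInfoEncoding encoding = code_info.ExtractEncoding();
  for (size_t i = 0, e = code_info.GetNumberOfStackMaps(encoding); i < e; ++i) {
    StackMap stack_map = code_info.GetStackMapAt(i, encoding);
    // Per-entry fields are read through the stack map table's own encoding.
    uint32_t dex_pc = stack_map.GetDexPc(encoding.stack_map.encoding);
    uint32_t native_pc = stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA);
    // Masks are now fetched by index from their own tables instead of from the
    // tail of the CodeInfo region.
    uint32_t register_mask = code_info.GetRegisterMaskOf(encoding, stack_map);
    LOG(INFO) << "dex_pc=" << dex_pc << " native_pc=" << native_pc
              << " register_mask=" << register_mask;
  }
}

}  // namespace art
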
diff --git a/test/testrunner/env.py b/test/testrunner/env.py
index e015d74..278980f 100644
--- a/test/testrunner/env.py
+++ b/test/testrunner/env.py
@@ -165,7 +165,7 @@
ART_PHONY_TEST_HOST_SUFFIX = "64"
_2ND_ART_PHONY_TEST_HOST_SUFFIX = "32"
-HOST_OUT_EXECUTABLES = ('%s/%s') % (ANDROID_BUILD_TOP,
+HOST_OUT_EXECUTABLES = os.path.join(ANDROID_BUILD_TOP,
get_build_var("HOST_OUT_EXECUTABLES"))
os.environ['JACK'] = HOST_OUT_EXECUTABLES + '/jack'
os.environ['DX'] = HOST_OUT_EXECUTABLES + '/dx'
diff --git a/test/testrunner/testrunner.py b/test/testrunner/testrunner.py
index fd3ebd1..f60a6c9 100755
--- a/test/testrunner/testrunner.py
+++ b/test/testrunner/testrunner.py
@@ -452,8 +452,8 @@
test_skipped = True
else:
test_skipped = False
- proc = subprocess.Popen(command.split(), stderr=subprocess.PIPE)
- script_output = proc.stderr.read().strip()
+ proc = subprocess.Popen(command.split(), stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
+ script_output = proc.stdout.read().strip()
test_passed = not proc.wait()
# If verbose is set to True, the information for every test is printed on a new line.
@@ -474,17 +474,18 @@
if not test_skipped:
if test_passed:
out += COLOR_PASS + 'PASS' + COLOR_NORMAL
+ last_print_length = len(out)
else:
- out += COLOR_ERROR + 'FAIL' + COLOR_NORMAL
failed_tests.append(test_name)
- if verbose:
- out += '\n' + command + '\n' + script_output
+ out += COLOR_ERROR + 'FAIL' + COLOR_NORMAL
+ out += '\n' + command + '\n' + script_output
if not env.ART_TEST_KEEP_GOING:
stop_testrunner = True
+ last_print_length = 0
elif not dry_run:
out += COLOR_SKIP + 'SKIP' + COLOR_NORMAL
+ last_print_length = len(out)
skipped_tests.append(test_name)
- last_print_length = len(out)
print_mutex.acquire()
print_text(prefix + out + suffix)
print_mutex.release()