path: root/compiler/optimizing/stack_map_stream.cc
author David Srbecky <dsrbecky@google.com> 2019-06-16 21:53:07 +0100
committer David Srbecky <dsrbecky@google.com> 2019-06-18 11:35:05 +0000
commit 697c47a7ffd4489c4bc4edc229c8123309526286 (patch)
tree 04cf832df533fd529cc598ecff9be8c2763d61b8 /compiler/optimizing/stack_map_stream.cc
parent 8ac3dc5ec31569630a99caed2a69f64d84a6c0b6 (diff)
Stack maps: Handle special cases using flags.
Keep the BitTable decoder simple (1+NumColumns varints). Move special
case handling up to CodeInfo (empty/dedup).

This speeds up CodeInfo by 5%, and maps startup by 0.05%.
Change in size is negligible (the bits mostly just move).

Test: test.py -b --host --64 --optimizing
Change-Id: Ib6abe52f04384de9ffd7cfba04a3124b62f713ff
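The scheme at work in the diff below: instead of prefixing every bit table with its own marker bit, Encode() writes a single bit_table_flags varint up front, with bit i set when table i is non-empty; only the non-empty tables follow, and the decoder substitutes an empty table whenever a bit is clear. Here is a minimal standalone sketch of that presence-bitmask idea, using byte vectors and LEB128-style varints as stand-ins for ART's BitMemoryWriter/BitTableBuilder machinery (all helper names below are illustrative, not ART's API):

#include <cstdint>
#include <vector>

// Hypothetical LEB128-style varint helpers (illustrative only).
static void WriteVarint(std::vector<uint8_t>& out, uint32_t value) {
  while (value >= 0x80) {
    out.push_back(static_cast<uint8_t>(value | 0x80));
    value >>= 7;
  }
  out.push_back(static_cast<uint8_t>(value));
}

static uint32_t ReadVarint(const uint8_t*& in) {
  uint32_t value = 0;
  for (uint32_t shift = 0;; shift += 7) {
    uint8_t byte = *in++;
    value |= static_cast<uint32_t>(byte & 0x7f) << shift;
    if ((byte & 0x80) == 0) {
      return value;
    }
  }
}

int main() {
  // Eight "tables"; only #0 and #3 are non-empty.
  std::vector<std::vector<uint8_t>> tables(8);
  tables[0] = {1, 2, 3};
  tables[3] = {4};

  // Encode: one presence bitmask up front, then only non-empty payloads.
  std::vector<uint8_t> buffer;
  uint32_t bit_table_flags = 0;
  for (size_t i = 0; i < tables.size(); ++i) {
    if (!tables[i].empty()) {
      bit_table_flags |= 1u << i;  // Record which tables are stored.
    }
  }
  WriteVarint(buffer, bit_table_flags);
  for (const std::vector<uint8_t>& table : tables) {
    if (!table.empty()) {  // Skip empty tables entirely.
      WriteVarint(buffer, static_cast<uint32_t>(table.size()));
      buffer.insert(buffer.end(), table.begin(), table.end());
    }
  }

  // Decode: a clear bit means "empty table"; no per-table marker is read.
  const uint8_t* in = buffer.data();
  uint32_t flags = ReadVarint(in);
  std::vector<std::vector<uint8_t>> decoded(8);
  for (size_t i = 0; i < decoded.size(); ++i) {
    if ((flags & (1u << i)) != 0) {
      uint32_t size = ReadVarint(in);
      decoded[i].assign(in, in + size);
      in += size;
    }  // Otherwise decoded[i] stays empty, at zero decode cost.
  }
  return decoded == tables ? 0 : 1;  // Round-trip check.
}

The design choice mirrors the commit subject: the per-table special cases (is it empty? is it deduped?) move out of the generic BitTable codec and into the container that knows about all the tables, so the table decoder itself stays a flat run of 1+NumColumns varints.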
Diffstat (limited to 'compiler/optimizing/stack_map_stream.cc')
-rw-r--r--  compiler/optimizing/stack_map_stream.cc | 31 +++++++++++++++----------------
1 file changed, 15 insertions(+), 16 deletions(-)
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index e21e21cdf3..87702cc798 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -184,7 +184,6 @@ void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
   in_inline_info_ = true;
   DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
 
-  flags_ |= CodeInfo::kHasInlineInfo;
   expected_num_dex_registers_ += num_dex_registers;
 
   BitTableBuilder<InlineInfo>::Entry entry;
@@ -294,31 +293,31 @@ void StackMapStream::CreateDexRegisterMap() {
   }
 }
 
-template<typename Writer, typename Builder>
-ALWAYS_INLINE static void EncodeTable(Writer& out, const Builder& bit_table) {
-  out.WriteBit(false);  // Is not deduped.
-  bit_table.Encode(out);
-}
-
 ScopedArenaVector<uint8_t> StackMapStream::Encode() {
   DCHECK(in_stack_map_ == false) << "Mismatched Begin/End calls";
   DCHECK(in_inline_info_ == false) << "Mismatched Begin/End calls";
 
+  uint32_t flags = (inline_infos_.size() > 0) ? CodeInfo::kHasInlineInfo : 0;
+  uint32_t bit_table_flags = 0;
+  ForEachBitTable([&bit_table_flags](size_t i, auto bit_table) {
+    if (bit_table->size() != 0) {  // Record which bit-tables are stored.
+      bit_table_flags |= 1 << i;
+    }
+  });
+
   ScopedArenaVector<uint8_t> buffer(allocator_->Adapter(kArenaAllocStackMapStream));
   BitMemoryWriter<ScopedArenaVector<uint8_t>> out(&buffer);
-  out.WriteVarint(flags_);
+  out.WriteVarint(flags);
   out.WriteVarint(packed_frame_size_);
   out.WriteVarint(core_spill_mask_);
   out.WriteVarint(fp_spill_mask_);
   out.WriteVarint(num_dex_registers_);
-  EncodeTable(out, stack_maps_);
-  EncodeTable(out, register_masks_);
-  EncodeTable(out, stack_masks_);
-  EncodeTable(out, inline_infos_);
-  EncodeTable(out, method_infos_);
-  EncodeTable(out, dex_register_masks_);
-  EncodeTable(out, dex_register_maps_);
-  EncodeTable(out, dex_register_catalog_);
+  out.WriteVarint(bit_table_flags);
+  ForEachBitTable([&out](size_t, auto bit_table) {
+    if (bit_table->size() != 0) {  // Skip empty bit-tables.
+      bit_table->Encode(out);
+    }
+  });
 
   // Verify that we can load the CodeInfo and check some essentials.
   CodeInfo code_info(buffer.data());
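Two details of the resulting format contract are worth noting. First, the per-table dedup bit written by the deleted EncodeTable helper ("Is not deduped") disappears from the table encoding itself; per the commit message, dedup handling now lives in CodeInfo alongside the empty-table case. Second, because flags is now derived from inline_infos_.size() at Encode() time, the incremental flags_ |= CodeInfo::kHasInlineInfo update removed from BeginInlineInfoEntry in the first hunk is no longer needed, and a decoder must read flags, the four header varints, and bit_table_flags in exactly this order before touching any table data.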