author David Srbecky <dsrbecky@google.com> 2019-06-16 21:53:07 +0100
committer David Srbecky <dsrbecky@google.com> 2019-06-18 11:35:05 +0000
commit 697c47a7ffd4489c4bc4edc229c8123309526286 (patch)
tree 04cf832df533fd529cc598ecff9be8c2763d61b8 /compiler/optimizing
parent 8ac3dc5ec31569630a99caed2a69f64d84a6c0b6 (diff)
Stack maps: Handle special cases using flags.
Keep the BitTable decoder simple (1+NumColumns varints).
Move special case handling up to CodeInfo (empty/dedup).

This speeds up CodeInfo by 5%, and maps startup by 0.05%.
Change in size is negligible (the bits mostly just move).

Test: test.py -b --host --64 --optimizing
Change-Id: Ib6abe52f04384de9ffd7cfba04a3124b62f713ff
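The scheme the diff below implements can be summarized as: instead of every BitTable spending a header bit on its own special cases, CodeInfo writes one varint bitmask up front recording which tables are non-empty, and empty tables are omitted from the stream entirely. The following is a minimal self-contained sketch of that idea; the toy varint codec and flat table layout are illustrative assumptions, not ART's actual BitMemoryWriter/BitTable encoding.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

// Toy LEB128-style varint codec standing in for ART's BitMemoryWriter /
// BitMemoryReader (assumption: names and byte-level layout are illustrative).
static void WriteVarint(std::vector<uint8_t>& out, uint32_t value) {
  while (value >= 0x80) {
    out.push_back(static_cast<uint8_t>(value) | 0x80);
    value >>= 7;
  }
  out.push_back(static_cast<uint8_t>(value));
}

static uint32_t ReadVarint(const std::vector<uint8_t>& in, size_t& pos) {
  uint32_t value = 0;
  for (int shift = 0;; shift += 7) {
    uint8_t byte = in[pos++];
    value |= static_cast<uint32_t>(byte & 0x7F) << shift;
    if ((byte & 0x80) == 0) return value;
  }
}

// Encode: one presence bitmask up front, then only the non-empty tables.
// Assumes fewer than 32 tables so the mask fits one uint32_t.
std::vector<uint8_t> EncodeTables(const std::vector<std::vector<uint32_t>>& tables) {
  std::vector<uint8_t> out;
  uint32_t present = 0;
  for (size_t i = 0; i < tables.size(); ++i) {
    if (!tables[i].empty()) present |= 1u << i;
  }
  WriteVarint(out, present);
  for (const std::vector<uint32_t>& table : tables) {
    if (table.empty()) continue;  // Empty tables cost nothing beyond their flag bit.
    WriteVarint(out, static_cast<uint32_t>(table.size()));
    for (uint32_t v : table) WriteVarint(out, v);
  }
  return out;
}

// Decode: the bitmask says which tables to expect; absent ones stay empty,
// with no per-table special-case bit left to read.
std::vector<std::vector<uint32_t>> DecodeTables(const std::vector<uint8_t>& in,
                                                size_t num_tables) {
  std::vector<std::vector<uint32_t>> tables(num_tables);
  size_t pos = 0;
  uint32_t present = ReadVarint(in, pos);
  for (size_t i = 0; i < num_tables; ++i) {
    if ((present & (1u << i)) == 0) continue;  // Table i was empty; skip it.
    uint32_t size = ReadVarint(in, pos);
    tables[i].resize(size);
    for (uint32_t& v : tables[i]) v = ReadVarint(in, pos);
  }
  return tables;
}

int main() {
  // Table 1 is empty: it contributes a zero bit in the mask and nothing else.
  std::vector<std::vector<uint32_t>> tables = {{7, 8}, {}, {9}};
  std::vector<uint8_t> encoded = EncodeTables(tables);
  std::vector<std::vector<uint32_t>> decoded = DecodeTables(encoded, tables.size());
  assert(decoded == tables);  // Round-trip succeeds; empty table restored as empty.
  return 0;
}

Under this layout an empty table costs only its flag bit, while a non-empty table no longer pays the per-table "is not deduped" bit that the old EncodeTable wrote, which is why the commit message says the bits mostly just move.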
Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/stack_map_stream.cc  31
-rw-r--r--  compiler/optimizing/stack_map_stream.h   24
2 files changed, 34 insertions, 21 deletions
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index e21e21cdf3..87702cc798 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -184,7 +184,6 @@ void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
   in_inline_info_ = true;
   DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
 
-  flags_ |= CodeInfo::kHasInlineInfo;
   expected_num_dex_registers_ += num_dex_registers;
 
   BitTableBuilder<InlineInfo>::Entry entry;
@@ -294,31 +293,31 @@ void StackMapStream::CreateDexRegisterMap() {
   }
 }
 
-template<typename Writer, typename Builder>
-ALWAYS_INLINE static void EncodeTable(Writer& out, const Builder& bit_table) {
-  out.WriteBit(false);  // Is not deduped.
-  bit_table.Encode(out);
-}
-
 ScopedArenaVector<uint8_t> StackMapStream::Encode() {
   DCHECK(in_stack_map_ == false) << "Mismatched Begin/End calls";
   DCHECK(in_inline_info_ == false) << "Mismatched Begin/End calls";
 
+  uint32_t flags = (inline_infos_.size() > 0) ? CodeInfo::kHasInlineInfo : 0;
+  uint32_t bit_table_flags = 0;
+  ForEachBitTable([&bit_table_flags](size_t i, auto bit_table) {
+    if (bit_table->size() != 0) {  // Record which bit-tables are stored.
+      bit_table_flags |= 1 << i;
+    }
+  });
+
   ScopedArenaVector<uint8_t> buffer(allocator_->Adapter(kArenaAllocStackMapStream));
   BitMemoryWriter<ScopedArenaVector<uint8_t>> out(&buffer);
-  out.WriteVarint(flags_);
+  out.WriteVarint(flags);
   out.WriteVarint(packed_frame_size_);
   out.WriteVarint(core_spill_mask_);
   out.WriteVarint(fp_spill_mask_);
   out.WriteVarint(num_dex_registers_);
-  EncodeTable(out, stack_maps_);
-  EncodeTable(out, register_masks_);
-  EncodeTable(out, stack_masks_);
-  EncodeTable(out, inline_infos_);
-  EncodeTable(out, method_infos_);
-  EncodeTable(out, dex_register_masks_);
-  EncodeTable(out, dex_register_maps_);
-  EncodeTable(out, dex_register_catalog_);
+  out.WriteVarint(bit_table_flags);
+  ForEachBitTable([&out](size_t, auto bit_table) {
+    if (bit_table->size() != 0) {  // Skip empty bit-tables.
+      bit_table->Encode(out);
+    }
+  });
 
   // Verify that we can load the CodeInfo and check some essentials.
   CodeInfo code_info(buffer.data());
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index 20dd32e308..33c624a36c 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -40,10 +40,10 @@ class StackMapStream : public DeletableArenaObject<kArenaAllocStackMapStream> {
       : allocator_(allocator),
         instruction_set_(instruction_set),
         stack_maps_(allocator),
-        inline_infos_(allocator),
-        method_infos_(allocator),
         register_masks_(allocator),
         stack_masks_(allocator),
+        inline_infos_(allocator),
+        method_infos_(allocator),
         dex_register_masks_(allocator),
         dex_register_maps_(allocator),
         dex_register_catalog_(allocator),
@@ -97,18 +97,32 @@ class StackMapStream : public DeletableArenaObject<kArenaAllocStackMapStream> {
   void CreateDexRegisterMap();
 
+  // Invokes the callback with a pointer to each BitTableBuilder field.
+  template<typename Callback>
+  void ForEachBitTable(Callback callback) {
+    size_t index = 0;
+    callback(index++, &stack_maps_);
+    callback(index++, &register_masks_);
+    callback(index++, &stack_masks_);
+    callback(index++, &inline_infos_);
+    callback(index++, &method_infos_);
+    callback(index++, &dex_register_masks_);
+    callback(index++, &dex_register_maps_);
+    callback(index++, &dex_register_catalog_);
+    CHECK_EQ(index, CodeInfo::kNumBitTables);
+  }
+
   ScopedArenaAllocator* allocator_;
   const InstructionSet instruction_set_;
-  uint32_t flags_ = 0;
   uint32_t packed_frame_size_ = 0;
   uint32_t core_spill_mask_ = 0;
   uint32_t fp_spill_mask_ = 0;
   uint32_t num_dex_registers_ = 0;
   BitTableBuilder<StackMap> stack_maps_;
-  BitTableBuilder<InlineInfo> inline_infos_;
-  BitTableBuilder<MethodInfo> method_infos_;
   BitTableBuilder<RegisterMask> register_masks_;
   BitmapTableBuilder stack_masks_;
+  BitTableBuilder<InlineInfo> inline_infos_;
+  BitTableBuilder<MethodInfo> method_infos_;
   BitmapTableBuilder dex_register_masks_;
   BitTableBuilder<DexRegisterMapInfo> dex_register_maps_;
   BitTableBuilder<DexRegisterInfo> dex_register_catalog_;
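The ForEachBitTable helper added in the header is an indexed-visitor idiom: a generic callback receives (index, pointer) for each builder field, so the flag-collection pass and the encoding pass in Encode() share one canonical table order. Below is a compilable toy version of the pattern; the member types and the kNumBitTables count are stand-ins for illustration, not ART's real builders.

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <vector>

// Stand-in builder types so the sketch compiles on its own (assumption:
// ART's BitTableBuilder/BitmapTableBuilder differ; only size() matters here).
struct TableA { std::vector<int> rows;  size_t size() const { return rows.size(); } };
struct TableB { std::vector<long> rows; size_t size() const { return rows.size(); } };

struct Stream {
  static constexpr size_t kNumBitTables = 2;
  TableA stack_maps_;
  TableB stack_masks_;

  // Same shape as the patch: a generic callback receives (index, pointer) for
  // each table member, so every traversal shares one canonical table order.
  template <typename Callback>
  void ForEachBitTable(Callback callback) {
    size_t index = 0;
    callback(index++, &stack_maps_);
    callback(index++, &stack_masks_);
    if (index != kNumBitTables) std::abort();  // Mirrors the CHECK_EQ above.
  }
};

int main() {
  Stream s;
  s.stack_maps_.rows = {1, 2, 3};  // stack_masks_ stays empty.
  uint32_t flags = 0;
  s.ForEachBitTable([&flags](size_t i, auto* table) {
    if (table->size() != 0) flags |= 1u << i;  // Record non-empty tables.
  });
  std::printf("bit_table_flags = %#x\n", static_cast<unsigned>(flags));  // 0x1
  return 0;
}

The member reordering in the constructor initializer list and the field declarations presumably keeps declaration order in sync with the visitor's table-index order, so bit i of bit_table_flags refers to the same table on both the encode and decode sides.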