Diffstat (limited to 'compiler/optimizing/stack_map_stream.cc')
-rw-r--r--  compiler/optimizing/stack_map_stream.cc | 92
1 file changed, 53 insertions, 39 deletions
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index 3685ab2df4..094b75de69 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -46,6 +46,13 @@ void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
uint8_t inlining_depth) {
DCHECK(!in_stack_map_) << "Mismatched Begin/End calls";
in_stack_map_ = true;
+ // num_dex_registers_ is the constant per-method number of registers.
+ // However we initially don't know what the value is, so lazily initialize it.
+ if (num_dex_registers_ == 0) {
+ num_dex_registers_ = num_dex_registers;
+ } else if (num_dex_registers > 0) {
+ DCHECK_EQ(num_dex_registers_, num_dex_registers) << "Inconsistent register count";
+ }
current_stack_map_ = StackMapEntry {
.packed_native_pc = StackMap::PackNativePc(native_pc_offset, instruction_set_),
@@ -85,7 +92,6 @@ void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
}
CHECK_EQ(stack_map.HasInlineInfo(), (inlining_depth != 0));
CHECK_EQ(code_info.GetInlineDepthOf(stack_map), inlining_depth);
- CHECK_EQ(stack_map.HasDexRegisterMap(), (num_dex_registers != 0));
});
}
}
@@ -102,16 +108,14 @@ void StackMapStream::EndStackMapEntry() {
inline_infos_.Dedup(current_inline_infos_.data(), current_inline_infos_.size());
}
+ // Generate delta-compressed dex register map.
+ CreateDexRegisterMap();
+
stack_maps_.Add(current_stack_map_);
}
void StackMapStream::AddDexRegisterEntry(DexRegisterLocation::Kind kind, int32_t value) {
current_dex_registers_.push_back(DexRegisterLocation(kind, value));
-
- // We have collected all the dex registers for StackMap/InlineInfo - create the map.
- if (current_dex_registers_.size() == expected_num_dex_registers_) {
- CreateDexRegisterMap();
- }
}
void StackMapStream::AddInvoke(InvokeType invoke_type, uint32_t dex_method_index) {
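
A standalone sketch of the delta-compression idea behind the new CreateDexRegisterMap call, using invented names (Location, EncodedMap, DeltaEncoder) and std containers rather than ART's DexRegisterLocation/BitVector/Dedup machinery; dead registers and the distance-based re-emit from the real code are omitted for brevity. Per stack map, only registers whose location changed since the previous stack map get a mask bit and an index into a shared catalogue of unique locations:

    #include <cstddef>
    #include <cstdint>
    #include <map>
    #include <tuple>
    #include <vector>

    struct Location {
      uint32_t kind = 0;
      int32_t value = 0;
      bool operator==(const Location& other) const {
        return kind == other.kind && value == other.value;
      }
      bool operator<(const Location& other) const {
        return std::tie(kind, value) < std::tie(other.kind, other.value);
      }
    };

    struct EncodedMap {
      std::vector<bool> changed_mask;         // One bit per dex register.
      std::vector<uint32_t> catalog_indices;  // One entry per set bit, in register order.
    };

    class DeltaEncoder {
     public:
      EncodedMap Encode(const std::vector<Location>& current) {
        previous_.resize(current.size());
        EncodedMap out;
        out.changed_mask.resize(current.size(), false);
        for (size_t i = 0; i < current.size(); ++i) {
          if (!(previous_[i] == current[i])) {  // Register changed since the last stack map.
            out.changed_mask[i] = true;
            out.catalog_indices.push_back(Dedup(current[i]));
            previous_[i] = current[i];
          }
        }
        return out;
      }

     private:
      // Shared catalogue: each distinct location is stored once and referenced by index.
      uint32_t Dedup(const Location& loc) {
        auto it = catalog_index_.find(loc);
        if (it != catalog_index_.end()) {
          return it->second;
        }
        uint32_t index = static_cast<uint32_t>(catalog_.size());
        catalog_.push_back(loc);
        catalog_index_.emplace(loc, index);
        return index;
      }

      std::vector<Location> previous_;              // Last emitted location per register.
      std::vector<Location> catalog_;               // Deduplicated locations.
      std::map<Location, uint32_t> catalog_index_;
    };
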
@@ -142,14 +146,15 @@ void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
in_inline_info_ = true;
DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
+ expected_num_dex_registers_ += num_dex_registers;
+
InlineInfoEntry entry = {
.is_last = InlineInfo::kMore,
.dex_pc = dex_pc,
.method_info_index = kNoValue,
.art_method_hi = kNoValue,
.art_method_lo = kNoValue,
- .dex_register_mask_index = kNoValue,
- .dex_register_map_index = kNoValue,
+ .num_dex_registers = static_cast<uint32_t>(expected_num_dex_registers_),
};
if (EncodeArtMethodInInlineInfo(method)) {
entry.art_method_hi = High32Bits(reinterpret_cast<uintptr_t>(method));
@@ -164,9 +169,6 @@ void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
}
current_inline_infos_.push_back(entry);
- current_dex_registers_.clear();
- expected_num_dex_registers_ = num_dex_registers;
-
if (kVerifyStackMaps) {
size_t stack_map_index = stack_maps_.size();
size_t depth = current_inline_infos_.size() - 1;
@@ -182,7 +184,6 @@ void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
CHECK_EQ(method_infos_[inline_info.GetMethodInfoIndex()],
method->GetDexMethodIndexUnchecked());
}
- CHECK_EQ(inline_info.HasDexRegisterMap(), (num_dex_registers != 0));
});
}
}
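
To make the accumulation of expected_num_dex_registers_ concrete: each stack map now carries one flat register array, with the outer method's registers first and every inline frame's registers appended after it, so the num_dex_registers value stored per inline entry is the cumulative total after adding that frame, i.e. the end offset of its slice. A small illustrative calculation with invented counts:

    #include <cstdint>
    #include <cstdio>

    // Hypothetical counts, chosen only for illustration: the outer method has
    // 5 dex registers and two inline frames add 3 and 2. Each frame's slice of
    // the flat register array is bounded by consecutive end offsets.
    int main() {
      const uint32_t end_offsets[] = {5, 8, 10};  // outer method, inline depth 0, inline depth 1
      uint32_t begin = 0;
      for (uint32_t end : end_offsets) {
        std::printf("frame owns flat registers [%u, %u)\n", begin, end);
        begin = end;
      }
      return 0;
    }
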
@@ -193,56 +194,68 @@ void StackMapStream::EndInlineInfoEntry() {
DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
}
-// Create dex register map (bitmap + indices + catalogue entries)
-// based on the currently accumulated list of DexRegisterLocations.
+// Create delta-compressed dex register map based on the current list of DexRegisterLocations.
+// All dex registers for a stack map are concatenated - inlined registers are just appended.
void StackMapStream::CreateDexRegisterMap() {
- // Create mask and map based on current registers.
+ // These are fields rather than local variables so that we can reuse the reserved memory.
temp_dex_register_mask_.ClearAllBits();
temp_dex_register_map_.clear();
+
+ // Ensure that the arrays that hold previous state are big enough to be safely indexed below.
+ if (previous_dex_registers_.size() < current_dex_registers_.size()) {
+ previous_dex_registers_.resize(current_dex_registers_.size(), DexRegisterLocation::None());
+ dex_register_timestamp_.resize(current_dex_registers_.size(), 0u);
+ }
+
+ // Set bit in the mask for each register that has been changed since the previous stack map.
+ // Modified registers are stored in the catalogue and the catalogue index added to the list.
for (size_t i = 0; i < current_dex_registers_.size(); i++) {
DexRegisterLocation reg = current_dex_registers_[i];
- if (reg.IsLive()) {
- DexRegisterEntry entry = DexRegisterEntry {
+ // Distance is the difference between this index and the index of the last modification.
+ uint32_t distance = stack_maps_.size() - dex_register_timestamp_[i];
+ if (previous_dex_registers_[i] != reg || distance > kMaxDexRegisterMapSearchDistance) {
+ DexRegisterEntry entry = DexRegisterEntry{
.kind = static_cast<uint32_t>(reg.GetKind()),
.packed_value = DexRegisterInfo::PackValue(reg.GetKind(), reg.GetValue()),
};
+ uint32_t index = reg.IsLive() ? dex_register_catalog_.Dedup(&entry) : kNoValue;
temp_dex_register_mask_.SetBit(i);
- temp_dex_register_map_.push_back(dex_register_catalog_.Dedup(&entry));
+ temp_dex_register_map_.push_back(index);
+ previous_dex_registers_[i] = reg;
+ dex_register_timestamp_[i] = stack_maps_.size();
}
}
- // Set the mask and map for the current StackMap/InlineInfo.
- uint32_t mask_index = StackMap::kNoValue; // Represents mask with all zero bits.
+ // Set the mask and map for the current StackMap (which includes inlined registers).
if (temp_dex_register_mask_.GetNumberOfBits() != 0) {
- mask_index = dex_register_masks_.Dedup(temp_dex_register_mask_.GetRawStorage(),
- temp_dex_register_mask_.GetNumberOfBits());
+ current_stack_map_.dex_register_mask_index =
+ dex_register_masks_.Dedup(temp_dex_register_mask_.GetRawStorage(),
+ temp_dex_register_mask_.GetNumberOfBits());
}
- uint32_t map_index = dex_register_maps_.Dedup(temp_dex_register_map_.data(),
- temp_dex_register_map_.size());
- if (!current_inline_infos_.empty()) {
- current_inline_infos_.back().dex_register_mask_index = mask_index;
- current_inline_infos_.back().dex_register_map_index = map_index;
- } else {
- current_stack_map_.dex_register_mask_index = mask_index;
- current_stack_map_.dex_register_map_index = map_index;
+ if (!current_dex_registers_.empty()) {
+ current_stack_map_.dex_register_map_index =
+ dex_register_maps_.Dedup(temp_dex_register_map_.data(),
+ temp_dex_register_map_.size());
}
if (kVerifyStackMaps) {
size_t stack_map_index = stack_maps_.size();
- int32_t depth = current_inline_infos_.size() - 1;
+ uint32_t depth = current_inline_infos_.size();
// We need to make copy of the current registers for later (when the check is run).
- auto expected_dex_registers = std::make_shared<std::vector<DexRegisterLocation>>(
+ auto expected_dex_registers = std::make_shared<dchecked_vector<DexRegisterLocation>>(
current_dex_registers_.begin(), current_dex_registers_.end());
dchecks_.emplace_back([=](const CodeInfo& code_info) {
StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
- size_t num_dex_registers = expected_dex_registers->size();
- DexRegisterMap map = (depth == -1)
- ? code_info.GetDexRegisterMapOf(stack_map, num_dex_registers)
- : code_info.GetDexRegisterMapAtDepth(depth, stack_map, num_dex_registers);
- CHECK_EQ(map.size(), num_dex_registers);
- for (size_t r = 0; r < num_dex_registers; r++) {
- CHECK_EQ(expected_dex_registers->at(r), map.Get(r));
+ uint32_t expected_reg = 0;
+ for (DexRegisterLocation reg : code_info.GetDexRegisterMapOf(stack_map)) {
+ CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
+ }
+ for (uint32_t d = 0; d < depth; d++) {
+ for (DexRegisterLocation reg : code_info.GetDexRegisterMapAtDepth(d, stack_map)) {
+ CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
+ }
}
+ CHECK_EQ(expected_reg, expected_dex_registers->size());
});
}
}
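
For the reading side, a standalone decode sketch (invented names and std containers, not the CodeInfo accessors used in the verification lambda above): the full register set for a stack map is reconstructed by walking backwards through earlier stack maps and taking the most recent recorded location of each register. The dex_register_timestamp_ / kMaxDexRegisterMapSearchDistance check in the encoder re-emits a location even when it is unchanged once that backward distance would grow too large, which is what keeps this scan bounded.

    #include <cstddef>
    #include <cstdint>
    #include <optional>
    #include <vector>

    // Each encoded stack map stores only the registers that changed: a mask bit
    // per register plus one location per set bit.
    struct Location {
      uint32_t kind = 0;
      int32_t value = 0;
    };

    struct EncodedStackMap {
      std::vector<bool> changed_mask;   // One bit per dex register.
      std::vector<Location> locations;  // One entry per set bit, in register order.
    };

    // Rebuild the full register map for stack map `target` by scanning backwards
    // until every register has been resolved (or the first stack map is reached).
    std::vector<Location> Rebuild(const std::vector<EncodedStackMap>& maps,
                                  size_t target,
                                  size_t num_registers) {
      std::vector<std::optional<Location>> resolved(num_registers);
      size_t remaining = num_registers;
      for (size_t i = target + 1; i-- > 0 && remaining > 0; ) {
        const EncodedStackMap& map = maps[i];
        size_t loc_index = 0;
        for (size_t r = 0; r < num_registers; ++r) {
          if (!map.changed_mask[r]) {
            continue;
          }
          if (!resolved[r].has_value()) {  // First hit while walking back == most recent write.
            resolved[r] = map.locations[loc_index];
            --remaining;
          }
          ++loc_index;
        }
      }
      std::vector<Location> out(num_registers);
      for (size_t r = 0; r < num_registers; ++r) {
        if (resolved[r].has_value()) {
          out[r] = *resolved[r];  // Registers never written stay at the default ("no location").
        }
      }
      return out;
    }
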
@@ -290,6 +303,7 @@ size_t StackMapStream::PrepareForFillIn() {
dex_register_masks_.Encode(&out_, &bit_offset);
dex_register_maps_.Encode(&out_, &bit_offset);
dex_register_catalog_.Encode(&out_, &bit_offset);
+ EncodeVarintBits(&out_, &bit_offset, num_dex_registers_);
return UnsignedLeb128Size(out_.size()) + out_.size();
}