summaryrefslogtreecommitdiff
path: root/compiler/optimizing/stack_map_stream.cc
diff options
context:
space:
mode:
author David Srbecky <dsrbecky@google.com> 2018-05-10 17:49:33 +0100
committer David Srbecky <dsrbecky@google.com> 2018-05-21 09:28:22 +0000
commit 68fefacc54b9d918a5b17b99ac7e72421d1b1f94 (patch)
tree 39cccda109a63153ca363e592b5570b46e6dece9 /compiler/optimizing/stack_map_stream.cc
parent b2683cb0ae69c9a8a0ba654f50fa743a9117171c (diff)
Move and rewrite bit loading/storing methods.
Move bit loading/storing methods to BitMemoryRegion and rewrite them. Enforce natural alignment of the data pointer in BitMemoryRegion. This probably would not be reasonable in MemoryRegion, but it is fine here since the BitMemoryRegion already has extra bit offset. The alignment makes it possible to simplify and optimize the methods. This makes the stackmap reading code 33% faster. (measured by the time needed to verify all stackmap fields on ARM). Test: m -j40 test-art-host-gtest Change-Id: I00be8052969a6056b262df4cd2066ffd86043196
Diffstat (limited to 'compiler/optimizing/stack_map_stream.cc')
-rw-r--r-- compiler/optimizing/stack_map_stream.cc 12
1 file changed, 7 insertions(+), 5 deletions(-)
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index 7010e3f380..bf7c5542ef 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -427,10 +427,11 @@ void StackMapStream::FillInCodeInfo(MemoryRegion region) {
if (stack_mask_bits > 0) {
size_t stack_mask_bytes = RoundUp(stack_mask_bits, kBitsPerByte) / kBitsPerByte;
for (size_t i = 0; i < encoding.stack_mask.num_entries; ++i) {
- MemoryRegion source(&stack_masks_[i * stack_mask_bytes], stack_mask_bytes);
- BitMemoryRegion stack_mask = code_info.GetStackMask(i, encoding);
- for (size_t bit_index = 0; bit_index < stack_mask_bits; ++bit_index) {
- stack_mask.StoreBit(bit_index, source.LoadBit(bit_index));
+ BitMemoryRegion src(MemoryRegion(&stack_masks_[i * stack_mask_bytes], stack_mask_bytes));
+ BitMemoryRegion dst = code_info.GetStackMask(i, encoding);
+ for (size_t bit_index = 0; bit_index < stack_mask_bits; bit_index += BitSizeOf<uint32_t>()) {
+ size_t num_bits = std::min<size_t>(stack_mask_bits - bit_index, BitSizeOf<uint32_t>());
+ dst.StoreBits(bit_index, src.LoadBits(bit_index, num_bits), num_bits);
}
}
}
@@ -600,8 +601,9 @@ size_t StackMapStream::PrepareStackMasks(size_t entry_size_in_bits) {
for (StackMapEntry& stack_map : stack_maps_) {
size_t index = dedup.size();
MemoryRegion stack_mask(stack_masks_.data() + index * byte_entry_size, byte_entry_size);
+ BitMemoryRegion stack_mask_bits(stack_mask);
for (size_t i = 0; i < entry_size_in_bits; i++) {
- stack_mask.StoreBit(i, stack_map.sp_mask != nullptr && stack_map.sp_mask->IsBitSet(i));
+ stack_mask_bits.StoreBit(i, stack_map.sp_mask != nullptr && stack_map.sp_mask->IsBitSet(i));
}
stack_map.stack_mask_index = dedup.emplace(stack_mask, index).first->second;
}