Optimizing: Fix register allocator validation memory usage.
Also attribute ArenaBitVector allocations to the appropriate
passes. The per-pass attribution was used to track down the
source of the excessive memory allocations.
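
For reference, a minimal sketch of the attribution pattern, assuming a
simplified ArenaAllocator. The Create() signature and the
kArenaAllocStackMapStream kind mirror the call sites in the diff below;
the other enum values, the BytesUsed() accessor, and the backing-storage
bookkeeping are illustrative only, not ART's actual implementation:

#include <cstddef>
#include <cstdint>
#include <new>
#include <vector>

// Illustrative allocation kinds; only kArenaAllocStackMapStream appears
// in the diff, the rest are placeholders.
enum ArenaAllocKind {
  kArenaAllocMisc,
  kArenaAllocStackMapStream,
  kNumArenaAllocKinds,
};

class ArenaAllocator {
 public:
  // Every allocation is tagged with a kind so memory can be attributed
  // to the pass or data structure that requested it.
  void* Alloc(size_t bytes, ArenaAllocKind kind) {
    bytes_used_[kind] += bytes;
    storage_.emplace_back(bytes);
    return storage_.back().data();
  }
  size_t BytesUsed(ArenaAllocKind kind) const { return bytes_used_[kind]; }

 private:
  size_t bytes_used_[kNumArenaAllocKinds] = {};
  std::vector<std::vector<uint8_t>> storage_;  // backing memory for the sketch
};

class ArenaBitVector {
 public:
  // Factory used instead of an untagged placement new: the allocation kind
  // is forwarded to the arena, so the bit vector's memory shows up under
  // the right bucket (e.g. kArenaAllocStackMapStream).
  static ArenaBitVector* Create(ArenaAllocator* allocator,
                                uint32_t start_bits,
                                bool expandable,
                                ArenaAllocKind kind) {
    void* memory = allocator->Alloc(sizeof(ArenaBitVector), kind);
    return new (memory) ArenaBitVector(allocator, start_bits, expandable, kind);
  }

  ArenaBitVector(ArenaAllocator* allocator,
                 uint32_t start_bits,
                 bool expandable,
                 ArenaAllocKind kind = kArenaAllocMisc)
      : bits_(allocator->Alloc((start_bits + 7) / 8, kind)),
        expandable_(expandable) {}

 private:
  void* bits_;
  bool expandable_;
};

Tagging at the Alloc()/Create() boundary keeps call sites cheap: each
ArenaBitVector construction site in the diff only gains one extra
argument naming the owning pass.
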
Bug: 27690481
Change-Id: Ib895984cb7c04e24cbc7abbd8322079bab8ab100
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index 54cbdf8..3f41e35 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -37,7 +37,7 @@
current_entry_.same_dex_register_map_as_ = kNoSameDexMapFound;
if (num_dex_registers != 0) {
current_entry_.live_dex_registers_mask =
- new (allocator_) ArenaBitVector(allocator_, num_dex_registers, true);
+ ArenaBitVector::Create(allocator_, num_dex_registers, true, kArenaAllocStackMapStream);
} else {
current_entry_.live_dex_registers_mask = nullptr;
}
@@ -111,7 +111,7 @@
current_inline_info_.dex_register_locations_start_index = dex_register_locations_.size();
if (num_dex_registers != 0) {
current_inline_info_.live_dex_registers_mask =
- new (allocator_) ArenaBitVector(allocator_, num_dex_registers, true);
+ ArenaBitVector::Create(allocator_, num_dex_registers, true, kArenaAllocStackMapStream);
} else {
current_inline_info_.live_dex_registers_mask = nullptr;
}
@@ -256,7 +256,7 @@
// Ensure we reached the end of the Dex registers location_catalog.
DCHECK_EQ(location_catalog_offset, dex_register_location_catalog_region.size());

- ArenaBitVector empty_bitmask(allocator_, 0, /* expandable */ false);
+ ArenaBitVector empty_bitmask(allocator_, 0, /* expandable */ false, kArenaAllocStackMapStream);
uintptr_t next_dex_register_map_offset = 0;
uintptr_t next_inline_info_offset = 0;
for (size_t i = 0, e = stack_maps_.size(); i < e; ++i) {