Use ScopedArenaAllocator in BCE, DCE, LSE, ...
... ReferenceTypePropagation and GraphChecker.
Define and use a new allocation kind for LoadStoreAnalysis.
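The new kind is just another ArenaAllocKind enumerator, used to tag LoadStoreAnalysis allocations in the arena usage statistics. A minimal sketch of where it would live, assuming the enumerator name kArenaAllocLSA (the exact name is not shown in the hunks below):

  // base/arena_allocator.h (sketch, not part of the diff below)
  enum ArenaAllocKind : unsigned int {
    // ... existing kinds, e.g. kArenaAllocDCE used by dead code elimination ...
    kArenaAllocLSA,  // Assumed name: allocations made by LoadStoreAnalysis.
    // ...
  };

LoadStoreAnalysis then passes this kind to its allocator's Adapter() calls, the same way kArenaAllocDCE is used in the hunks below.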
Memory needed to compile the two most expensive methods for
aosp_angler-userdebug boot image:
BatteryStats.dumpCheckinLocked(): 19.7MiB -> 19.6MiB (-79KiB)
BatteryStats.dumpLocked(): 39.4MiB -> 39.3MiB (-120KiB)
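The savings come from the pattern shown in the hunks below: each pass keeps its temporary containers on a ScopedArenaAllocator backed by the graph's ArenaStack, so the memory is reclaimed as soon as the pass (or helper) returns, instead of staying in the graph's ArenaAllocator for the rest of the method compilation. A minimal sketch of that pattern, using only the APIs visible in the diff; note that scoped-arena memory is not guaranteed to be zero-initialized, which is why bit vectors are cleared explicitly:

  // Pass-local allocator; everything allocated through it is released
  // when `allocator` goes out of scope.
  ScopedArenaAllocator allocator(graph_->GetArenaStack());

  ScopedArenaVector<HBasicBlock*> worklist(allocator.Adapter(kArenaAllocDCE));
  ArenaBitVector live_blocks(&allocator, graph_->GetBlocks().size(),
                             /* expandable */ false, kArenaAllocDCE);
  live_blocks.ClearAllBits();  // Scoped arena memory may contain stale data.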
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Bug: 64312607
Change-Id: Ib0cf074ac21ab67d8f8f2efabbdfb84cce9cae8e
diff --git a/compiler/optimizing/dead_code_elimination.cc b/compiler/optimizing/dead_code_elimination.cc
index 5117e07..3cc7b0e 100644
--- a/compiler/optimizing/dead_code_elimination.cc
+++ b/compiler/optimizing/dead_code_elimination.cc
@@ -18,13 +18,18 @@
#include "base/array_ref.h"
#include "base/bit_vector-inl.h"
+#include "base/scoped_arena_allocator.h"
+#include "base/scoped_arena_containers.h"
#include "base/stl_util.h"
#include "ssa_phi_elimination.h"
namespace art {
static void MarkReachableBlocks(HGraph* graph, ArenaBitVector* visited) {
- ArenaVector<HBasicBlock*> worklist(graph->GetAllocator()->Adapter(kArenaAllocDCE));
+ // Use a local allocator; its memory is returned to the arena stack when this function returns.
+ ScopedArenaAllocator allocator(graph->GetArenaStack());
+
+ ScopedArenaVector<HBasicBlock*> worklist(allocator.Adapter(kArenaAllocDCE));
constexpr size_t kDefaultWorlistSize = 8;
worklist.reserve(kDefaultWorlistSize);
visited->SetBit(graph->GetEntryBlock()->GetBlockId());
@@ -305,9 +310,12 @@
}
bool HDeadCodeElimination::RemoveDeadBlocks() {
+ // Use a local allocator; its memory is returned to the arena stack when this method returns.
+ ScopedArenaAllocator allocator(graph_->GetArenaStack());
+
// Classify blocks as reachable/unreachable.
- ArenaAllocator* allocator = graph_->GetAllocator();
- ArenaBitVector live_blocks(allocator, graph_->GetBlocks().size(), false, kArenaAllocDCE);
+ ArenaBitVector live_blocks(&allocator, graph_->GetBlocks().size(), false, kArenaAllocDCE);
+ live_blocks.ClearAllBits();  // Scoped arena memory is not guaranteed to be zero-initialized.
MarkReachableBlocks(graph_, &live_blocks);
bool removed_one_or_more_blocks = false;