Use ScopedArenaAllocator in BCE, DCE, LSE, ...

... ReferenceTypePropagation and GraphChecker.

Define and use a new allocation kind for LoadStoreAnalysis.

Memory needed to compile the two most expensive methods for
aosp_angler-userdebug boot image:
  BatteryStats.dumpCheckinLocked(): 19.7MiB -> 19.6MiB (-79KiB)
  BatteryStats.dumpLocked(): 39.4MiB -> 39.3MiB (-120KiB)
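
For context, the pattern applied below looks roughly like this (a minimal
sketch; MyPassVisitor and worklist_ are illustrative names, not the actual
LSE code). The pass owns a ScopedArenaAllocator built on the graph's
ArenaStack, so all of its allocations are reclaimed when the pass finishes
instead of accumulating in the graph's ArenaAllocator for the rest of the
compilation:

  #include "base/scoped_arena_allocator.h"
  #include "base/scoped_arena_containers.h"
  #include "nodes.h"

  class MyPassVisitor {
   public:
    explicit MyPassVisitor(HGraph* graph)
        : allocator_(graph->GetArenaStack()),  // Freed when the pass ends.
          // Tag allocations with the pass's allocation kind so they are
          // attributed correctly in arena memory statistics.
          worklist_(allocator_.Adapter(kArenaAllocLSE)) {}

   private:
    // Declared before the containers below so it is constructed first
    // (and destroyed last).
    ScopedArenaAllocator allocator_;
    ScopedArenaVector<HInstruction*> worklist_;
  };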

Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Bug: 64312607
Change-Id: Ib0cf074ac21ab67d8f8f2efabbdfb84cce9cae8e
diff --git a/compiler/optimizing/load_store_elimination.cc b/compiler/optimizing/load_store_elimination.cc
index 39bfc86..af5585e 100644
--- a/compiler/optimizing/load_store_elimination.cc
+++ b/compiler/optimizing/load_store_elimination.cc
@@ -16,6 +16,9 @@
 
 #include "load_store_elimination.h"
 
+#include "base/array_ref.h"
+#include "base/scoped_arena_allocator.h"
+#include "base/scoped_arena_containers.h"
 #include "escape.h"
 #include "load_store_analysis.h"
 #include "side_effects_analysis.h"
@@ -45,17 +48,18 @@
       : HGraphVisitor(graph, stats),
         heap_location_collector_(heap_locations_collector),
         side_effects_(side_effects),
+        allocator_(graph->GetArenaStack()),
         heap_values_for_(graph->GetBlocks().size(),
-                         ArenaVector<HInstruction*>(heap_locations_collector.
-                                                    GetNumberOfHeapLocations(),
-                                                    kUnknownHeapValue,
-                                                    graph->GetAllocator()->Adapter(kArenaAllocLSE)),
-                         graph->GetAllocator()->Adapter(kArenaAllocLSE)),
-        removed_loads_(graph->GetAllocator()->Adapter(kArenaAllocLSE)),
-        substitute_instructions_for_loads_(graph->GetAllocator()->Adapter(kArenaAllocLSE)),
-        possibly_removed_stores_(graph->GetAllocator()->Adapter(kArenaAllocLSE)),
-        singleton_new_instances_(graph->GetAllocator()->Adapter(kArenaAllocLSE)),
-        singleton_new_arrays_(graph->GetAllocator()->Adapter(kArenaAllocLSE)) {
+                         ScopedArenaVector<HInstruction*>(heap_locations_collector.
+                                                          GetNumberOfHeapLocations(),
+                                                          kUnknownHeapValue,
+                                                          allocator_.Adapter(kArenaAllocLSE)),
+                         allocator_.Adapter(kArenaAllocLSE)),
+        removed_loads_(allocator_.Adapter(kArenaAllocLSE)),
+        substitute_instructions_for_loads_(allocator_.Adapter(kArenaAllocLSE)),
+        possibly_removed_stores_(allocator_.Adapter(kArenaAllocLSE)),
+        singleton_new_instances_(allocator_.Adapter(kArenaAllocLSE)),
+        singleton_new_arrays_(allocator_.Adapter(kArenaAllocLSE)) {
   }
 
   void VisitBasicBlock(HBasicBlock* block) OVERRIDE {
@@ -146,7 +150,7 @@
   void HandleLoopSideEffects(HBasicBlock* block) {
     DCHECK(block->IsLoopHeader());
     int block_id = block->GetBlockId();
-    ArenaVector<HInstruction*>& heap_values = heap_values_for_[block_id];
+    ScopedArenaVector<HInstruction*>& heap_values = heap_values_for_[block_id];
 
     // Don't eliminate loads in irreducible loops. This is safe for singletons, because
     // they are always used by the non-eliminated loop-phi.
@@ -160,7 +164,7 @@
     }
 
     HBasicBlock* pre_header = block->GetLoopInformation()->GetPreHeader();
-    ArenaVector<HInstruction*>& pre_header_heap_values =
+    ScopedArenaVector<HInstruction*>& pre_header_heap_values =
         heap_values_for_[pre_header->GetBlockId()];
 
     // Inherit the values from pre-header.
@@ -191,12 +195,12 @@
   }
 
   void MergePredecessorValues(HBasicBlock* block) {
-    const ArenaVector<HBasicBlock*>& predecessors = block->GetPredecessors();
+    ArrayRef<HBasicBlock* const> predecessors(block->GetPredecessors());
     if (predecessors.size() == 0) {
       return;
     }
 
-    ArenaVector<HInstruction*>& heap_values = heap_values_for_[block->GetBlockId()];
+    ScopedArenaVector<HInstruction*>& heap_values = heap_values_for_[block->GetBlockId()];
     for (size_t i = 0; i < heap_values.size(); i++) {
       HInstruction* merged_value = nullptr;
       // Whether merged_value is a result that's merged from all predecessors.
@@ -234,7 +238,8 @@
         // or the heap value may be needed after method return or deoptimization.
         // Keep the last store in each predecessor since future loads cannot be eliminated.
         for (HBasicBlock* predecessor : predecessors) {
-          ArenaVector<HInstruction*>& pred_values = heap_values_for_[predecessor->GetBlockId()];
+          ScopedArenaVector<HInstruction*>& pred_values =
+              heap_values_for_[predecessor->GetBlockId()];
           KeepIfIsStore(pred_values[i]);
         }
       }
@@ -303,7 +308,7 @@
     size_t idx = heap_location_collector_.FindHeapLocationIndex(
         ref_info, offset, index, declaring_class_def_index);
     DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
-    ArenaVector<HInstruction*>& heap_values =
+    ScopedArenaVector<HInstruction*>& heap_values =
         heap_values_for_[instruction->GetBlock()->GetBlockId()];
     HInstruction* heap_value = heap_values[idx];
     if (heap_value == kDefaultHeapValue) {
@@ -369,7 +374,7 @@
     size_t idx = heap_location_collector_.FindHeapLocationIndex(
         ref_info, offset, index, declaring_class_def_index);
     DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
-    ArenaVector<HInstruction*>& heap_values =
+    ScopedArenaVector<HInstruction*>& heap_values =
         heap_values_for_[instruction->GetBlock()->GetBlockId()];
     HInstruction* heap_value = heap_values[idx];
     bool same_value = false;
@@ -496,7 +501,7 @@
   }
 
   void VisitDeoptimize(HDeoptimize* instruction) {
-    const ArenaVector<HInstruction*>& heap_values =
+    const ScopedArenaVector<HInstruction*>& heap_values =
         heap_values_for_[instruction->GetBlock()->GetBlockId()];
     for (HInstruction* heap_value : heap_values) {
       // Filter out fake instructions before checking instruction kind below.
@@ -523,7 +528,7 @@
   }
 
   void HandleInvoke(HInstruction* invoke) {
-    ArenaVector<HInstruction*>& heap_values =
+    ScopedArenaVector<HInstruction*>& heap_values =
         heap_values_for_[invoke->GetBlock()->GetBlockId()];
     for (size_t i = 0; i < heap_values.size(); i++) {
       ReferenceInfo* ref_info = heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo();
@@ -590,7 +595,7 @@
         !new_instance->NeedsChecks()) {
       singleton_new_instances_.push_back(new_instance);
     }
-    ArenaVector<HInstruction*>& heap_values =
+    ScopedArenaVector<HInstruction*>& heap_values =
         heap_values_for_[new_instance->GetBlock()->GetBlockId()];
     for (size_t i = 0; i < heap_values.size(); i++) {
       HInstruction* ref =
@@ -612,7 +617,7 @@
     if (ref_info->IsSingletonAndRemovable()) {
       singleton_new_arrays_.push_back(new_array);
     }
-    ArenaVector<HInstruction*>& heap_values =
+    ScopedArenaVector<HInstruction*>& heap_values =
         heap_values_for_[new_array->GetBlock()->GetBlockId()];
     for (size_t i = 0; i < heap_values.size(); i++) {
       HeapLocation* location = heap_location_collector_.GetHeapLocation(i);
@@ -639,20 +644,23 @@
   const HeapLocationCollector& heap_location_collector_;
   const SideEffectsAnalysis& side_effects_;
 
+  // Phase-local allocator for memory that does not need to outlive this pass.
+  ScopedArenaAllocator allocator_;
+
   // One array of heap values for each block.
-  ArenaVector<ArenaVector<HInstruction*>> heap_values_for_;
+  ScopedArenaVector<ScopedArenaVector<HInstruction*>> heap_values_for_;
 
   // We record the instructions that should be eliminated but may be
   // used by heap locations. They'll be removed in the end.
-  ArenaVector<HInstruction*> removed_loads_;
-  ArenaVector<HInstruction*> substitute_instructions_for_loads_;
+  ScopedArenaVector<HInstruction*> removed_loads_;
+  ScopedArenaVector<HInstruction*> substitute_instructions_for_loads_;
 
   // Stores in this list may be removed from the list later when it's
   // found that the store cannot be eliminated.
-  ArenaVector<HInstruction*> possibly_removed_stores_;
+  ScopedArenaVector<HInstruction*> possibly_removed_stores_;
 
-  ArenaVector<HInstruction*> singleton_new_instances_;
-  ArenaVector<HInstruction*> singleton_new_arrays_;
+  ScopedArenaVector<HInstruction*> singleton_new_instances_;
+  ScopedArenaVector<HInstruction*> singleton_new_arrays_;
 
   DISALLOW_COPY_AND_ASSIGN(LSEVisitor);
 };
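
The new allocation kind for LoadStoreAnalysis mentioned in the commit message
is defined outside the file shown above. In ART, allocation kinds are
enumerators of ArenaAllocKind in base/arena_allocator.h, each with a matching
entry in a parallel name table used when dumping arena memory statistics. A
sketch of such an addition (the enumerator name kArenaAllocLSA is an
assumption, not taken from this diff):

  // In enum ArenaAllocKind (base/arena_allocator.h); the parallel name
  // table must gain a matching entry at the same position.
  kArenaAllocLSA,  // Assumed name: tags LoadStoreAnalysis allocations.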