ART: Add arena tracking mode

Add an arena tracking mode to get better insight into allocation
behavior. In this mode, the default arena size is very small, so that
a single arena serves only a limited number of allocations. At the
same time, arenas are not released back into their pool for reuse;
they are freed instead.

Tracking in this context is with respect to tools that analyze calls to
allocation routines, e.g., massif and heaptrack. The goal of this CL
is to enable more precise tracking with such tools. The smaller
minimal arena sizes and deallocation instead of reuse will lead
to actual malloc calls instead of bump-pointer behavior, exposing
ArenaAllocator-based allocation to such tools.

To limit the build-time impact of switching tracking, add an -inl
file for the arena allocator that defines the controlling flag and
the default arena size.

Bug: 34053922
Test: m test-art-host
Change-Id: I09bb5e743d7dc47e499a402d6fcac637c16a26ad
diff --git a/runtime/base/arena_allocator-inl.h b/runtime/base/arena_allocator-inl.h
new file mode 100644
index 0000000..0e43837
--- /dev/null
+++ b/runtime/base/arena_allocator-inl.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_BASE_ARENA_ALLOCATOR_INL_H_
+#define ART_RUNTIME_BASE_ARENA_ALLOCATOR_INL_H_
+
+#include "arena_allocator.h"
+
+namespace art {
+namespace arena_allocator {
+
+static constexpr bool kArenaAllocatorPreciseTracking = kArenaAllocatorCountAllocations;
+
+static constexpr size_t kArenaDefaultSize = kArenaAllocatorPreciseTracking
+                                                ? 32
+                                                : 128 * KB;
+
+}  // namespace arena_allocator
+}  // namespace art
+
+#endif  // ART_RUNTIME_BASE_ARENA_ALLOCATOR_INL_H_
diff --git a/runtime/base/arena_allocator.cc b/runtime/base/arena_allocator.cc
index 136ed12..fc5b5b1 100644
--- a/runtime/base/arena_allocator.cc
+++ b/runtime/base/arena_allocator.cc
@@ -19,7 +19,7 @@
 #include <iomanip>
 #include <numeric>
 
-#include "arena_allocator.h"
+#include "arena_allocator-inl.h"
 #include "logging.h"
 #include "mem_map.h"
 #include "mutex.h"
@@ -29,7 +29,6 @@
 namespace art {
 
 constexpr size_t kMemoryToolRedZoneBytes = 8;
-constexpr size_t Arena::kDefaultSize;
 
 template <bool kCount>
 const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
@@ -181,7 +180,7 @@
 
 class MallocArena FINAL : public Arena {
  public:
-  explicit MallocArena(size_t size = Arena::kDefaultSize);
+  explicit MallocArena(size_t size = arena_allocator::kArenaDefaultSize);
   virtual ~MallocArena();
  private:
   static constexpr size_t RequiredOverallocation() {
@@ -344,6 +343,17 @@
       MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
     }
   }
+
+  if (arena_allocator::kArenaAllocatorPreciseTracking) {
+    // Do not reuse arenas when tracking.
+    while (first != nullptr) {
+      Arena* next = first->next_;
+      delete first;
+      first = next;
+    }
+    return;
+  }
+
   if (first != nullptr) {
     Arena* last = first;
     while (last->next_ != nullptr) {
@@ -437,7 +447,7 @@
 }
 
 uint8_t* ArenaAllocator::AllocFromNewArena(size_t bytes) {
-  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, bytes));
+  Arena* new_arena = pool_->AllocArena(std::max(arena_allocator::kArenaDefaultSize, bytes));
   DCHECK(new_arena != nullptr);
   DCHECK_LE(bytes, new_arena->Size());
   if (static_cast<size_t>(end_ - ptr_) > new_arena->Size() - bytes) {
diff --git a/runtime/base/arena_allocator.h b/runtime/base/arena_allocator.h
index 60b6ea8..5430458 100644
--- a/runtime/base/arena_allocator.h
+++ b/runtime/base/arena_allocator.h
@@ -195,7 +195,6 @@
 
 class Arena {
  public:
-  static constexpr size_t kDefaultSize = 128 * KB;
   Arena();
   virtual ~Arena() { }
   // Reset is for pre-use and uses memset for performance.
diff --git a/runtime/base/arena_allocator_test.cc b/runtime/base/arena_allocator_test.cc
index fd48a3f..e2c2e2f 100644
--- a/runtime/base/arena_allocator_test.cc
+++ b/runtime/base/arena_allocator_test.cc
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#include "base/arena_allocator.h"
+#include "base/arena_allocator-inl.h"
 #include "base/arena_bit_vector.h"
 #include "base/memory_tool.h"
 #include "gtest/gtest.h"
@@ -65,23 +65,28 @@
 }
 
 TEST_F(ArenaAllocatorTest, LargeAllocations) {
+  if (arena_allocator::kArenaAllocatorPreciseTracking) {
+    printf("WARNING: TEST DISABLED FOR precise arena tracking\n");
+    return;
+  }
+
   {
     ArenaPool pool;
     ArenaAllocator arena(&pool);
     // Note: Leaving some space for memory tool red zones.
-    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 5 / 8);
-    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 2 / 8);
+    void* alloc1 = arena.Alloc(arena_allocator::kArenaDefaultSize * 5 / 8);
+    void* alloc2 = arena.Alloc(arena_allocator::kArenaDefaultSize * 2 / 8);
     ASSERT_NE(alloc1, alloc2);
     ASSERT_EQ(1u, NumberOfArenas(&arena));
   }
   {
     ArenaPool pool;
     ArenaAllocator arena(&pool);
-    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 13 / 16);
-    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 11 / 16);
+    void* alloc1 = arena.Alloc(arena_allocator::kArenaDefaultSize * 13 / 16);
+    void* alloc2 = arena.Alloc(arena_allocator::kArenaDefaultSize * 11 / 16);
     ASSERT_NE(alloc1, alloc2);
     ASSERT_EQ(2u, NumberOfArenas(&arena));
-    void* alloc3 = arena.Alloc(Arena::kDefaultSize * 7 / 16);
+    void* alloc3 = arena.Alloc(arena_allocator::kArenaDefaultSize * 7 / 16);
     ASSERT_NE(alloc1, alloc3);
     ASSERT_NE(alloc2, alloc3);
     ASSERT_EQ(3u, NumberOfArenas(&arena));
@@ -89,12 +94,12 @@
   {
     ArenaPool pool;
     ArenaAllocator arena(&pool);
-    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 13 / 16);
-    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 9 / 16);
+    void* alloc1 = arena.Alloc(arena_allocator::kArenaDefaultSize * 13 / 16);
+    void* alloc2 = arena.Alloc(arena_allocator::kArenaDefaultSize * 9 / 16);
     ASSERT_NE(alloc1, alloc2);
     ASSERT_EQ(2u, NumberOfArenas(&arena));
     // Note: Leaving some space for memory tool red zones.
-    void* alloc3 = arena.Alloc(Arena::kDefaultSize * 5 / 16);
+    void* alloc3 = arena.Alloc(arena_allocator::kArenaDefaultSize * 5 / 16);
     ASSERT_NE(alloc1, alloc3);
     ASSERT_NE(alloc2, alloc3);
     ASSERT_EQ(2u, NumberOfArenas(&arena));
@@ -102,12 +107,12 @@
   {
     ArenaPool pool;
     ArenaAllocator arena(&pool);
-    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 9 / 16);
-    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 13 / 16);
+    void* alloc1 = arena.Alloc(arena_allocator::kArenaDefaultSize * 9 / 16);
+    void* alloc2 = arena.Alloc(arena_allocator::kArenaDefaultSize * 13 / 16);
     ASSERT_NE(alloc1, alloc2);
     ASSERT_EQ(2u, NumberOfArenas(&arena));
     // Note: Leaving some space for memory tool red zones.
-    void* alloc3 = arena.Alloc(Arena::kDefaultSize * 5 / 16);
+    void* alloc3 = arena.Alloc(arena_allocator::kArenaDefaultSize * 5 / 16);
     ASSERT_NE(alloc1, alloc3);
     ASSERT_NE(alloc2, alloc3);
     ASSERT_EQ(2u, NumberOfArenas(&arena));
@@ -117,9 +122,9 @@
     ArenaAllocator arena(&pool);
     // Note: Leaving some space for memory tool red zones.
     for (size_t i = 0; i != 15; ++i) {
-      arena.Alloc(Arena::kDefaultSize * 1 / 16);    // Allocate 15 times from the same arena.
+      arena.Alloc(arena_allocator::kArenaDefaultSize * 1 / 16);    // Allocate 15 times from the same arena.
       ASSERT_EQ(i + 1u, NumberOfArenas(&arena));
-      arena.Alloc(Arena::kDefaultSize * 17 / 16);   // Allocate a separate arena.
+      arena.Alloc(arena_allocator::kArenaDefaultSize * 17 / 16);   // Allocate a separate arena.
       ASSERT_EQ(i + 2u, NumberOfArenas(&arena));
     }
   }
@@ -204,10 +209,11 @@
     ArenaPool pool;
     ArenaAllocator arena(&pool);
 
-    const size_t original_size = Arena::kDefaultSize - ArenaAllocator::kAlignment * 5;
+    const size_t original_size = arena_allocator::kArenaDefaultSize -
+        ArenaAllocator::kAlignment * 5;
     void* original_allocation = arena.Alloc(original_size);
 
-    const size_t new_size = Arena::kDefaultSize + ArenaAllocator::kAlignment * 2;
+    const size_t new_size = arena_allocator::kArenaDefaultSize + ArenaAllocator::kAlignment * 2;
     void* realloc_allocation = arena.Realloc(original_allocation, original_size, new_size);
     EXPECT_NE(original_allocation, realloc_allocation);
   }
@@ -217,12 +223,12 @@
     ArenaPool pool;
     ArenaAllocator arena(&pool);
 
-    const size_t original_size = Arena::kDefaultSize -
+    const size_t original_size = arena_allocator::kArenaDefaultSize -
         ArenaAllocator::kAlignment * 4 -
         ArenaAllocator::kAlignment / 2;
     void* original_allocation = arena.Alloc(original_size);
 
-    const size_t new_size = Arena::kDefaultSize +
+    const size_t new_size = arena_allocator::kArenaDefaultSize +
         ArenaAllocator::kAlignment * 2 +
         ArenaAllocator::kAlignment / 2;
     void* realloc_allocation = arena.Realloc(original_allocation, original_size, new_size);
@@ -307,11 +313,12 @@
     ArenaPool pool;
     ArenaAllocator arena(&pool);
 
-    const size_t original_size = Arena::kDefaultSize - ArenaAllocator::kAlignment * 5;
+    const size_t original_size = arena_allocator::kArenaDefaultSize -
+        ArenaAllocator::kAlignment * 5;
     void* original_allocation = arena.Alloc(original_size);
     ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));
 
-    const size_t new_size = Arena::kDefaultSize + ArenaAllocator::kAlignment * 2;
+    const size_t new_size = arena_allocator::kArenaDefaultSize + ArenaAllocator::kAlignment * 2;
     void* realloc_allocation = arena.Realloc(original_allocation, original_size, new_size);
     EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));
 
@@ -324,13 +331,13 @@
     ArenaPool pool;
     ArenaAllocator arena(&pool);
 
-    const size_t original_size = Arena::kDefaultSize -
+    const size_t original_size = arena_allocator::kArenaDefaultSize -
         ArenaAllocator::kAlignment * 4 -
         ArenaAllocator::kAlignment / 2;
     void* original_allocation = arena.Alloc(original_size);
     ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));
 
-    const size_t new_size = Arena::kDefaultSize +
+    const size_t new_size = arena_allocator::kArenaDefaultSize +
         ArenaAllocator::kAlignment * 2 +
         ArenaAllocator::kAlignment / 2;
     void* realloc_allocation = arena.Realloc(original_allocation, original_size, new_size);
diff --git a/runtime/base/scoped_arena_allocator.cc b/runtime/base/scoped_arena_allocator.cc
index 7d04fa0..973f9b9 100644
--- a/runtime/base/scoped_arena_allocator.cc
+++ b/runtime/base/scoped_arena_allocator.cc
@@ -16,7 +16,7 @@
 
 #include "scoped_arena_allocator.h"
 
-#include "arena_allocator.h"
+#include "arena_allocator-inl.h"
 #include "base/memory_tool.h"
 
 namespace art {
@@ -54,7 +54,7 @@
 
 uint8_t* ArenaStack::AllocateFromNextArena(size_t rounded_bytes) {
   UpdateBytesAllocated();
-  size_t allocation_size = std::max(Arena::kDefaultSize, rounded_bytes);
+  size_t allocation_size = std::max(arena_allocator::kArenaDefaultSize, rounded_bytes);
   if (UNLIKELY(top_arena_ == nullptr)) {
     top_arena_ = bottom_arena_ = stats_and_pool_.pool->AllocArena(allocation_size);
     top_arena_->next_ = nullptr;