Add ArenaUniquePtr

The motivation is to use it when adding arena allocation to the verifier.
Also add arena free tags to help detect double-free errors.

Bug: 10921004
Change-Id: I545f3374eceb9a6a35e738cb899d1493098fb760
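
A minimal usage sketch (illustrative only; "allocator" is an assumed
ScopedArenaAllocator* and SomeType is a placeholder, neither part of this
change):

  // Placement-new into arena memory; ArenaDelete runs only the destructor,
  // since the arena itself reclaims the storage.
  art::ArenaUniquePtr<SomeType> obj(
      new (allocator->Alloc(sizeof(SomeType), art::kArenaAllocMisc)) SomeType());
  // On scope exit ~SomeType() runs; in debug builds the allocation's tag
  // flips from kUsed to kFree, so a second free would CHECK-fail.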
diff --git a/runtime/base/allocator.h b/runtime/base/allocator.h
index f9960ac..ad255b8 100644
--- a/runtime/base/allocator.h
+++ b/runtime/base/allocator.h
@@ -19,6 +19,7 @@
 
 #include <map>
 #include <set>
+#include <unordered_map>
 
 #include "atomic.h"
 #include "base/macros.h"
@@ -150,19 +151,24 @@
 template<class T, AllocatorTag kTag>
-// C++ doesn't allow template typedefs. This is a workaround template typedef which is
-// TrackingAllocatorImpl<T> if kEnableTrackingAllocator is true, std::allocator<T> otherwise.
+// An alias template: TrackingAllocatorImpl<T, kTag> if kEnableTrackingAllocator is true,
+// std::allocator<T> otherwise.
-class TrackingAllocator : public TypeStaticIf<kEnableTrackingAllocator,
-                                              TrackingAllocatorImpl<T, kTag>,
-                                              std::allocator<T>>::type {
-};
+using TrackingAllocator = typename TypeStaticIf<kEnableTrackingAllocator,
+                                                TrackingAllocatorImpl<T, kTag>,
+                                                std::allocator<T>>::type;
 
 template<class Key, class T, AllocatorTag kTag, class Compare = std::less<Key>>
-class AllocationTrackingMultiMap : public std::multimap<
-    Key, T, Compare, TrackingAllocator<std::pair<Key, T>, kTag>> {
-};
+using AllocationTrackingMultiMap = std::multimap<
+    Key, T, Compare, TrackingAllocator<std::pair<Key, T>, kTag>>;
 
 template<class Key, AllocatorTag kTag, class Compare = std::less<Key>>
-class AllocationTrackingSet : public std::set<Key, Compare, TrackingAllocator<Key, kTag>> {
-};
+using AllocationTrackingSet = std::set<Key, Compare, TrackingAllocator<Key, kTag>>;
+
+template<class Key,
+         class T,
+         AllocatorTag kTag,
+         class Hash = std::hash<Key>,
+         class Pred = std::equal_to<Key>>
+using AllocationTrackingUnorderedMap = std::unordered_map<
+    Key, T, Hash, Pred, TrackingAllocator<std::pair<const Key, T>, kTag>>;
 
 }  // namespace art
 
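For context, the new aliases are the actual standard containers rather than
derived classes, so they interoperate directly with code expecting the std
types. A sketch, with kAllocatorTagMaps standing in for whichever existing
AllocatorTag a caller would use:

  // Behaves exactly like std::unordered_map; only the allocator differs
  // when kEnableTrackingAllocator is true.
  art::AllocationTrackingUnorderedMap<int, const char*, art::kAllocatorTagMaps> m;
  m.emplace(1, "one");
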
diff --git a/runtime/base/scoped_arena_allocator.h b/runtime/base/scoped_arena_allocator.h
index 2554fb0..a30c73d 100644
--- a/runtime/base/scoped_arena_allocator.h
+++ b/runtime/base/scoped_arena_allocator.h
@@ -31,6 +31,16 @@
 template <typename T>
 class ScopedArenaAllocatorAdapter;
 
+// Tag associated with each allocation to help detect double-free errors.
+enum class ArenaFreeTag : uint8_t {
+  // Allocation is used and has not yet been destroyed.
+  kUsed,
+  // Allocation has been destroyed.
+  kFree,
+};
+
+static constexpr size_t kArenaAlignment = 8;
+
 // Holds a list of Arenas for use by ScopedArenaAllocator stack.
 class ArenaStack : private DebugStackRefCounter, private ArenaAllocatorMemoryTool {
  public:
@@ -50,6 +60,12 @@
 
   MemStats GetPeakStats() const;
 
+  // Returns the arena free tag associated with an allocation (debug builds only).
+  static ArenaFreeTag& ArenaTagForAllocation(void* ptr) {
+    DCHECK(kIsDebugBuild) << "Only debug builds have tags";
+    return *(reinterpret_cast<ArenaFreeTag*>(ptr) - 1);
+  }
+
  private:
   struct Peak;
   struct Current;
@@ -72,13 +88,18 @@
     if (UNLIKELY(IsRunningOnMemoryTool())) {
       return AllocWithMemoryTool(bytes, kind);
     }
-    size_t rounded_bytes = RoundUp(bytes, 8);
+    // In debug builds, add a full kArenaAlignment bytes for the free/used tag to keep alignment.
+    size_t rounded_bytes = RoundUp(bytes + (kIsDebugBuild ? kArenaAlignment : 0u), kArenaAlignment);
     uint8_t* ptr = top_ptr_;
     if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
       ptr = AllocateFromNextArena(rounded_bytes);
     }
     CurrentStats()->RecordAlloc(bytes, kind);
     top_ptr_ = ptr + rounded_bytes;
+    if (kIsDebugBuild) {
+      ptr += kArenaAlignment;
+      ArenaTagForAllocation(ptr) = ArenaFreeTag::kUsed;
+    }
     return ptr;
   }
 
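In debug builds each allocation is preceded by one kArenaAlignment-byte slot
whose last byte holds the ArenaFreeTag, so ArenaTagForAllocation(ptr) reads
the byte immediately before the returned pointer. A sketch of the layout and
a check, assuming a ScopedArenaAllocator* named "allocator":

  //   |<--- kArenaAlignment --->|<--- rounded user bytes --->|
  //   | padding ...... | tag 1B | returned allocation ...    |
  void* p = allocator->Alloc(16, art::kArenaAllocMisc);
  CHECK(art::ArenaStack::ArenaTagForAllocation(p) == art::ArenaFreeTag::kUsed);
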
diff --git a/runtime/base/scoped_arena_containers.h b/runtime/base/scoped_arena_containers.h
index 562c2bf..b74aef1 100644
--- a/runtime/base/scoped_arena_containers.h
+++ b/runtime/base/scoped_arena_containers.h
@@ -196,6 +196,40 @@
   return ScopedArenaAllocatorAdapter<void>(this, kind);
 }
 
+// Special deleter that only calls the destructor. Also checks for double-free errors.
+template <typename T>
+class ArenaDelete {
+  static constexpr uint8_t kMagicFill = 0xCE;
+ public:
+  void operator()(T* ptr) const {
+    ptr->~T();
+    if (RUNNING_ON_MEMORY_TOOL > 0) {
+      // Writing to the memory will fail if we already destroyed the pointer with
+      // ArenaDelete, since that made the memory no-access.
+      memset(ptr, kMagicFill, sizeof(T));
+      MEMORY_TOOL_MAKE_NOACCESS(ptr, sizeof(T));
+    } else if (kIsDebugBuild) {
+      CHECK(ArenaStack::ArenaTagForAllocation(reinterpret_cast<void*>(ptr)) == ArenaFreeTag::kUsed)
+          << "Freeing invalid object " << ptr;
+      ArenaStack::ArenaTagForAllocation(reinterpret_cast<void*>(ptr)) = ArenaFreeTag::kFree;
+      // Write a magic value to try to catch use-after-free errors.
+      memset(ptr, kMagicFill, sizeof(T));
+    }
+  }
+};
+
+// Partial specialization for T[] with a deleted operator().
+// This prevents accidental use of the unsupported array case at compile time.
+template <typename T>
+class ArenaDelete<T[]> {
+ public:
+  void operator()(T* ptr) const = delete;
+};
+
+// Arena unique_ptr that only calls the destructor of the element; the arena owns the storage.
+template <typename T>
+using ArenaUniquePtr = std::unique_ptr<T, ArenaDelete<T>>;
+
 }  // namespace art
 
 #endif  // ART_RUNTIME_BASE_SCOPED_ARENA_CONTAINERS_H_
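
A sketch of how ArenaDelete catches a double free in a debug build; Node and
"allocator" are illustrative placeholders, not part of this change:

  struct Node { int value; };
  Node* raw = new (allocator->Alloc(sizeof(Node), art::kArenaAllocMisc)) Node;
  art::ArenaUniquePtr<Node> node(raw);
  node.reset();  // Runs the destructor, flips the tag kUsed -> kFree, and
                 // fills the storage with kMagicFill.
  // A second delete of the same pointer, e.g. art::ArenaDelete<Node>()(raw),
  // would now CHECK-fail on the kFree tag.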