author Vladimir Marko <vmarko@google.com> 2015-09-18 14:11:00 +0100
committer Vladimir Marko <vmarko@google.com> 2015-09-30 13:41:46 +0100
commit 2a408a3bef330551818f9cec9a7c5aa7a3f1129e
tree   732bf331e38609a75679b101a1e8a6b65dfe576e
parent b4fd73139aca48d7319221aeefe8bae93a98c56d
ART: Mark deallocated arena memory as inaccessible.
Mark arena and scoped arena memory freed by allocator adapters as
inaccessible. This can help catch accesses to the old storage of a
container, for example the old data of an ArenaVector<> that has been
resized. Together with the debug-mode enforcement of destruction of all
scoped arena containers, this provides strong verification of their
memory usage.

However, this does not apply to the normal (non-scoped) arena memory
held by arena containers, as they are typically not destroyed if they
are themselves located in the arena. ArenaBitVector memory, whether in
a normal or scoped arena, is not marked either.

Change-Id: I4d2a80fedf7ceb7d4ce24ee8e7bcd53513171388
-rw-r--r--  runtime/base/arena_allocator.cc         12
-rw-r--r--  runtime/base/arena_allocator.h          63
-rw-r--r--  runtime/base/arena_containers.h          2
-rw-r--r--  runtime/base/memory_tool.h               3
-rw-r--r--  runtime/base/scoped_arena_allocator.cc   5
-rw-r--r--  runtime/base/scoped_arena_allocator.h   15
-rw-r--r--  runtime/base/scoped_arena_containers.h   3
7 files changed, 79 insertions, 24 deletions
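
The class of bug this change catches is easiest to see in isolation. Below is a minimal, hypothetical sketch (not ART code; all names are invented): a vector's backing store is reallocated on resize, and a stale pointer into the old storage is dereferenced. With deallocate() poisoning the freed range, as the adapters in this patch now do via MakeInaccessible(), AddressSanitizer reports the access instead of silently returning stale data.

    // Hypothetical sketch of the bug class this change catches; not ART code.
    // Compile with -fsanitize=address to see a use-after-poison report.
    #include <cstddef>
    #include <vector>

    #ifndef __has_feature
    #define __has_feature(x) 0
    #endif
    #if __has_feature(address_sanitizer) || defined(__SANITIZE_ADDRESS__)
    #include <sanitizer/asan_interface.h>
    #define POISON(p, s) __asan_poison_memory_region((p), (s))
    #else
    #define POISON(p, s) do { (void)(p); (void)(s); } while (0)
    #endif

    // An STL allocator whose deallocate() only poisons, mirroring how an
    // arena adapter cannot actually free bump-allocated memory.
    template <typename T>
    struct PoisoningAllocator {
      using value_type = T;
      PoisoningAllocator() = default;
      template <typename U> PoisoningAllocator(const PoisoningAllocator<U>&) {}
      T* allocate(std::size_t n) {
        return static_cast<T*>(::operator new(n * sizeof(T)));
      }
      void deallocate(T* p, std::size_t n) {
        POISON(p, n * sizeof(T));  // Old storage becomes inaccessible.
        // Deliberately not freed: an arena keeps the range alive but poisoned.
      }
    };
    template <typename T, typename U>
    bool operator==(const PoisoningAllocator<T>&, const PoisoningAllocator<U>&) {
      return true;
    }
    template <typename T, typename U>
    bool operator!=(const PoisoningAllocator<T>&, const PoisoningAllocator<U>&) {
      return false;
    }

    int main() {
      std::vector<int, PoisoningAllocator<int>> v(1, 42);
      int* stale = &v[0];
      v.resize(1000);  // Reallocates; old storage is poisoned in deallocate().
      return *stale;   // ASan: use-after-poison instead of a silent stale read.
    }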
diff --git a/runtime/base/arena_allocator.cc b/runtime/base/arena_allocator.cc
index 4e51f5555d..e07520cfa7 100644
--- a/runtime/base/arena_allocator.cc
+++ b/runtime/base/arena_allocator.cc
@@ -23,7 +23,6 @@
#include "mem_map.h"
#include "mutex.h"
#include "thread-inl.h"
-#include "base/memory_tool.h"
namespace art {
@@ -276,8 +275,7 @@ ArenaAllocator::ArenaAllocator(ArenaPool* pool)
begin_(nullptr),
end_(nullptr),
ptr_(nullptr),
- arena_head_(nullptr),
- is_running_on_memory_tool_(RUNNING_ON_MEMORY_TOOL) {
+ arena_head_(nullptr) {
}
void ArenaAllocator::UpdateBytesAllocated() {
@@ -288,14 +286,13 @@ void ArenaAllocator::UpdateBytesAllocated() {
}
}
-void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
+void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
// Obtain a new block.
ObtainNewArenaForAllocation(rounded_bytes);
- if (UNLIKELY(ptr_ == nullptr)) {
- return nullptr;
- }
+ CHECK(ptr_ != nullptr);
+ MEMORY_TOOL_MAKE_UNDEFINED(ptr_, end_ - ptr_);
}
ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
uint8_t* ret = ptr_;
@@ -304,6 +301,7 @@ void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
CHECK_EQ(*ptr, 0U);
}
+ MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
MEMORY_TOOL_MAKE_NOACCESS(ret + bytes, rounded_bytes - bytes);
return ret;
}
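
The arithmetic in AllocWithMemoryTool() deserves a worked example. kMemoryToolRedZoneBytes is assumed to be 8 below (its actual value is defined elsewhere in this header); the sketch simply reproduces the rounding:

    // Worked example of the red-zone split in AllocWithMemoryTool(), assuming
    // kMemoryToolRedZoneBytes == 8 (value assumed for illustration).
    #include <cstddef>
    #include <cstdio>

    constexpr std::size_t RoundUp(std::size_t x, std::size_t n) {
      return (x + n - 1) & ~(n - 1);  // n must be a power of two.
    }

    int main() {
      constexpr std::size_t kRedZone = 8;
      std::size_t bytes = 20;                              // caller's request
      std::size_t rounded = RoundUp(bytes + kRedZone, 8);  // 28 -> 32
      // The arena pointer advances by 32 bytes: the first 20 are made DEFINED
      // for the caller and the trailing 12 are made NOACCESS, so a small
      // overrun lands in the red zone and is reported by the tool.
      std::printf("usable: %zu, red zone: %zu\n", bytes, rounded - bytes);
      return 0;
    }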
diff --git a/runtime/base/arena_allocator.h b/runtime/base/arena_allocator.h
index c5eb741b76..f1cc5b1bf1 100644
--- a/runtime/base/arena_allocator.h
+++ b/runtime/base/arena_allocator.h
@@ -21,6 +21,7 @@
#include <stddef.h>
#include "base/bit_utils.h"
+#include "base/memory_tool.h"
#include "debug_stack.h"
#include "macros.h"
#include "mutex.h"
@@ -135,6 +136,52 @@ class ArenaAllocatorStatsImpl {
typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;
+template <bool kAvailable, bool kValgrind>
+class ArenaAllocatorMemoryToolCheckImpl {
+ // This is the generic template but since there is a partial specialization
+ // for kValgrind == false, this can be instantiated only for kValgrind == true.
+ static_assert(kValgrind, "This template can be instantiated only for Valgrind.");
+ static_assert(kAvailable, "Valgrind implies memory tool availability.");
+
+ public:
+ ArenaAllocatorMemoryToolCheckImpl() : is_running_on_valgrind_(RUNNING_ON_MEMORY_TOOL) { }
+ bool IsRunningOnMemoryTool() { return is_running_on_valgrind_; }
+
+ private:
+ const bool is_running_on_valgrind_;
+};
+
+template <bool kAvailable>
+class ArenaAllocatorMemoryToolCheckImpl<kAvailable, false> {
+ public:
+ ArenaAllocatorMemoryToolCheckImpl() { }
+ bool IsRunningOnMemoryTool() { return kAvailable; }
+};
+
+typedef ArenaAllocatorMemoryToolCheckImpl<kMemoryToolIsAvailable, kMemoryToolIsValgrind>
+ ArenaAllocatorMemoryToolCheck;
+
+class ArenaAllocatorMemoryTool : private ArenaAllocatorMemoryToolCheck {
+ public:
+ using ArenaAllocatorMemoryToolCheck::IsRunningOnMemoryTool;
+
+ void MakeDefined(void* ptr, size_t size) {
+ if (IsRunningOnMemoryTool()) {
+ MEMORY_TOOL_MAKE_DEFINED(ptr, size);
+ }
+ }
+ void MakeUndefined(void* ptr, size_t size) {
+ if (IsRunningOnMemoryTool()) {
+ MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
+ }
+ }
+ void MakeInaccessible(void* ptr, size_t size) {
+ if (IsRunningOnMemoryTool()) {
+ MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
+ }
+ }
+};
+
class Arena {
public:
static constexpr size_t kDefaultSize = 128 * KB;
@@ -219,18 +266,24 @@ class ArenaPool {
DISALLOW_COPY_AND_ASSIGN(ArenaPool);
};
-class ArenaAllocator : private DebugStackRefCounter, private ArenaAllocatorStats {
+class ArenaAllocator
+ : private DebugStackRefCounter, private ArenaAllocatorStats, private ArenaAllocatorMemoryTool {
public:
explicit ArenaAllocator(ArenaPool* pool);
~ArenaAllocator();
+ using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
+ using ArenaAllocatorMemoryTool::MakeDefined;
+ using ArenaAllocatorMemoryTool::MakeUndefined;
+ using ArenaAllocatorMemoryTool::MakeInaccessible;
+
// Get adapter for use in STL containers. See arena_containers.h .
ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
// Returns zeroed memory.
void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
- if (UNLIKELY(is_running_on_memory_tool_)) {
- return AllocValgrind(bytes, kind);
+ if (UNLIKELY(IsRunningOnMemoryTool())) {
+ return AllocWithMemoryTool(bytes, kind);
}
bytes = RoundUp(bytes, kAlignment);
if (UNLIKELY(ptr_ + bytes > end_)) {
@@ -254,6 +307,7 @@ class ArenaAllocator : private DebugStackRefCounter, private ArenaAllocatorStats
auto* end = reinterpret_cast<uint8_t*>(ptr) + ptr_size;
// If we haven't allocated anything else, we can safely extend.
if (end == ptr_) {
+ DCHECK(!IsRunningOnMemoryTool()); // Red zone prevents end == ptr_.
const size_t size_delta = new_size - ptr_size;
// Check remain space.
const size_t remain = end_ - ptr_;
@@ -274,7 +328,7 @@ class ArenaAllocator : private DebugStackRefCounter, private ArenaAllocatorStats
return static_cast<T*>(Alloc(length * sizeof(T), kind));
}
- void* AllocValgrind(size_t bytes, ArenaAllocKind kind);
+ void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
void ObtainNewArenaForAllocation(size_t allocation_size);
@@ -302,7 +356,6 @@ class ArenaAllocator : private DebugStackRefCounter, private ArenaAllocatorStats
uint8_t* end_;
uint8_t* ptr_;
Arena* arena_head_;
- bool is_running_on_memory_tool_;
template <typename U>
friend class ArenaAllocatorAdapter;
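
The new check class above uses a partial specialization so that IsRunningOnMemoryTool() is a compile-time constant whenever no runtime query is needed (ASan builds, or no tool at all), and only the Valgrind case pays for a stored flag. A standalone sketch of the same idiom, with invented names:

    // Standalone sketch of the ArenaAllocatorMemoryToolCheckImpl idiom
    // (simplified; all names invented).
    #include <cstdio>

    template <bool kAvailable, bool kNeedsRuntimeQuery>
    class ToolCheck {
      // As in the ART class, the generic template serves only the runtime case.
      static_assert(kNeedsRuntimeQuery, "generic template is the runtime case");
     public:
      ToolCheck() : running_(QueryTool()) {}
      bool IsRunning() const { return running_; }
     private:
      static bool QueryTool() { return false; }  // Stand-in for RUNNING_ON_MEMORY_TOOL.
      const bool running_;
    };

    // Partial specialization: the answer is known at compile time, no state.
    template <bool kAvailable>
    class ToolCheck<kAvailable, false> {
     public:
      bool IsRunning() const { return kAvailable; }
    };

    int main() {
      ToolCheck<true, false> asan_like;     // ASan build: constant 'true'.
      ToolCheck<false, false> no_tool;      // No tool: constant 'false'.
      ToolCheck<true, true> valgrind_like;  // Valgrind: queried at run time.
      std::printf("%d %d %d\n", asan_like.IsRunning(), no_tool.IsRunning(),
                  valgrind_like.IsRunning());
      return 0;
    }

When the answer is a compile-time false, the if (IsRunningOnMemoryTool()) guards in MakeDefined()/MakeUndefined()/MakeInaccessible() can fold away entirely in regular builds.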
diff --git a/runtime/base/arena_containers.h b/runtime/base/arena_containers.h
index 810c1c4b66..e7ea09d1b0 100644
--- a/runtime/base/arena_containers.h
+++ b/runtime/base/arena_containers.h
@@ -161,7 +161,7 @@ class ArenaAllocatorAdapter : private ArenaAllocatorAdapterKind {
return arena_allocator_->AllocArray<T>(n, ArenaAllocatorAdapterKind::Kind());
}
void deallocate(pointer p, size_type n) {
- UNUSED(p, n);
+ arena_allocator_->MakeInaccessible(p, sizeof(T) * n);
}
void construct(pointer p, const_reference val) {
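
Note the asymmetry in the adapter's deallocate(): nothing is returned to the arena (arena memory is bump-allocated and reclaimed wholesale), so the only useful work is marking the sizeof(T) * n bytes inaccessible. The range stays poisoned until the underlying arena block is handed out again, at which point AllocWithMemoryTool() above marks the fresh region undefined before carving allocations out of it.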
diff --git a/runtime/base/memory_tool.h b/runtime/base/memory_tool.h
index e0bdcfeced..e1a2e07aca 100644
--- a/runtime/base/memory_tool.h
+++ b/runtime/base/memory_tool.h
@@ -32,10 +32,12 @@
#define MEMORY_TOOL_MAKE_NOACCESS(p, s) __asan_poison_memory_region(p, s)
#define MEMORY_TOOL_MAKE_UNDEFINED(p, s) __asan_unpoison_memory_region(p, s)
#define MEMORY_TOOL_MAKE_DEFINED(p, s) __asan_unpoison_memory_region(p, s)
+constexpr bool kMemoryToolIsAvailable = true;
#else
#define MEMORY_TOOL_MAKE_NOACCESS(p, s) do { (void)(p); (void)(s); } while (0)
#define MEMORY_TOOL_MAKE_UNDEFINED(p, s) do { (void)(p); (void)(s); } while (0)
#define MEMORY_TOOL_MAKE_DEFINED(p, s) do { (void)(p); (void)(s); } while (0)
+constexpr bool kMemoryToolIsAvailable = false;
#endif
#define ATTRIBUTE_NO_SANITIZE_ADDRESS __attribute__((no_sanitize_address))
@@ -54,6 +56,7 @@ constexpr size_t kMemoryToolStackGuardSizeScale = 2;
#define MEMORY_TOOL_MAKE_DEFINED(p, s) VALGRIND_MAKE_MEM_DEFINED(p, s)
#define ATTRIBUTE_NO_SANITIZE_ADDRESS
#define RUNNING_ON_MEMORY_TOOL RUNNING_ON_VALGRIND
+constexpr bool kMemoryToolIsAvailable = true;
constexpr bool kMemoryToolIsValgrind = true;
constexpr bool kMemoryToolDetectsLeaks = true;
constexpr bool kMemoryToolAddsRedzones = true;
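
For context, memory_tool.h is structured as a three-way switch: under ASan the MEMORY_TOOL_* macros map to the compiler-rt poisoning interface, under Valgrind to client requests, and in plain builds to no-ops; the new kMemoryToolIsAvailable constant records that choice at compile time. A simplified sketch of the same header pattern (not the actual header, which also defines kMemoryToolIsValgrind, stack-guard scaling, and more; USE_VALGRIND below is a hypothetical configuration flag standing in for the real build logic):

    // Simplified sketch of the memory_tool.h pattern: one poisoning macro,
    // three backends.
    #ifndef __has_feature
    #define __has_feature(x) 0
    #endif

    #if __has_feature(address_sanitizer) || defined(__SANITIZE_ADDRESS__)
    #include <sanitizer/asan_interface.h>
    #define MAKE_NOACCESS(p, s) __asan_poison_memory_region((p), (s))
    constexpr bool kToolAvailable = true;
    #elif defined(USE_VALGRIND)  // hypothetical flag for illustration
    #include <valgrind/memcheck.h>
    #define MAKE_NOACCESS(p, s) VALGRIND_MAKE_MEM_NOACCESS((p), (s))
    constexpr bool kToolAvailable = true;
    #else
    #define MAKE_NOACCESS(p, s) do { (void)(p); (void)(s); } while (0)
    constexpr bool kToolAvailable = false;
    #endif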
diff --git a/runtime/base/scoped_arena_allocator.cc b/runtime/base/scoped_arena_allocator.cc
index d823edd6d2..31f96e4783 100644
--- a/runtime/base/scoped_arena_allocator.cc
+++ b/runtime/base/scoped_arena_allocator.cc
@@ -29,8 +29,7 @@ ArenaStack::ArenaStack(ArenaPool* arena_pool)
bottom_arena_(nullptr),
top_arena_(nullptr),
top_ptr_(nullptr),
- top_end_(nullptr),
- is_running_on_memory_tool_(RUNNING_ON_MEMORY_TOOL > 0) {
+ top_end_(nullptr) {
}
ArenaStack::~ArenaStack() {
@@ -91,7 +90,7 @@ void ArenaStack::UpdateBytesAllocated() {
}
}
-void* ArenaStack::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
+void* ArenaStack::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
uint8_t* ptr = top_ptr_;
if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
diff --git a/runtime/base/scoped_arena_allocator.h b/runtime/base/scoped_arena_allocator.h
index ca514e411c..4f513707bb 100644
--- a/runtime/base/scoped_arena_allocator.h
+++ b/runtime/base/scoped_arena_allocator.h
@@ -32,11 +32,16 @@ template <typename T>
class ScopedArenaAllocatorAdapter;
// Holds a list of Arenas for use by ScopedArenaAllocator stack.
-class ArenaStack : private DebugStackRefCounter {
+class ArenaStack : private DebugStackRefCounter, private ArenaAllocatorMemoryTool {
public:
explicit ArenaStack(ArenaPool* arena_pool);
~ArenaStack();
+ using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
+ using ArenaAllocatorMemoryTool::MakeDefined;
+ using ArenaAllocatorMemoryTool::MakeUndefined;
+ using ArenaAllocatorMemoryTool::MakeInaccessible;
+
void Reset();
size_t PeakBytesAllocated() {
@@ -64,8 +69,8 @@ class ArenaStack : private DebugStackRefCounter {
// Private - access via ScopedArenaAllocator or ScopedArenaAllocatorAdapter.
void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
- if (UNLIKELY(is_running_on_memory_tool_)) {
- return AllocValgrind(bytes, kind);
+ if (UNLIKELY(IsRunningOnMemoryTool())) {
+ return AllocWithMemoryTool(bytes, kind);
}
size_t rounded_bytes = RoundUp(bytes, 8);
uint8_t* ptr = top_ptr_;
@@ -80,7 +85,7 @@ class ArenaStack : private DebugStackRefCounter {
uint8_t* AllocateFromNextArena(size_t rounded_bytes);
void UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats);
void UpdateBytesAllocated();
- void* AllocValgrind(size_t bytes, ArenaAllocKind kind);
+ void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
StatsAndPool stats_and_pool_;
Arena* bottom_arena_;
@@ -88,8 +93,6 @@ class ArenaStack : private DebugStackRefCounter {
uint8_t* top_ptr_;
uint8_t* top_end_;
- const bool is_running_on_memory_tool_;
-
friend class ScopedArenaAllocator;
template <typename T>
friend class ScopedArenaAllocatorAdapter;
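
ArenaStack, like ArenaAllocator earlier in the patch, mixes in ArenaAllocatorMemoryTool through private inheritance and re-exports just the members callers should see. A small sketch of that idiom (invented names; illustration only):

    // Sketch of the private-inheritance + using-declaration idiom.
    #include <cstddef>

    class PoisonMixin {
     public:
      void MakeInaccessible(void* p, std::size_t s) { (void)p; (void)s; /* poison */ }
      void MixinInternals() {}  // Not meant for users of the derived class.
    };

    class Stack : private PoisonMixin {
     public:
      using PoisonMixin::MakeInaccessible;  // Re-exported for adapters.
      // MixinInternals() stays hidden, and the base is not part of the public
      // interface: a Stack* does not convert to a PoisonMixin*.
    };

    int main() {
      Stack s;
      char buf[16];
      s.MakeInaccessible(buf, sizeof buf);  // OK via the using-declaration.
      // s.MixinInternals();                // Would not compile: private base.
      return 0;
    }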
diff --git a/runtime/base/scoped_arena_containers.h b/runtime/base/scoped_arena_containers.h
index 82db60e4e4..eecc55f6b2 100644
--- a/runtime/base/scoped_arena_containers.h
+++ b/runtime/base/scoped_arena_containers.h
@@ -153,9 +153,8 @@ class ScopedArenaAllocatorAdapter
ArenaAllocatorAdapterKind::Kind()));
}
void deallocate(pointer p, size_type n) {
- UNUSED(p);
- UNUSED(n);
DebugStackIndirectTopRef::CheckTop();
+ arena_stack_->MakeInaccessible(p, sizeof(T) * n);
}
void construct(pointer p, const_reference val) {
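
A closing note on ordering: the scoped adapter still calls DebugStackIndirectTopRef::CheckTop() before poisoning, which in debug builds asserts that deallocation happens while the owning ScopedArenaAllocator is still at the top of the ArenaStack. That ties into the "debug-mode enforcement of destruction of all scoped arena containers" from the commit message, and it appears to be what makes marking the range inaccessible here safe: no newer scope can have reused the memory yet.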