-rw-r--r--  runtime/base/arena_allocator.cc        | 12
-rw-r--r--  runtime/base/arena_allocator.h         | 63
-rw-r--r--  runtime/base/arena_containers.h        |  2
-rw-r--r--  runtime/base/memory_tool.h             |  3
-rw-r--r--  runtime/base/scoped_arena_allocator.cc |  5
-rw-r--r--  runtime/base/scoped_arena_allocator.h  | 15
-rw-r--r--  runtime/base/scoped_arena_containers.h |  3
7 files changed, 79 insertions(+), 24 deletions(-)
diff --git a/runtime/base/arena_allocator.cc b/runtime/base/arena_allocator.cc
index 345428c2a6..6f2aa46816 100644
--- a/runtime/base/arena_allocator.cc
+++ b/runtime/base/arena_allocator.cc
@@ -23,7 +23,6 @@
 #include "mem_map.h"
 #include "mutex.h"
 #include "thread-inl.h"
-#include "base/memory_tool.h"
 
 namespace art {
 
@@ -290,8 +289,7 @@ ArenaAllocator::ArenaAllocator(ArenaPool* pool)
     begin_(nullptr),
     end_(nullptr),
     ptr_(nullptr),
-    arena_head_(nullptr),
-    is_running_on_memory_tool_(RUNNING_ON_MEMORY_TOOL) {
+    arena_head_(nullptr) {
 }
 
 void ArenaAllocator::UpdateBytesAllocated() {
@@ -302,14 +300,13 @@ void ArenaAllocator::UpdateBytesAllocated() {
   }
 }
 
-void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
+void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
   size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
   if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
     // Obtain a new block.
     ObtainNewArenaForAllocation(rounded_bytes);
-    if (UNLIKELY(ptr_ == nullptr)) {
-      return nullptr;
-    }
+    CHECK(ptr_ != nullptr);
+    MEMORY_TOOL_MAKE_UNDEFINED(ptr_, end_ - ptr_);
   }
   ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
   uint8_t* ret = ptr_;
@@ -318,6 +315,7 @@
   for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
     CHECK_EQ(*ptr, 0U);
   }
+  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
   MEMORY_TOOL_MAKE_NOACCESS(ret + bytes, rounded_bytes - bytes);
   return ret;
 }
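The AllocWithMemoryTool() path above implements a classic red-zone scheme: every request is padded by kMemoryToolRedZoneBytes, only the requested bytes are made addressable, and the padding stays poisoned so an out-of-bounds access trips the memory tool. Below is a minimal standalone sketch of the same idea, assuming an ASan build (compile with -fsanitize=address); ToyArena, kRedZoneBytes, and RoundUp are illustrative stand-ins, not ART code.

#include <sanitizer/asan_interface.h>

#include <cstddef>
#include <cstdint>
#include <cstring>

constexpr size_t kRedZoneBytes = 8;  // Stand-in for ART's kMemoryToolRedZoneBytes.

size_t RoundUp(size_t x, size_t n) {
  return (x + n - 1) & ~(n - 1);
}

class ToyArena {
 public:
  explicit ToyArena(size_t capacity)
      : begin_(new uint8_t[capacity]()), ptr_(begin_), end_(begin_ + capacity) {
    // Nothing in the arena is addressable until it is handed out.
    __asan_poison_memory_region(begin_, capacity);
  }
  ~ToyArena() {
    __asan_unpoison_memory_region(begin_, end_ - begin_);
    delete[] begin_;
  }

  // Bump-allocate `bytes` plus a trailing red zone; only the requested
  // bytes become addressable, so touching past the block faults.
  void* Alloc(size_t bytes) {
    size_t rounded = RoundUp(bytes + kRedZoneBytes, 8);
    if (ptr_ + rounded > end_) {
      return nullptr;  // The real allocator obtains a new arena here.
    }
    uint8_t* ret = ptr_;
    ptr_ += rounded;
    __asan_unpoison_memory_region(ret, bytes);  // MEMORY_TOOL_MAKE_DEFINED.
    // [ret + bytes, ret + rounded) stays poisoned: that is the red zone.
    return ret;
  }

 private:
  uint8_t* begin_;
  uint8_t* ptr_;
  uint8_t* end_;
};

int main() {
  ToyArena arena(1024);
  char* s = static_cast<char*>(arena.Alloc(5));
  std::memcpy(s, "1234", 5);  // OK: within the 5 requested bytes.
  // s[5] = 'x';              // Would be an ASan report: red zone hit.
  return 0;
}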
diff --git a/runtime/base/arena_allocator.h b/runtime/base/arena_allocator.h
index b4f19ee8da..565b416b38 100644
--- a/runtime/base/arena_allocator.h
+++ b/runtime/base/arena_allocator.h
@@ -21,6 +21,7 @@
 #include <stddef.h>
 
 #include "base/bit_utils.h"
+#include "base/memory_tool.h"
 #include "debug_stack.h"
 #include "macros.h"
 #include "mutex.h"
@@ -149,6 +150,52 @@ class ArenaAllocatorStatsImpl {
 
 typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;
 
+template <bool kAvailable, bool kValgrind>
+class ArenaAllocatorMemoryToolCheckImpl {
+  // This is the generic template but since there is a partial specialization
+  // for kValgrind == false, this can be instantiated only for kValgrind == true.
+  static_assert(kValgrind, "This template can be instantiated only for Valgrind.");
+  static_assert(kAvailable, "Valgrind implies memory tool availability.");
+
+ public:
+  ArenaAllocatorMemoryToolCheckImpl() : is_running_on_valgrind_(RUNNING_ON_MEMORY_TOOL) { }
+  bool IsRunningOnMemoryTool() { return is_running_on_valgrind_; }
+
+ private:
+  const bool is_running_on_valgrind_;
+};
+
+template <bool kAvailable>
+class ArenaAllocatorMemoryToolCheckImpl<kAvailable, false> {
+ public:
+  ArenaAllocatorMemoryToolCheckImpl() { }
+  bool IsRunningOnMemoryTool() { return kAvailable; }
+};
+
+typedef ArenaAllocatorMemoryToolCheckImpl<kMemoryToolIsAvailable, kMemoryToolIsValgrind>
+    ArenaAllocatorMemoryToolCheck;
+
+class ArenaAllocatorMemoryTool : private ArenaAllocatorMemoryToolCheck {
+ public:
+  using ArenaAllocatorMemoryToolCheck::IsRunningOnMemoryTool;
+
+  void MakeDefined(void* ptr, size_t size) {
+    if (IsRunningOnMemoryTool()) {
+      MEMORY_TOOL_MAKE_DEFINED(ptr, size);
+    }
+  }
+  void MakeUndefined(void* ptr, size_t size) {
+    if (IsRunningOnMemoryTool()) {
+      MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
+    }
+  }
+  void MakeInaccessible(void* ptr, size_t size) {
+    if (IsRunningOnMemoryTool()) {
+      MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
+    }
+  }
+};
+
 class Arena {
  public:
   static constexpr size_t kDefaultSize = 128 * KB;
@@ -233,18 +280,24 @@ class ArenaPool {
   DISALLOW_COPY_AND_ASSIGN(ArenaPool);
 };
 
-class ArenaAllocator : private DebugStackRefCounter, private ArenaAllocatorStats {
+class ArenaAllocator
+    : private DebugStackRefCounter, private ArenaAllocatorStats, private ArenaAllocatorMemoryTool {
  public:
   explicit ArenaAllocator(ArenaPool* pool);
   ~ArenaAllocator();
 
+  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
+  using ArenaAllocatorMemoryTool::MakeDefined;
+  using ArenaAllocatorMemoryTool::MakeUndefined;
+  using ArenaAllocatorMemoryTool::MakeInaccessible;
+
   // Get adapter for use in STL containers. See arena_containers.h .
   ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
 
   // Returns zeroed memory.
   void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
-    if (UNLIKELY(is_running_on_memory_tool_)) {
-      return AllocValgrind(bytes, kind);
+    if (UNLIKELY(IsRunningOnMemoryTool())) {
+      return AllocWithMemoryTool(bytes, kind);
     }
     bytes = RoundUp(bytes, kAlignment);
     if (UNLIKELY(ptr_ + bytes > end_)) {
@@ -268,6 +321,7 @@ class ArenaAllocator : private DebugStackRefCounter, private ArenaAllocatorStats
     auto* end = reinterpret_cast<uint8_t*>(ptr) + ptr_size;
     // If we haven't allocated anything else, we can safely extend.
     if (end == ptr_) {
+      DCHECK(!IsRunningOnMemoryTool());  // Red zone prevents end == ptr_.
       const size_t size_delta = new_size - ptr_size;
       // Check remain space.
       const size_t remain = end_ - ptr_;
@@ -288,7 +342,7 @@ class ArenaAllocator : private DebugStackRefCounter, private ArenaAllocatorStats
     return static_cast<T*>(Alloc(length * sizeof(T), kind));
   }
 
-  void* AllocValgrind(size_t bytes, ArenaAllocKind kind);
+  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
 
   void ObtainNewArenaForAllocation(size_t allocation_size);
 
@@ -316,7 +370,6 @@ class ArenaAllocator : private DebugStackRefCounter, private ArenaAllocatorStats
   uint8_t* end_;
   uint8_t* ptr_;
   Arena* arena_head_;
-  bool is_running_on_memory_tool_;
 
   template <typename U>
   friend class ArenaAllocatorAdapter;
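The ArenaAllocatorMemoryToolCheckImpl pair above exists so the tool check costs nothing unless it has to: under ASan (or with no tool at all) the answer is the compile-time constant kMemoryToolIsAvailable, and only under Valgrind, where a binary cannot know at build time whether it is running under the tool, is RUNNING_ON_MEMORY_TOOL consulted at runtime and cached. A stripped-down sketch of that partial-specialization pattern follows; the names and the ProbeRuntime() stand-in are illustrative, not ART code.

#include <iostream>

// Stand-ins for the constants from memory_tool.h.
constexpr bool kToolAvailable = true;
constexpr bool kToolIsValgrind = false;

bool ProbeRuntime() { return false; }  // Pretend RUNNING_ON_MEMORY_TOOL.

// Generic template: instantiated only when the answer is runtime-dependent
// (Valgrind); the probe result is cached in a member once per object.
template <bool kAvailable, bool kValgrind>
class ToolCheckImpl {
  static_assert(kValgrind, "Only the Valgrind case uses the generic template.");

 public:
  ToolCheckImpl() : running_(ProbeRuntime()) {}
  bool IsRunning() const { return running_; }

 private:
  const bool running_;
};

// Partial specialization for kValgrind == false: the answer is known at
// compile time, the class is empty, and branches on IsRunning() fold away.
template <bool kAvailable>
class ToolCheckImpl<kAvailable, false> {
 public:
  bool IsRunning() const { return kAvailable; }
};

using ToolCheck = ToolCheckImpl<kToolAvailable, kToolIsValgrind>;

int main() {
  ToolCheck check;
  std::cout << "running on tool: " << check.IsRunning() << '\n';
  return 0;
}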
diff --git a/runtime/base/arena_containers.h b/runtime/base/arena_containers.h
index 810c1c4b66..e7ea09d1b0 100644
--- a/runtime/base/arena_containers.h
+++ b/runtime/base/arena_containers.h
@@ -161,7 +161,7 @@ class ArenaAllocatorAdapter : private ArenaAllocatorAdapterKind {
     return arena_allocator_->AllocArray<T>(n, ArenaAllocatorAdapterKind::Kind());
   }
   void deallocate(pointer p, size_type n) {
-    UNUSED(p, n);
+    arena_allocator_->MakeInaccessible(p, sizeof(T) * n);
   }
 
   void construct(pointer p, const_reference val) {
diff --git a/runtime/base/memory_tool.h b/runtime/base/memory_tool.h
index e0bdcfeced..e1a2e07aca 100644
--- a/runtime/base/memory_tool.h
+++ b/runtime/base/memory_tool.h
@@ -32,10 +32,12 @@
 #define MEMORY_TOOL_MAKE_NOACCESS(p, s) __asan_poison_memory_region(p, s)
 #define MEMORY_TOOL_MAKE_UNDEFINED(p, s) __asan_unpoison_memory_region(p, s)
 #define MEMORY_TOOL_MAKE_DEFINED(p, s) __asan_unpoison_memory_region(p, s)
+constexpr bool kMemoryToolIsAvailable = true;
 #else
 #define MEMORY_TOOL_MAKE_NOACCESS(p, s) do { (void)(p); (void)(s); } while (0)
 #define MEMORY_TOOL_MAKE_UNDEFINED(p, s) do { (void)(p); (void)(s); } while (0)
 #define MEMORY_TOOL_MAKE_DEFINED(p, s) do { (void)(p); (void)(s); } while (0)
+constexpr bool kMemoryToolIsAvailable = false;
 #endif
 
 #define ATTRIBUTE_NO_SANITIZE_ADDRESS __attribute__((no_sanitize_address))
@@ -54,6 +56,7 @@ constexpr size_t kMemoryToolStackGuardSizeScale = 2;
 #define MEMORY_TOOL_MAKE_DEFINED(p, s) VALGRIND_MAKE_MEM_DEFINED(p, s)
 #define ATTRIBUTE_NO_SANITIZE_ADDRESS
 #define RUNNING_ON_MEMORY_TOOL RUNNING_ON_VALGRIND
+constexpr bool kMemoryToolIsAvailable = true;
 constexpr bool kMemoryToolIsValgrind = true;
 constexpr bool kMemoryToolDetectsLeaks = true;
 constexpr bool kMemoryToolAddsRedzones = true;
diff --git a/runtime/base/scoped_arena_allocator.cc b/runtime/base/scoped_arena_allocator.cc
index d823edd6d2..31f96e4783 100644
--- a/runtime/base/scoped_arena_allocator.cc
+++ b/runtime/base/scoped_arena_allocator.cc
@@ -29,8 +29,7 @@ ArenaStack::ArenaStack(ArenaPool* arena_pool)
     bottom_arena_(nullptr),
     top_arena_(nullptr),
     top_ptr_(nullptr),
-    top_end_(nullptr),
-    is_running_on_memory_tool_(RUNNING_ON_MEMORY_TOOL > 0) {
+    top_end_(nullptr) {
 }
 
 ArenaStack::~ArenaStack() {
@@ -91,7 +90,7 @@ void ArenaStack::UpdateBytesAllocated() {
   }
 }
 
-void* ArenaStack::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
+void* ArenaStack::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
   size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
   uint8_t* ptr = top_ptr_;
   if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
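The two deallocate() changes (in arena_containers.h above and scoped_arena_containers.h below) are the behavioral heart of the commit: arena-backed containers never return memory to the system, so a use-after-free through a stale pointer or iterator previously went unnoticed; now the freed range is poisoned. A hypothetical std-compatible allocator showing the same effect with raw ASan calls is sketched below (PoisoningAllocator is illustrative, not ART code; build with -fsanitize=address).

#include <sanitizer/asan_interface.h>

#include <cstdlib>
#include <vector>

// Illustrative allocator: deallocate() poisons the block instead of freeing
// it, mirroring ArenaAllocatorAdapter::deallocate() -> MakeInaccessible().
// (Blocks are intentionally leaked; an arena reclaims them in bulk later.)
template <typename T>
struct PoisoningAllocator {
  using value_type = T;

  PoisoningAllocator() = default;
  template <typename U>
  PoisoningAllocator(const PoisoningAllocator<U>&) {}

  T* allocate(std::size_t n) {
    return static_cast<T*>(std::malloc(n * sizeof(T)));
  }
  void deallocate(T* p, std::size_t n) {
    __asan_poison_memory_region(p, n * sizeof(T));  // MEMORY_TOOL_MAKE_NOACCESS.
  }
};

template <typename T, typename U>
bool operator==(const PoisoningAllocator<T>&, const PoisoningAllocator<U>&) { return true; }
template <typename T, typename U>
bool operator!=(const PoisoningAllocator<T>&, const PoisoningAllocator<U>&) { return false; }

int main() {
  int* stale = nullptr;
  {
    std::vector<int, PoisoningAllocator<int>> v = {1, 2, 3};
    stale = v.data();
  }  // Destructor deallocates; the old buffer is now poisoned.
  // *stale would now produce an ASan report instead of silently reading
  // "freed" arena memory.
  (void)stale;
  return 0;
}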
diff --git a/runtime/base/scoped_arena_allocator.h b/runtime/base/scoped_arena_allocator.h
index ca514e411c..4f513707bb 100644
--- a/runtime/base/scoped_arena_allocator.h
+++ b/runtime/base/scoped_arena_allocator.h
@@ -32,11 +32,16 @@ template <typename T> class ScopedArenaAllocatorAdapter;
 
 // Holds a list of Arenas for use by ScopedArenaAllocator stack.
-class ArenaStack : private DebugStackRefCounter {
+class ArenaStack : private DebugStackRefCounter, private ArenaAllocatorMemoryTool {
  public:
   explicit ArenaStack(ArenaPool* arena_pool);
   ~ArenaStack();
 
+  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
+  using ArenaAllocatorMemoryTool::MakeDefined;
+  using ArenaAllocatorMemoryTool::MakeUndefined;
+  using ArenaAllocatorMemoryTool::MakeInaccessible;
+
   void Reset();
 
   size_t PeakBytesAllocated() {
@@ -64,8 +69,8 @@ class ArenaStack : private DebugStackRefCounter {
   // Private - access via ScopedArenaAllocator or ScopedArenaAllocatorAdapter.
   void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
-    if (UNLIKELY(is_running_on_memory_tool_)) {
-      return AllocValgrind(bytes, kind);
+    if (UNLIKELY(IsRunningOnMemoryTool())) {
+      return AllocWithMemoryTool(bytes, kind);
     }
     size_t rounded_bytes = RoundUp(bytes, 8);
     uint8_t* ptr = top_ptr_;
@@ -80,7 +85,7 @@ class ArenaStack : private DebugStackRefCounter {
   uint8_t* AllocateFromNextArena(size_t rounded_bytes);
   void UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats);
   void UpdateBytesAllocated();
-  void* AllocValgrind(size_t bytes, ArenaAllocKind kind);
+  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
 
   StatsAndPool stats_and_pool_;
   Arena* bottom_arena_;
@@ -88,8 +93,6 @@ class ArenaStack : private DebugStackRefCounter {
   uint8_t* top_ptr_;
   uint8_t* top_end_;
 
-  const bool is_running_on_memory_tool_;
-
   friend class ScopedArenaAllocator;
   template <typename T> friend class ScopedArenaAllocatorAdapter;
diff --git a/runtime/base/scoped_arena_containers.h b/runtime/base/scoped_arena_containers.h
index 82db60e4e4..eecc55f6b2 100644
--- a/runtime/base/scoped_arena_containers.h
+++ b/runtime/base/scoped_arena_containers.h
@@ -153,9 +153,8 @@ class ScopedArenaAllocatorAdapter
         ArenaAllocatorAdapterKind::Kind()));
   }
   void deallocate(pointer p, size_type n) {
-    UNUSED(p);
-    UNUSED(n);
     DebugStackIndirectTopRef::CheckTop();
+    arena_stack_->MakeInaccessible(p, sizeof(T) * n);
   }
 
   void construct(pointer p, const_reference val) {
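A note on the shape of the refactoring: both ArenaAllocator and ArenaStack previously carried their own bool is_running_on_memory_tool_ member; both now privately inherit ArenaAllocatorMemoryTool and re-export only the names they want via using-declarations. One plausible benefit, sketched below, is that when no memory tool is compiled in the mixin is an empty base, so the empty-base optimization removes the per-object footprint the bool (plus padding) used to impose. The classes here are illustrative, and the sizeof relation assumes a typical ABI with EBO.

#include <cstddef>

// Mixin with no data members in the "no tool compiled in" configuration:
// the check is a compile-time constant and every helper is a no-op.
class EmptyToolMixin {
 public:
  bool IsRunningOnMemoryTool() const { return false; }
  void MakeInaccessible(void*, std::size_t) {}  // No-op without a tool.
};

// Old shape: a flag stored per object, padded out to a full word here.
class AllocatorWithFlag {
  void* ptr_;
  bool is_running_on_memory_tool_;
};

// New shape: private inheritance, selective re-export, no data members.
class AllocatorWithMixin : private EmptyToolMixin {
 public:
  using EmptyToolMixin::IsRunningOnMemoryTool;
  using EmptyToolMixin::MakeInaccessible;

 private:
  void* ptr_;
};

// Holds on common ABIs (Itanium, MSVC), where an empty base occupies no
// storage: 8 bytes vs. 16 on a typical 64-bit target.
static_assert(sizeof(AllocatorWithMixin) < sizeof(AllocatorWithFlag),
              "empty-base optimization removes the flag's footprint");

int main() { return 0; }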