Fix valgrind tests: mark allocated space as defined.

An allocation that does not fit into the remaining space of the
current arena goes through AllocFromNewArena(). That path returned
early, before the MEMORY_TOOL_MAKE_DEFINED() call at the end of
AllocWithMemoryTool(), so memory handed out from a fresh arena
(which starts out fully inaccessible) was never marked defined and
Valgrind flagged accesses to it. Restructure the function so that
both the in-arena and the new-arena paths fall through to a single
MEMORY_TOOL_MAKE_DEFINED() call, and add a regression test.

Follow-up to
https://android-review.googlesource.com/219700
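
For illustration, a minimal sketch of the annotation pattern the
allocator relies on. This is illustrative code, not ART's: it
assumes the <valgrind/memcheck.h> client requests that the
MEMORY_TOOL_MAKE_* macros are expected to map to when running
under Valgrind, and the function name is made up for the sketch.

  #include <valgrind/memcheck.h>
  #include <cstddef>
  #include <cstdint>

  // Hand out `bytes` from a freshly retrieved, pre-zeroed arena.
  uint8_t* TakeFromFreshArena(uint8_t* arena, size_t arena_size,
                              size_t bytes) {
    // A new arena starts out fully inaccessible; red zones and
    // padding between allocations stay that way.
    VALGRIND_MAKE_MEM_NOACCESS(arena, arena_size);
    // Expose only the bytes actually handed out. The memory is
    // already zeroed, so mark it DEFINED rather than UNDEFINED.
    VALGRIND_MAKE_MEM_DEFINED(arena, bytes);
    return arena;
  }

The old code performed the NOACCESS step on the new-arena path but
returned before the DEFINED step; the new MakeDefined test exercises
exactly that path.
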
Bug: 28173563
Bug: 28256882
Change-Id: I4e8e8d3202fd01ab48d86b3a6b92302524df66bb
diff --git a/runtime/base/arena_allocator.cc b/runtime/base/arena_allocator.cc
index 5b6801f..b84e29f 100644
--- a/runtime/base/arena_allocator.cc
+++ b/runtime/base/arena_allocator.cc
@@ -321,24 +321,26 @@
   // and padding between allocations marked as inaccessible.
   size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
   ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
+  uint8_t* ret;
   if (UNLIKELY(rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
-    void* ret = AllocFromNewArena(rounded_bytes);
+    ret = AllocFromNewArena(rounded_bytes);
+    uint8_t* noaccess_begin = ret + bytes;
+    uint8_t* noaccess_end;
     if (ret == arena_head_->Begin()) {
       DCHECK(ptr_ - rounded_bytes == ret);
-      uint8_t* noaccess_begin = ptr_ - rounded_bytes + bytes;
-      MEMORY_TOOL_MAKE_NOACCESS(noaccess_begin, end_ - noaccess_begin);
+      noaccess_end = end_;
     } else {
       // We're still using the old arena but `ret` comes from a new one just after it.
       DCHECK(arena_head_->next_ != nullptr);
       DCHECK(ret == arena_head_->next_->Begin());
       DCHECK_EQ(rounded_bytes, arena_head_->next_->GetBytesAllocated());
-      uint8_t* noaccess_begin = arena_head_->next_->Begin() + bytes;
-      MEMORY_TOOL_MAKE_NOACCESS(noaccess_begin, arena_head_->next_->End() - noaccess_begin);
+      noaccess_end = arena_head_->next_->End();
     }
-    return ret;
+    MEMORY_TOOL_MAKE_NOACCESS(noaccess_begin, noaccess_end - noaccess_begin);
+  } else {
+    ret = ptr_;
+    ptr_ += rounded_bytes;
   }
-  uint8_t* ret = ptr_;
-  ptr_ += rounded_bytes;
   MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
   // Check that the memory is already zeroed out.
   DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
@@ -351,7 +353,7 @@
   pool_->FreeArenaChain(arena_head_);
 }
 
-void* ArenaAllocator::AllocFromNewArena(size_t bytes) {
+uint8_t* ArenaAllocator::AllocFromNewArena(size_t bytes) {
   Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, bytes));
   DCHECK(new_arena != nullptr);
   DCHECK_LE(bytes, new_arena->Size());
diff --git a/runtime/base/arena_allocator.h b/runtime/base/arena_allocator.h
index 4b745f0..6c1a898 100644
--- a/runtime/base/arena_allocator.h
+++ b/runtime/base/arena_allocator.h
@@ -364,7 +364,7 @@
 
  private:
   void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
-  void* AllocFromNewArena(size_t bytes);
+  uint8_t* AllocFromNewArena(size_t bytes);
 
   static constexpr size_t kAlignment = 8;
 
diff --git a/runtime/base/arena_allocator_test.cc b/runtime/base/arena_allocator_test.cc
index c07c0ed..9de3cc4 100644
--- a/runtime/base/arena_allocator_test.cc
+++ b/runtime/base/arena_allocator_test.cc
@@ -41,6 +41,28 @@
   EXPECT_EQ(2U, bv.GetStorageSize());
 }
 
+TEST_F(ArenaAllocatorTest, MakeDefined) {
+  // Regression test to make sure we mark the allocated area defined.
+  ArenaPool pool;
+  static constexpr size_t kSmallArraySize = 10;
+  static constexpr size_t kLargeArraySize = 50;
+  uint32_t* small_array;
+  {
+    // Allocate a small array from an arena and release it.
+    ArenaAllocator arena(&pool);
+    small_array = arena.AllocArray<uint32_t>(kSmallArraySize);
+    ASSERT_EQ(0u, small_array[kSmallArraySize - 1u]);
+  }
+  {
+    // Reuse the previous arena and allocate more than the previous allocation including its red zone.
+    ArenaAllocator arena(&pool);
+    uint32_t* large_array = arena.AllocArray<uint32_t>(kLargeArraySize);
+    ASSERT_EQ(0u, large_array[kLargeArraySize - 1u]);
+    // Verify that the allocation was made on the same arena.
+    ASSERT_EQ(small_array, large_array);
+  }
+}
+
 TEST_F(ArenaAllocatorTest, LargeAllocations) {
   {
     ArenaPool pool;