author Vladimir Marko <vmarko@google.com> 2015-11-11 18:44:25 +0000
committer Gerrit Code Review <noreply-gerritcodereview@google.com> 2015-11-11 18:44:25 +0000
commit 00bcbcfdc3ba28b4b985d8a8029d677579e902ff
tree dcb8969dd83f5c06a6e006ffd21465a64da462fb
parent a828aadd6dd9a5e73daf9a167b71c88bc2bdfbd7
parent 75001934af9fa3f2538f564bb4073d711809f1ff
Merge "ART: Fix arena allocation for valgrind."
-rw-r--r--  runtime/base/arena_allocator.cc        12
-rw-r--r--  runtime/base/scoped_arena_allocator.cc  5
2 files changed, 10 insertions, 7 deletions
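
The change replaces per-allocation UNDEFINED/NOACCESS bookkeeping with a scheme that poisons an entire freshly obtained arena up front and then unpoisons only the bytes actually handed out. For context, the MEMORY_TOOL_MAKE_* macros used below wrap the memory tool's client requests; the following is only a sketch of how such a mapping could look when backed by valgrind's memcheck (ART's actual definitions live in runtime/base/memory_tool.h and may differ, e.g. to also cover ASan):

    // Sketch only: plausible valgrind-backed definitions for the macros used
    // in this change; not the actual ART definitions.
    #include <valgrind/memcheck.h>

    // Poison a range: memcheck reports any subsequent read or write.
    #define MEMORY_TOOL_MAKE_NOACCESS(p, s)  VALGRIND_MAKE_MEM_NOACCESS(p, s)
    // Addressable but uninitialized: using the contents is reported.
    #define MEMORY_TOOL_MAKE_UNDEFINED(p, s) VALGRIND_MAKE_MEM_UNDEFINED(p, s)
    // Addressable and initialized: fully usable.
    #define MEMORY_TOOL_MAKE_DEFINED(p, s)   VALGRIND_MAKE_MEM_DEFINED(p, s)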
diff --git a/runtime/base/arena_allocator.cc b/runtime/base/arena_allocator.cc
index 71afa0f709..771b2d0509 100644
--- a/runtime/base/arena_allocator.cc
+++ b/runtime/base/arena_allocator.cc
@@ -316,22 +316,22 @@ void ArenaAllocator::UpdateBytesAllocated() {
}
void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
+ // We mark all memory for a newly retrieved arena as inaccessible and then
+ // mark only the actually allocated memory as defined. That leaves red zones
+ // and padding between allocations marked as inaccessible.
size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
// Obtain a new block.
ObtainNewArenaForAllocation(rounded_bytes);
CHECK(ptr_ != nullptr);
- MEMORY_TOOL_MAKE_UNDEFINED(ptr_, end_ - ptr_);
+ MEMORY_TOOL_MAKE_NOACCESS(ptr_, end_ - ptr_);
}
ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
uint8_t* ret = ptr_;
ptr_ += rounded_bytes;
- // Check that the memory is already zeroed out.
- for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
- CHECK_EQ(*ptr, 0U);
- }
MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
- MEMORY_TOOL_MAKE_NOACCESS(ret + bytes, rounded_bytes - bytes);
+ // Check that the memory is already zeroed out.
+ DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
return ret;
}
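
The new ordering in ArenaAllocator::AllocWithMemoryTool can be exercised outside ART. Below is a self-contained sketch (hypothetical demo code, not part of this change) of the same scheme: poison the fresh arena, bump-allocate, then define only the requested bytes, so the red zone and rounding padding between allocations stay inaccessible. Run under valgrind, the out-of-bounds read at the end is reported as an access to unaddressable memory:

    // Hypothetical standalone demo of the poison-then-define scheme above.
    #include <cstdint>
    #include <cstdlib>
    #include <cstring>
    #include <valgrind/memcheck.h>

    constexpr size_t kRedZoneBytes = 8;

    int main() {
      const size_t arena_size = 4096;
      // calloc gives zeroed memory, matching the DCHECK's assumption above.
      uint8_t* arena = static_cast<uint8_t*>(calloc(arena_size, 1));
      uint8_t* ptr = arena;

      // Newly obtained arena: everything is inaccessible until handed out.
      VALGRIND_MAKE_MEM_NOACCESS(arena, arena_size);

      // Allocate 20 bytes; round up to cover the red zone, as in the ART code:
      // RoundUp(bytes + kRedZoneBytes, 8).
      size_t bytes = 20;
      size_t rounded_bytes = (bytes + kRedZoneBytes + 7) & ~size_t{7};
      uint8_t* ret = ptr;
      ptr += rounded_bytes;

      // Only the requested bytes become usable; the red zone and the rounding
      // padding between allocations remain NOACCESS.
      VALGRIND_MAKE_MEM_DEFINED(ret, bytes);

      memset(ret, 0xAB, bytes);   // fine: within the defined allocation
      uint8_t oob = ret[bytes];   // red zone: memcheck reports this read
      return oob;
    }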
diff --git a/runtime/base/scoped_arena_allocator.cc b/runtime/base/scoped_arena_allocator.cc
index 31f96e4783..90c6ee34ec 100644
--- a/runtime/base/scoped_arena_allocator.cc
+++ b/runtime/base/scoped_arena_allocator.cc
@@ -91,16 +91,19 @@ void ArenaStack::UpdateBytesAllocated() {
}
void* ArenaStack::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
+ // We mark all memory for a newly retrieved arena as inaccessible and then
+ // mark only the actually allocated memory as defined. That leaves red zones
+ // and padding between allocations marked as inaccessible.
size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
uint8_t* ptr = top_ptr_;
if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
ptr = AllocateFromNextArena(rounded_bytes);
CHECK(ptr != nullptr) << "Failed to allocate memory";
+ MEMORY_TOOL_MAKE_NOACCESS(ptr, top_end_ - ptr);
}
CurrentStats()->RecordAlloc(bytes, kind);
top_ptr_ = ptr + rounded_bytes;
MEMORY_TOOL_MAKE_UNDEFINED(ptr, bytes);
- MEMORY_TOOL_MAKE_NOACCESS(ptr + bytes, rounded_bytes - bytes);
return ptr;
}
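
One asymmetry worth noting between the two hunks: ArenaAllocator marks the returned bytes DEFINED (it checks that they are zeroed, so treating them as initialized is sound), while ArenaStack marks them UNDEFINED, presumably because scoped-arena memory is reused and may hold stale contents, so memcheck should still flag any use of it before it is written. A hypothetical demo (not ART code) of what the two markings mean to the tool:

    #include <cstdint>
    #include <cstdlib>
    #include <valgrind/memcheck.h>

    int main() {
      uint8_t* p = static_cast<uint8_t*>(calloc(16, 1));

      // Like ArenaAllocator: the bytes are known-zeroed, so mark them DEFINED.
      VALGRIND_MAKE_MEM_DEFINED(p, 8);
      uint8_t a = p[0];             // no report: the value is defined

      // Like ArenaStack: contents may be stale, so mark them UNDEFINED.
      VALGRIND_MAKE_MEM_UNDEFINED(p + 8, 8);
      uint8_t b = p[8];             // copying is not itself reported, but...
      int rc = (a == b) ? 1 : 0;    // ...memcheck flags this branch:
                                    // "Conditional jump or move depends on
                                    //  uninitialised value(s)"
      free(p);
      return rc;
    }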