ART: Enforce arena alignment in Realloc
Enforce the expected alignment in Realloc when the given buffer
can be expanded in place: round the old and new sizes up to
kAlignment so that ptr_ stays aligned after the expansion.
Add debug checks to verify the alignment guarantees.
Change-Id: I7dbaf3ca0896f714cff07ff08f067a7c088a6c8d
Test: m test-art-host
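
For reference, the in-place expansion path now compares the remaining
space against sizes rounded up to kAlignment, so ptr_ can never end up
misaligned after a successful Realloc. Below is a minimal standalone
sketch of that arithmetic, not the ART code itself: the ToyArena struct,
its buffer, and the local RoundUp helper are illustrative assumptions
that only mirror the logic in the diff.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>

// Assumed 8-byte alignment, mirroring ArenaAllocator::kAlignment in the diff.
static constexpr size_t kAlignment = 8;

// Round x up to the next multiple of n (n must be a power of two).
static constexpr size_t RoundUp(size_t x, size_t n) {
  return (x + n - 1) & ~(n - 1);
}

// Toy bump-pointer arena used only to illustrate the Realloc logic.
struct ToyArena {
  uint8_t* begin_;
  uint8_t* ptr_;
  uint8_t* end_;

  void* Alloc(size_t bytes) {
    bytes = RoundUp(bytes, kAlignment);  // Alloc always hands out aligned sizes.
    if (static_cast<size_t>(end_ - ptr_) < bytes) return nullptr;
    uint8_t* ret = ptr_;
    ptr_ += bytes;
    return ret;
  }

  void* Realloc(void* ptr, size_t ptr_size, size_t new_size) {
    // Mirror of the patched logic: compare against the *aligned* old size,
    // so the in-place fast path keeps ptr_ on a kAlignment boundary.
    const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
    uint8_t* end = reinterpret_cast<uint8_t*>(ptr) + aligned_ptr_size;
    if (end == ptr_) {  // ptr was the last allocation; try to grow in place.
      const size_t aligned_new_size = RoundUp(new_size, kAlignment);
      const size_t size_delta = aligned_new_size - aligned_ptr_size;
      if (static_cast<size_t>(end_ - ptr_) >= size_delta) {
        ptr_ += size_delta;
        assert(reinterpret_cast<uintptr_t>(ptr_) % kAlignment == 0);
        return ptr;
      }
    }
    void* new_ptr = Alloc(new_size);  // Alloc rounds new_size up itself.
    if (new_ptr != nullptr && ptr != nullptr) memcpy(new_ptr, ptr, ptr_size);
    return new_ptr;
  }
};

int main() {
  alignas(kAlignment) uint8_t buffer[256];
  ToyArena arena{buffer, buffer, buffer + sizeof(buffer)};
  void* p = arena.Alloc(10);           // Rounded up to 16 internally.
  void* q = arena.Realloc(p, 10, 20);  // Grows in place: same pointer returned.
  assert(p == q);
  assert(reinterpret_cast<uintptr_t>(arena.ptr_) % kAlignment == 0);
  return 0;
}
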
diff --git a/runtime/base/arena_allocator.cc b/runtime/base/arena_allocator.cc
index b84e29f..aeb990c 100644
--- a/runtime/base/arena_allocator.cc
+++ b/runtime/base/arena_allocator.cc
@@ -163,6 +163,7 @@
MallocArena::MallocArena(size_t size) {
memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
CHECK(memory_ != nullptr); // Abort on OOM.
+ DCHECK_ALIGNED(memory_, ArenaAllocator::kAlignment);
size_ = size;
}
@@ -370,6 +371,7 @@
arena_head_ = new_arena;
// Update our internal data structures.
begin_ = new_arena->Begin();
+ DCHECK_ALIGNED(begin_, kAlignment);
ptr_ = begin_ + bytes;
end_ = new_arena->End();
}
diff --git a/runtime/base/arena_allocator.h b/runtime/base/arena_allocator.h
index 6c1a898..3fad96b 100644
--- a/runtime/base/arena_allocator.h
+++ b/runtime/base/arena_allocator.h
@@ -310,6 +310,7 @@
return AllocFromNewArena(bytes);
}
uint8_t* ret = ptr_;
+ DCHECK_ALIGNED(ret, kAlignment);
ptr_ += bytes;
return ret;
}
@@ -319,20 +320,24 @@
ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
DCHECK_GE(new_size, ptr_size);
DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
- auto* end = reinterpret_cast<uint8_t*>(ptr) + ptr_size;
+ // We always allocate aligned.
+ const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
+ auto* end = reinterpret_cast<uint8_t*>(ptr) + aligned_ptr_size;
// If we haven't allocated anything else, we can safely extend.
if (end == ptr_) {
DCHECK(!IsRunningOnMemoryTool()); // Red zone prevents end == ptr_.
- const size_t size_delta = new_size - ptr_size;
+ const size_t aligned_new_size = RoundUp(new_size, kAlignment);
+ const size_t size_delta = aligned_new_size - aligned_ptr_size;
// Check remain space.
const size_t remain = end_ - ptr_;
if (remain >= size_delta) {
ptr_ += size_delta;
ArenaAllocatorStats::RecordAlloc(size_delta, kind);
+ DCHECK_ALIGNED(ptr_, kAlignment);
return ptr;
}
}
- auto* new_ptr = Alloc(new_size, kind);
+ auto* new_ptr = Alloc(new_size, kind); // Note: Alloc will take care of aligning new_size.
memcpy(new_ptr, ptr, ptr_size);
// TODO: Call free on ptr if linear alloc supports free.
return new_ptr;
@@ -362,11 +367,12 @@
bool Contains(const void* ptr) const;
+ static constexpr size_t kAlignment = 8;
+
private:
void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
uint8_t* AllocFromNewArena(size_t bytes);
- static constexpr size_t kAlignment = 8;
void UpdateBytesAllocated();