ART: Use ScopedArenaAllocator for pass-local data.
Passes using a local ArenaAllocator were hiding their memory
usage from the allocation counting, making it difficult to
track down where memory was used. Using ScopedArenaAllocator
makes that memory usage visible to the counters.
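A minimal sketch of the resulting pattern (assuming the pass can
reach the graph's ArenaStack, which this change exposes; pass and
field names are hypothetical):

    #include "base/scoped_arena_allocator.h"
    #include "base/scoped_arena_containers.h"

    void MyPass::Run() {
      // Pass-local allocations are drawn from the shared ArenaStack and
      // tagged with an ArenaAllocKind, so they show up in the counts
      // when kArenaAllocatorCountAllocations is enabled.
      ScopedArenaAllocator allocator(graph_->GetArenaStack());
      ScopedArenaVector<HInstruction*> worklist(allocator.Adapter(kArenaAllocMisc));
      // ... worklist memory is released when `allocator` goes out of scope ...
    }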
This changes the HGraph constructor, which requires a lot of
changes in tests. Refactor those tests to limit the amount of
work needed the next time that constructor changes.
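For illustration, the refactoring funnels graph construction through
a single helper along these lines (names and parameters hypothetical;
the real test fixture may differ):

    // The one place in the tests that knows the HGraph constructor
    // signature; a future signature change is then a one-line fix here
    // rather than an edit in every test.
    HGraph* CreateGraph(ArenaAllocator* allocator,
                        ArenaStack* arena_stack,
                        const DexFile& dex_file) {
      return new (allocator) HGraph(allocator,
                                    arena_stack,
                                    dex_file,
                                    /* method_idx */ 0u,
                                    kRuntimeISA);
    }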
Test: m test-art-host-gtest
Test: testrunner.py --host
Test: Build with kArenaAllocatorCountAllocations = true.
Bug: 64312607
Change-Id: I34939e4086b500d6e827ff3ef2211d1a421ac91a
diff --git a/compiler/utils/assembler.cc b/compiler/utils/assembler.cc
index 25eca23..944c64b 100644
--- a/compiler/utils/assembler.cc
+++ b/compiler/utils/assembler.cc
@@ -25,10 +25,10 @@
namespace art {
-AssemblerBuffer::AssemblerBuffer(ArenaAllocator* arena)
- : arena_(arena) {
+AssemblerBuffer::AssemblerBuffer(ArenaAllocator* allocator)
+ : allocator_(allocator) {
static const size_t kInitialBufferCapacity = 4 * KB;
- contents_ = arena_->AllocArray<uint8_t>(kInitialBufferCapacity, kArenaAllocAssembler);
+ contents_ = allocator_->AllocArray<uint8_t>(kInitialBufferCapacity, kArenaAllocAssembler);
cursor_ = contents_;
limit_ = ComputeLimit(contents_, kInitialBufferCapacity);
fixup_ = nullptr;
@@ -45,8 +45,8 @@
AssemblerBuffer::~AssemblerBuffer() {
- if (arena_->IsRunningOnMemoryTool()) {
- arena_->MakeInaccessible(contents_, Capacity());
+ if (allocator_->IsRunningOnMemoryTool()) {
+ allocator_->MakeInaccessible(contents_, Capacity());
}
}
@@ -81,7 +81,7 @@
// Allocate the new data area and copy contents of the old one to it.
contents_ = reinterpret_cast<uint8_t*>(
- arena_->Realloc(contents_, old_capacity, new_capacity, kArenaAllocAssembler));
+ allocator_->Realloc(contents_, old_capacity, new_capacity, kArenaAllocAssembler));
// Update the cursor and recompute the limit.
cursor_ = contents_ + old_size;
diff --git a/compiler/utils/assembler.h b/compiler/utils/assembler.h
index 314ff8c..dbd35ab 100644
--- a/compiler/utils/assembler.h
+++ b/compiler/utils/assembler.h
@@ -89,11 +89,11 @@
class AssemblerBuffer {
public:
- explicit AssemblerBuffer(ArenaAllocator* arena);
+ explicit AssemblerBuffer(ArenaAllocator* allocator);
~AssemblerBuffer();
- ArenaAllocator* GetArena() {
- return arena_;
+ ArenaAllocator* GetAllocator() {
+ return allocator_;
}
// Basic support for emitting, loading, and storing.
@@ -252,7 +252,7 @@
// for a single, fast space check per instruction.
static const int kMinimumGap = 32;
- ArenaAllocator* arena_;
+ ArenaAllocator* allocator_;
uint8_t* contents_;
uint8_t* cursor_;
uint8_t* limit_;
@@ -392,8 +392,8 @@
*/
DebugFrameOpCodeWriterForAssembler& cfi() { return cfi_; }
- ArenaAllocator* GetArena() {
- return buffer_.GetArena();
+ ArenaAllocator* GetAllocator() {
+ return buffer_.GetAllocator();
}
AssemblerBuffer* GetBuffer() {
@@ -401,7 +401,7 @@
}
protected:
- explicit Assembler(ArenaAllocator* arena) : buffer_(arena), cfi_(this) {}
+ explicit Assembler(ArenaAllocator* allocator) : buffer_(allocator), cfi_(this) {}
AssemblerBuffer buffer_;
diff --git a/compiler/utils/x86/jni_macro_assembler_x86.cc b/compiler/utils/x86/jni_macro_assembler_x86.cc
index 2b3c65b..7e29c4a 100644
--- a/compiler/utils/x86/jni_macro_assembler_x86.cc
+++ b/compiler/utils/x86/jni_macro_assembler_x86.cc
@@ -518,7 +518,7 @@
}
void X86JNIMacroAssembler::ExceptionPoll(ManagedRegister /*scratch*/, size_t stack_adjust) {
- X86ExceptionSlowPath* slow = new (__ GetArena()) X86ExceptionSlowPath(stack_adjust);
+ X86ExceptionSlowPath* slow = new (__ GetAllocator()) X86ExceptionSlowPath(stack_adjust);
__ GetBuffer()->EnqueueSlowPath(slow);
__ fs()->cmpl(Address::Absolute(Thread::ExceptionOffset<kX86PointerSize>()), Immediate(0));
__ j(kNotEqual, slow->Entry());
diff --git a/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc b/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc
index 72d9c43..5766f9d 100644
--- a/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc
+++ b/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc
@@ -584,9 +584,10 @@
};
void X86_64JNIMacroAssembler::ExceptionPoll(ManagedRegister /*scratch*/, size_t stack_adjust) {
- X86_64ExceptionSlowPath* slow = new (__ GetArena()) X86_64ExceptionSlowPath(stack_adjust);
+ X86_64ExceptionSlowPath* slow = new (__ GetAllocator()) X86_64ExceptionSlowPath(stack_adjust);
__ GetBuffer()->EnqueueSlowPath(slow);
- __ gs()->cmpl(Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>(), true), Immediate(0));
+ __ gs()->cmpl(Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>(), true),
+ Immediate(0));
__ j(kNotEqual, slow->Entry());
}
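
For context on the `new (__ GetAllocator())` placement form above:
slow paths derive from ArenaObject, which roughly provides the
following (simplified sketch; the real class also accepts a
ScopedArenaAllocator and forbids ordinary delete):

    // Placement new draws the object from the arena and tags it with the
    // allocation kind, which is what the allocation counting reports on.
    template <ArenaAllocKind kAllocKind>
    class ArenaObject {
     public:
      void* operator new(size_t size, ArenaAllocator* allocator) {
        return allocator->Alloc(size, kAllocKind);
      }
      void operator delete(void*, size_t) {
        LOG(FATAL) << "UNREACHABLE";
        UNREACHABLE();
      }
    };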