Pass `ArenaAllocator` to JNI compiler.

Take the allocator from the caller instead of creating a fresh
`MallocArenaPool` and `ArenaAllocator` inside the JNI compiler for
every compilation, so a long-lived pool can recycle arenas across
JNI stub compilations.

Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Bug: 181943478
Change-Id: I7d0b51941116ab0ad90f7e509577a7a3f32550ac
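
As background for the diff below, here is a minimal, self-contained
sketch of the pattern (not ART code: `ArenaPool`, `ArenaAllocator`, and
the `CompileJniStub*` functions are simplified stand-ins for
`art::MallocArenaPool`, `art::ArenaAllocator`, and
`ArtQuickJniCompileMethod`). A pool created inside the compile function
dies with the call, so its arenas are never reused; a caller-owned pool
can recycle them:

    // Minimal sketch only: hypothetical stand-ins, not the real ART types.
    #include <cstdio>
    #include <memory>
    #include <vector>

    // A pool that hands out fixed-size memory blocks and keeps returned
    // blocks so later allocators can reuse them.
    struct ArenaPool {
      std::vector<std::unique_ptr<char[]>> free_arenas;
      int fresh_allocations = 0;

      char* AllocArena() {
        if (!free_arenas.empty()) {
          char* arena = free_arenas.back().release();
          free_arenas.pop_back();
          return arena;  // Recycled block: no new allocation.
        }
        ++fresh_allocations;
        return new char[16 * 1024];
      }
      void ReleaseArena(char* arena) { free_arenas.emplace_back(arena); }
    };

    // Borrows one arena from the pool for the allocator's lifetime.
    struct ArenaAllocator {
      explicit ArenaAllocator(ArenaPool* pool)
          : pool(pool), arena(pool->AllocArena()) {}
      ~ArenaAllocator() { pool->ReleaseArena(arena); }
      ArenaPool* pool;
      char* arena;
    };

    // Old shape: a brand-new pool (and arena) per call; nothing survives
    // for the next compilation to reuse.
    void CompileJniStubOld() {
      ArenaPool pool;
      ArenaAllocator allocator(&pool);
      (void)allocator;  // ... emit the JNI stub using `allocator` ...
    }

    // New shape: the caller supplies the allocator, so one long-lived
    // pool can recycle arenas across many compilations.
    void CompileJniStubNew(ArenaAllocator* allocator) {
      (void)allocator;  // ... emit the JNI stub using `allocator` ...
    }

    int main() {
      ArenaPool pool;  // One pool shared by all compilations.
      for (int i = 0; i < 1000; ++i) {
        ArenaAllocator allocator(&pool);
        CompileJniStubNew(&allocator);
      }
      // Prints 1: one arena, recycled 999 times. The old shape would
      // have allocated 1000.
      std::printf("fresh arena allocations: %d\n", pool.fresh_allocations);
    }

The signature change in the diff is exactly this move from the old
shape to the new one.
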
diff --git a/compiler/jni/quick/jni_compiler.cc b/compiler/jni/quick/jni_compiler.cc
index 2a33858..6cb5021 100644
--- a/compiler/jni/quick/jni_compiler.cc
+++ b/compiler/jni/quick/jni_compiler.cc
@@ -28,7 +28,6 @@
#include "base/enums.h"
#include "base/logging.h" // For VLOG.
#include "base/macros.h"
-#include "base/malloc_arena_pool.h"
#include "base/memory_region.h"
#include "base/utils.h"
#include "calling_convention.h"
@@ -84,7 +83,8 @@
static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& compiler_options,
uint32_t access_flags,
uint32_t method_idx,
- const DexFile& dex_file) {
+ const DexFile& dex_file,
+ ArenaAllocator* allocator) {
constexpr size_t kRawPointerSize = static_cast<size_t>(kPointerSize);
const bool is_native = (access_flags & kAccNative) != 0;
CHECK(is_native);
@@ -143,12 +143,9 @@
}
}
- MallocArenaPool pool;
- ArenaAllocator allocator(&pool);
-
// Calling conventions used to iterate over parameters to method
std::unique_ptr<JniCallingConvention> main_jni_conv =
- JniCallingConvention::Create(&allocator,
+ JniCallingConvention::Create(allocator,
is_static,
is_synchronized,
is_fast_native,
@@ -159,11 +156,11 @@
std::unique_ptr<ManagedRuntimeCallingConvention> mr_conv(
ManagedRuntimeCallingConvention::Create(
- &allocator, is_static, is_synchronized, shorty, instruction_set));
+ allocator, is_static, is_synchronized, shorty, instruction_set));
// Assembler that holds generated instructions
std::unique_ptr<JNIMacroAssembler<kPointerSize>> jni_asm =
- GetMacroAssembler<kPointerSize>(&allocator, instruction_set, instruction_set_features);
+ GetMacroAssembler<kPointerSize>(allocator, instruction_set, instruction_set_features);
jni_asm->cfi().SetEnabled(compiler_options.GenerateAnyDebugInfo());
jni_asm->SetEmitRunTimeChecksInDebugMode(compiler_options.EmitRunTimeChecksInDebugMode());
@@ -199,9 +196,9 @@
// 1.3 Spill reference register arguments.
constexpr FrameOffset kInvalidReferenceOffset =
JNIMacroAssembler<kPointerSize>::kInvalidReferenceOffset;
- ArenaVector<ArgumentLocation> src_args(allocator.Adapter());
- ArenaVector<ArgumentLocation> dest_args(allocator.Adapter());
- ArenaVector<FrameOffset> refs(allocator.Adapter());
+ ArenaVector<ArgumentLocation> src_args(allocator->Adapter());
+ ArenaVector<ArgumentLocation> dest_args(allocator->Adapter());
+ ArenaVector<FrameOffset> refs(allocator->Adapter());
if (LIKELY(!is_critical_native)) {
mr_conv->ResetIterator(FrameOffset(current_frame_size));
for (; mr_conv->HasNext(); mr_conv->Next()) {
@@ -699,13 +696,14 @@
JniCompiledMethod ArtQuickJniCompileMethod(const CompilerOptions& compiler_options,
uint32_t access_flags,
uint32_t method_idx,
- const DexFile& dex_file) {
+ const DexFile& dex_file,
+ ArenaAllocator* allocator) {
if (Is64BitInstructionSet(compiler_options.GetInstructionSet())) {
return ArtJniCompileMethodInternal<PointerSize::k64>(
- compiler_options, access_flags, method_idx, dex_file);
+ compiler_options, access_flags, method_idx, dex_file, allocator);
} else {
return ArtJniCompileMethodInternal<PointerSize::k32>(
- compiler_options, access_flags, method_idx, dex_file);
+ compiler_options, access_flags, method_idx, dex_file, allocator);
}
}
diff --git a/compiler/jni/quick/jni_compiler.h b/compiler/jni/quick/jni_compiler.h
index 313fcd3..52a6f3c 100644
--- a/compiler/jni/quick/jni_compiler.h
+++ b/compiler/jni/quick/jni_compiler.h
@@ -24,6 +24,7 @@
namespace art {
+class ArenaAllocator;
class ArtMethod;
class CompilerOptions;
class DexFile;
@@ -65,7 +66,8 @@
JniCompiledMethod ArtQuickJniCompileMethod(const CompilerOptions& compiler_options,
uint32_t access_flags,
uint32_t method_idx,
- const DexFile& dex_file);
+ const DexFile& dex_file,
+ ArenaAllocator* allocator);
} // namespace art
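
(Note: jni_compiler.h only needs the `class ArenaAllocator;` forward
declaration added above, since the new parameter is a pointer; callers
that actually construct an allocator include the full definition
themselves.)
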
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 1bf1586..18b0cf5 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -1183,7 +1183,7 @@
}
JniCompiledMethod jni_compiled_method = ArtQuickJniCompileMethod(
- compiler_options, access_flags, method_idx, dex_file);
+ compiler_options, access_flags, method_idx, dex_file, &allocator);
MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledNativeStub);
ScopedArenaAllocator stack_map_allocator(&arena_stack); // Will hold the stack map.
@@ -1234,7 +1234,7 @@
if (UNLIKELY(method->IsNative())) {
JniCompiledMethod jni_compiled_method = ArtQuickJniCompileMethod(
- compiler_options, access_flags, method_idx, *dex_file);
+ compiler_options, access_flags, method_idx, *dex_file, &allocator);
std::vector<Handle<mirror::Object>> roots;
ArenaSet<ArtMethod*, std::less<ArtMethod*>> cha_single_implementation_list(
allocator.Adapter(kArenaAllocCHA));
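
At both call sites above, `allocator` is an `ArenaAllocator` the
optimizing compiler already has in scope (the second hunk passes it to
`allocator.Adapter(kArenaAllocCHA)` right after the call), presumably
backed by a long-lived arena pool. The JNI stub now draws from those
recycled arenas instead of a throwaway `MallocArenaPool`, matching the
caller-owned shape in the sketch above.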