author Vladimir Marko <vmarko@google.com> 2022-04-06 14:44:46 +0000
committer Vladimir Marko <vmarko@google.com> 2022-04-07 12:02:52 +0000
commit 601f4e9955be4d25b5ecfe7779d6981a5c1fcbca (patch)
tree e754e9263497baef25405e943f2dde050f126943
parent 3b661321f66b2058983de8559848aa505f0e929a (diff)
Pass `ArenaAllocator` to JNI compiler.
Avoid using a new arena for every JNI compilation.

Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Bug: 181943478
Change-Id: I7d0b51941116ab0ad90f7e509577a7a3f32550ac
-rw-r--r--  compiler/jni/quick/jni_compiler.cc  26
-rw-r--r--  compiler/jni/quick/jni_compiler.h  4
-rw-r--r--  compiler/optimizing/optimizing_compiler.cc  4
3 files changed, 17 insertions, 17 deletions
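For context: before this change, ArtJniCompileMethodInternal built a fresh MallocArenaPool and ArenaAllocator on every call; afterwards the caller passes an allocator in, so arena memory can be recycled through a shared pool. A minimal sketch of the two patterns (CompileBefore/CompileAfter are illustrative names, not from the tree; only MallocArenaPool, ArenaAllocator, and JniCompiledMethod come from the diff):

// Before: a new pool and allocator per JNI compilation, so every
// compiled native stub pays for a fresh arena allocation.
JniCompiledMethod CompileBefore(/* compiler options, method info */) {
  MallocArenaPool pool;             // created and destroyed per call
  ArenaAllocator allocator(&pool);
  // ... allocate calling conventions, assembler, ArenaVectors ...
}

// After: the caller owns the allocator (typically backed by the
// compiler's shared arena pool); arenas return to that pool for
// reuse when the allocator is destroyed.
JniCompiledMethod CompileAfter(/* compiler options, method info, */
                               ArenaAllocator* allocator) {
  // ... same allocations, drawn from *allocator instead ...
}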
diff --git a/compiler/jni/quick/jni_compiler.cc b/compiler/jni/quick/jni_compiler.cc
index 2a33858f52..6cb50211e1 100644
--- a/compiler/jni/quick/jni_compiler.cc
+++ b/compiler/jni/quick/jni_compiler.cc
@@ -28,7 +28,6 @@
#include "base/enums.h"
#include "base/logging.h" // For VLOG.
#include "base/macros.h"
-#include "base/malloc_arena_pool.h"
#include "base/memory_region.h"
#include "base/utils.h"
#include "calling_convention.h"
@@ -84,7 +83,8 @@ template <PointerSize kPointerSize>
static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& compiler_options,
uint32_t access_flags,
uint32_t method_idx,
- const DexFile& dex_file) {
+ const DexFile& dex_file,
+ ArenaAllocator* allocator) {
constexpr size_t kRawPointerSize = static_cast<size_t>(kPointerSize);
const bool is_native = (access_flags & kAccNative) != 0;
CHECK(is_native);
@@ -143,12 +143,9 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
}
}
- MallocArenaPool pool;
- ArenaAllocator allocator(&pool);
-
// Calling conventions used to iterate over parameters to method
std::unique_ptr<JniCallingConvention> main_jni_conv =
- JniCallingConvention::Create(&allocator,
+ JniCallingConvention::Create(allocator,
is_static,
is_synchronized,
is_fast_native,
@@ -159,11 +156,11 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
std::unique_ptr<ManagedRuntimeCallingConvention> mr_conv(
ManagedRuntimeCallingConvention::Create(
- &allocator, is_static, is_synchronized, shorty, instruction_set));
+ allocator, is_static, is_synchronized, shorty, instruction_set));
// Assembler that holds generated instructions
std::unique_ptr<JNIMacroAssembler<kPointerSize>> jni_asm =
- GetMacroAssembler<kPointerSize>(&allocator, instruction_set, instruction_set_features);
+ GetMacroAssembler<kPointerSize>(allocator, instruction_set, instruction_set_features);
jni_asm->cfi().SetEnabled(compiler_options.GenerateAnyDebugInfo());
jni_asm->SetEmitRunTimeChecksInDebugMode(compiler_options.EmitRunTimeChecksInDebugMode());
@@ -199,9 +196,9 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
// 1.3 Spill reference register arguments.
constexpr FrameOffset kInvalidReferenceOffset =
JNIMacroAssembler<kPointerSize>::kInvalidReferenceOffset;
- ArenaVector<ArgumentLocation> src_args(allocator.Adapter());
- ArenaVector<ArgumentLocation> dest_args(allocator.Adapter());
- ArenaVector<FrameOffset> refs(allocator.Adapter());
+ ArenaVector<ArgumentLocation> src_args(allocator->Adapter());
+ ArenaVector<ArgumentLocation> dest_args(allocator->Adapter());
+ ArenaVector<FrameOffset> refs(allocator->Adapter());
if (LIKELY(!is_critical_native)) {
mr_conv->ResetIterator(FrameOffset(current_frame_size));
for (; mr_conv->HasNext(); mr_conv->Next()) {
@@ -699,13 +696,14 @@ static void SetNativeParameter(JNIMacroAssembler<kPointerSize>* jni_asm,
JniCompiledMethod ArtQuickJniCompileMethod(const CompilerOptions& compiler_options,
uint32_t access_flags,
uint32_t method_idx,
- const DexFile& dex_file) {
+ const DexFile& dex_file,
+ ArenaAllocator* allocator) {
if (Is64BitInstructionSet(compiler_options.GetInstructionSet())) {
return ArtJniCompileMethodInternal<PointerSize::k64>(
- compiler_options, access_flags, method_idx, dex_file);
+ compiler_options, access_flags, method_idx, dex_file, allocator);
} else {
return ArtJniCompileMethodInternal<PointerSize::k32>(
- compiler_options, access_flags, method_idx, dex_file);
+ compiler_options, access_flags, method_idx, dex_file, allocator);
}
}
diff --git a/compiler/jni/quick/jni_compiler.h b/compiler/jni/quick/jni_compiler.h
index 313fcd361e..52a6f3cf02 100644
--- a/compiler/jni/quick/jni_compiler.h
+++ b/compiler/jni/quick/jni_compiler.h
@@ -24,6 +24,7 @@
namespace art {
+class ArenaAllocator;
class ArtMethod;
class CompilerOptions;
class DexFile;
@@ -65,7 +66,8 @@ class JniCompiledMethod {
JniCompiledMethod ArtQuickJniCompileMethod(const CompilerOptions& compiler_options,
uint32_t access_flags,
uint32_t method_idx,
- const DexFile& dex_file);
+ const DexFile& dex_file,
+ ArenaAllocator* allocator);
} // namespace art
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 1bf1586d37..18b0cf516b 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -1183,7 +1183,7 @@ CompiledMethod* OptimizingCompiler::JniCompile(uint32_t access_flags,
}
JniCompiledMethod jni_compiled_method = ArtQuickJniCompileMethod(
- compiler_options, access_flags, method_idx, dex_file);
+ compiler_options, access_flags, method_idx, dex_file, &allocator);
MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledNativeStub);
ScopedArenaAllocator stack_map_allocator(&arena_stack); // Will hold the stack map.
@@ -1234,7 +1234,7 @@ bool OptimizingCompiler::JitCompile(Thread* self,
if (UNLIKELY(method->IsNative())) {
JniCompiledMethod jni_compiled_method = ArtQuickJniCompileMethod(
- compiler_options, access_flags, method_idx, *dex_file);
+ compiler_options, access_flags, method_idx, *dex_file, &allocator);
std::vector<Handle<mirror::Object>> roots;
ArenaSet<ArtMethod*, std::less<ArtMethod*>> cha_single_implementation_list(
allocator.Adapter(kArenaAllocCHA));
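On the caller side, both call sites in optimizing_compiler.cc already hold a per-method ArenaAllocator backed by the compiler's shared pool, which is why passing it down suffices. A rough sketch of the resulting lifetime, with the driver loop, GetSharedArenaPool, MethodToCompile, and work_list all invented for illustration:

// Hypothetical driver loop: one long-lived pool, one short-lived
// allocator per compiled method. The allocator hands its arenas back
// to the pool on destruction, so JNI stubs no longer allocate a
// fresh arena each time.
ArenaPool* pool = GetSharedArenaPool();        // illustrative accessor
for (const MethodToCompile& m : work_list) {   // illustrative type
  ArenaAllocator allocator(pool);
  JniCompiledMethod jni_compiled_method = ArtQuickJniCompileMethod(
      m.compiler_options, m.access_flags, m.method_idx, *m.dex_file,
      &allocator);
  // ... wrap the compiled method, record stats ...
}  // `allocator` returns its arenas to `pool` here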