author Santiago Aboy Solanes <solanes@google.com> 2022-04-08 08:31:15 +0000
committer Santiago Aboy Solanes <solanes@google.com> 2022-04-08 08:35:16 +0000
commit aa5a644f17aab27dee172642a276bd24e69a5b54
tree e06645d2c25b4e59f3724fcfdfaa33b0a7375ec2
parent 8bb486a78e350ce6072d71e15cc4f01a142e80d4
Revert "Pass `ArenaAllocator` to JNI compiler."
This reverts commit 601f4e9955be4d25b5ecfe7779d6981a5c1fcbca.

Reason for revert: Bot redness e.g. https://ci.chromium.org/ui/p/art/builders/ci/angler-armv7-debug/2490/overview

Change-Id: If4d84625273305453ff4bb80554b5c8baca241d1
-rw-r--r--  compiler/jni/quick/jni_compiler.cc          26
-rw-r--r--  compiler/jni/quick/jni_compiler.h            4
-rw-r--r--  compiler/optimizing/optimizing_compiler.cc   4
3 files changed, 17 insertions, 17 deletions
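
The net effect of this revert is that ArtQuickJniCompileMethod no longer receives an ArenaAllocator* from the optimizing compiler; the JNI compiler goes back to owning a MallocArenaPool and ArenaAllocator internally. The sketch below is illustrative only (it is not part of this change, and the function body is elided); the names MallocArenaPool, ArenaAllocator, and ArtQuickJniCompileMethod come from the diff itself.

// Illustrative sketch of the allocator-ownership shape restored by this revert.
#include "base/arena_allocator.h"
#include "base/malloc_arena_pool.h"

JniCompiledMethod ArtQuickJniCompileMethod(const CompilerOptions& compiler_options,
                                           uint32_t access_flags,
                                           uint32_t method_idx,
                                           const DexFile& dex_file) {
  // No ArenaAllocator* parameter: the JNI compiler creates its own
  // arena-backed allocator for the calling conventions and the assembler.
  MallocArenaPool pool;
  ArenaAllocator allocator(&pool);
  // ... build calling conventions, the JNI macro assembler, and emit code ...
}
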
diff --git a/compiler/jni/quick/jni_compiler.cc b/compiler/jni/quick/jni_compiler.cc
index 6cb50211e1..2a33858f52 100644
--- a/compiler/jni/quick/jni_compiler.cc
+++ b/compiler/jni/quick/jni_compiler.cc
@@ -28,6 +28,7 @@
#include "base/enums.h"
#include "base/logging.h" // For VLOG.
#include "base/macros.h"
+#include "base/malloc_arena_pool.h"
#include "base/memory_region.h"
#include "base/utils.h"
#include "calling_convention.h"
@@ -83,8 +84,7 @@ template <PointerSize kPointerSize>
static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& compiler_options,
uint32_t access_flags,
uint32_t method_idx,
- const DexFile& dex_file,
- ArenaAllocator* allocator) {
+ const DexFile& dex_file) {
constexpr size_t kRawPointerSize = static_cast<size_t>(kPointerSize);
const bool is_native = (access_flags & kAccNative) != 0;
CHECK(is_native);
@@ -143,9 +143,12 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
}
}
+ MallocArenaPool pool;
+ ArenaAllocator allocator(&pool);
+
// Calling conventions used to iterate over parameters to method
std::unique_ptr<JniCallingConvention> main_jni_conv =
- JniCallingConvention::Create(allocator,
+ JniCallingConvention::Create(&allocator,
is_static,
is_synchronized,
is_fast_native,
@@ -156,11 +159,11 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
std::unique_ptr<ManagedRuntimeCallingConvention> mr_conv(
ManagedRuntimeCallingConvention::Create(
- allocator, is_static, is_synchronized, shorty, instruction_set));
+ &allocator, is_static, is_synchronized, shorty, instruction_set));
// Assembler that holds generated instructions
std::unique_ptr<JNIMacroAssembler<kPointerSize>> jni_asm =
- GetMacroAssembler<kPointerSize>(allocator, instruction_set, instruction_set_features);
+ GetMacroAssembler<kPointerSize>(&allocator, instruction_set, instruction_set_features);
jni_asm->cfi().SetEnabled(compiler_options.GenerateAnyDebugInfo());
jni_asm->SetEmitRunTimeChecksInDebugMode(compiler_options.EmitRunTimeChecksInDebugMode());
@@ -196,9 +199,9 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
// 1.3 Spill reference register arguments.
constexpr FrameOffset kInvalidReferenceOffset =
JNIMacroAssembler<kPointerSize>::kInvalidReferenceOffset;
- ArenaVector<ArgumentLocation> src_args(allocator->Adapter());
- ArenaVector<ArgumentLocation> dest_args(allocator->Adapter());
- ArenaVector<FrameOffset> refs(allocator->Adapter());
+ ArenaVector<ArgumentLocation> src_args(allocator.Adapter());
+ ArenaVector<ArgumentLocation> dest_args(allocator.Adapter());
+ ArenaVector<FrameOffset> refs(allocator.Adapter());
if (LIKELY(!is_critical_native)) {
mr_conv->ResetIterator(FrameOffset(current_frame_size));
for (; mr_conv->HasNext(); mr_conv->Next()) {
@@ -696,14 +699,13 @@ static void SetNativeParameter(JNIMacroAssembler<kPointerSize>* jni_asm,
JniCompiledMethod ArtQuickJniCompileMethod(const CompilerOptions& compiler_options,
uint32_t access_flags,
uint32_t method_idx,
- const DexFile& dex_file,
- ArenaAllocator* allocator) {
+ const DexFile& dex_file) {
if (Is64BitInstructionSet(compiler_options.GetInstructionSet())) {
return ArtJniCompileMethodInternal<PointerSize::k64>(
- compiler_options, access_flags, method_idx, dex_file, allocator);
+ compiler_options, access_flags, method_idx, dex_file);
} else {
return ArtJniCompileMethodInternal<PointerSize::k32>(
- compiler_options, access_flags, method_idx, dex_file, allocator);
+ compiler_options, access_flags, method_idx, dex_file);
}
}
diff --git a/compiler/jni/quick/jni_compiler.h b/compiler/jni/quick/jni_compiler.h
index 52a6f3cf02..313fcd361e 100644
--- a/compiler/jni/quick/jni_compiler.h
+++ b/compiler/jni/quick/jni_compiler.h
@@ -24,7 +24,6 @@
namespace art {
-class ArenaAllocator;
class ArtMethod;
class CompilerOptions;
class DexFile;
@@ -66,8 +65,7 @@ class JniCompiledMethod {
JniCompiledMethod ArtQuickJniCompileMethod(const CompilerOptions& compiler_options,
uint32_t access_flags,
uint32_t method_idx,
- const DexFile& dex_file,
- ArenaAllocator* allocator);
+ const DexFile& dex_file);
} // namespace art
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 6eb3d01e42..ca440326ac 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -1183,7 +1183,7 @@ CompiledMethod* OptimizingCompiler::JniCompile(uint32_t access_flags,
}
JniCompiledMethod jni_compiled_method = ArtQuickJniCompileMethod(
- compiler_options, access_flags, method_idx, dex_file, &allocator);
+ compiler_options, access_flags, method_idx, dex_file);
MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledNativeStub);
ScopedArenaAllocator stack_map_allocator(&arena_stack); // Will hold the stack map.
@@ -1234,7 +1234,7 @@ bool OptimizingCompiler::JitCompile(Thread* self,
if (UNLIKELY(method->IsNative())) {
JniCompiledMethod jni_compiled_method = ArtQuickJniCompileMethod(
- compiler_options, access_flags, method_idx, *dex_file, &allocator);
+ compiler_options, access_flags, method_idx, *dex_file);
std::vector<Handle<mirror::Object>> roots;
ArenaSet<ArtMethod*, std::less<ArtMethod*>> cha_single_implementation_list(
allocator.Adapter(kArenaAllocCHA));