author Vladimir Marko <vmarko@google.com> 2024-03-07 16:11:09 +0100
committer Treehugger Robot <android-test-infra-autosubmit@system.gserviceaccount.com> 2024-03-08 13:36:51 +0000
commit 0f6befa0f064d482cc252913f46ba8264b7e0384 (patch)
tree d88a7f7c4ce76ada7e6c14b5ab9368eb25d32575 /compiler
parent 43e1c7db46d176da1326f354bc69ef38a5f65754 (diff)
Pass only shorty to `ArtQuickJniCompileMethod()`.
Passing a `dex_file` and `method_idx` makes testing unnecessarily difficult.

Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Bug: 288983053
Change-Id: Ice79423ec568e254547acd4448fb82e2ad11b79c
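For readers unfamiliar with the term: a "shorty" is the compact signature encoding used throughout ART, where index 0 is the return type and each following character is one argument type ('V' void, 'Z' boolean, 'B' byte, 'C' char, 'S' short, 'I' int, 'J' long, 'F' float, 'D' double, 'L' reference). A minimal sketch of that encoding in plain C++ (hypothetical helper name, not ART code):

    #include <cstddef>
    #include <string_view>

    // Index 0 of a shorty is the return type; everything after it is
    // one character per argument.
    constexpr std::size_t NumShortyArgs(std::string_view shorty) {
      return shorty.length() - 1u;
    }

    static_assert(NumShortyArgs("IJL") == 2u);  // e.g. int f(long, Object)

This is why passing just the shorty suffices for the JNI stub compiler: the stub only needs the type shape of the method, not its identity within a dex file.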
Diffstat (limited to 'compiler')
-rw-r--r--  compiler/jni/quick/arm/calling_convention_arm.cc           4
-rw-r--r--  compiler/jni/quick/arm/calling_convention_arm.h            4
-rw-r--r--  compiler/jni/quick/arm64/calling_convention_arm64.cc       6
-rw-r--r--  compiler/jni/quick/arm64/calling_convention_arm64.h        5
-rw-r--r--  compiler/jni/quick/calling_convention.cc                   4
-rw-r--r--  compiler/jni/quick/calling_convention.h                   20
-rw-r--r--  compiler/jni/quick/jni_compiler.cc                        64
-rw-r--r--  compiler/jni/quick/jni_compiler.h                          3
-rw-r--r--  compiler/jni/quick/riscv64/calling_convention_riscv64.cc   6
-rw-r--r--  compiler/jni/quick/riscv64/calling_convention_riscv64.h    5
-rw-r--r--  compiler/jni/quick/x86/calling_convention_x86.cc           8
-rw-r--r--  compiler/jni/quick/x86/calling_convention_x86.h            4
-rw-r--r--  compiler/jni/quick/x86_64/calling_convention_x86_64.cc     6
-rw-r--r--  compiler/jni/quick/x86_64/calling_convention_x86_64.h      5
-rw-r--r--  compiler/optimizing/code_generator.cc                      5
-rw-r--r--  compiler/optimizing/code_generator.h                       9
-rw-r--r--  compiler/optimizing/optimizing_compiler.cc                 4
17 files changed, 73 insertions(+), 89 deletions(-)
diff --git a/compiler/jni/quick/arm/calling_convention_arm.cc b/compiler/jni/quick/arm/calling_convention_arm.cc
index 826474f4f2..80abe919a0 100644
--- a/compiler/jni/quick/arm/calling_convention_arm.cc
+++ b/compiler/jni/quick/arm/calling_convention_arm.cc
@@ -296,7 +296,7 @@ ArmJniCallingConvention::ArmJniCallingConvention(bool is_static,
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty)
+ std::string_view shorty)
: JniCallingConvention(is_static,
is_synchronized,
is_fast_native,
@@ -462,7 +462,7 @@ size_t ArmJniCallingConvention::OutFrameSize() const {
}
size_t out_args_size = RoundUp(size, kAapcsStackAlignment);
if (UNLIKELY(IsCriticalNative())) {
- DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty(), NumArgs() + 1u));
+ DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty()));
}
return out_args_size;
}
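The dropped `NumArgs() + 1u` argument carries no extra information here: @CriticalNative methods are static, so the argument count plus the return slot is just the shorty length, which the callee can now recover on its own. A sketch of that equivalence under this assumption (hypothetical name, plain C++):

    #include <cstddef>
    #include <string_view>

    // For a static method there is no implicit `this`, so the argument
    // count is shorty.length() - 1, and NumArgs() + 1 == shorty.length().
    constexpr std::size_t StaticNumArgsPlusReturn(std::string_view shorty) {
      std::size_t num_args = shorty.length() - 1u;
      return num_args + 1u;
    }

    static_assert(StaticNumArgsPlusReturn("IJF") == 3u);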
diff --git a/compiler/jni/quick/arm/calling_convention_arm.h b/compiler/jni/quick/arm/calling_convention_arm.h
index 3a09d4eaad..f74f02362b 100644
--- a/compiler/jni/quick/arm/calling_convention_arm.h
+++ b/compiler/jni/quick/arm/calling_convention_arm.h
@@ -26,7 +26,7 @@ namespace arm {
class ArmManagedRuntimeCallingConvention final : public ManagedRuntimeCallingConvention {
public:
- ArmManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, const char* shorty)
+ ArmManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, std::string_view shorty)
: ManagedRuntimeCallingConvention(is_static,
is_synchronized,
shorty,
@@ -60,7 +60,7 @@ class ArmJniCallingConvention final : public JniCallingConvention {
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty);
+ std::string_view shorty);
~ArmJniCallingConvention() override {}
// Calling convention
ManagedRegister ReturnRegister() const override;
diff --git a/compiler/jni/quick/arm64/calling_convention_arm64.cc b/compiler/jni/quick/arm64/calling_convention_arm64.cc
index 3ccbb71d0c..921ad4590e 100644
--- a/compiler/jni/quick/arm64/calling_convention_arm64.cc
+++ b/compiler/jni/quick/arm64/calling_convention_arm64.cc
@@ -142,7 +142,7 @@ static constexpr uint32_t kAapcs64FpCalleeSpillMask =
CalculateFpCalleeSpillMask(kAapcs64CalleeSaveRegisters);
// Calling convention
-static ManagedRegister ReturnRegisterForShorty(const char* shorty) {
+static ManagedRegister ReturnRegisterForShorty(std::string_view shorty) {
if (shorty[0] == 'F') {
return Arm64ManagedRegister::FromSRegister(S0);
} else if (shorty[0] == 'D') {
@@ -222,7 +222,7 @@ Arm64JniCallingConvention::Arm64JniCallingConvention(bool is_static,
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty)
+ std::string_view shorty)
: JniCallingConvention(is_static,
is_synchronized,
is_fast_native,
@@ -306,7 +306,7 @@ size_t Arm64JniCallingConvention::OutFrameSize() const {
}
size_t out_args_size = RoundUp(size, kAapcs64StackAlignment);
if (UNLIKELY(IsCriticalNative())) {
- DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty(), NumArgs() + 1u));
+ DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty()));
}
return out_args_size;
}
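`ReturnRegisterForShorty()` only ever inspects `shorty[0]`, so switching from `const char*` to `std::string_view` is a drop-in change here. A self-contained sketch of the same dispatch pattern, with a hypothetical enum standing in for ART's ManagedRegister types:

    #include <string_view>

    // Hypothetical stand-in for the per-architecture register types.
    enum class RetRegClass { kFloat32, kFloat64, kInt64, kNone, kInt32 };

    // Mirrors the shape of ReturnRegisterForShorty(): only the return
    // type character at index 0 selects the return register.
    RetRegClass ClassifyReturn(std::string_view shorty) {
      if (shorty[0] == 'F') return RetRegClass::kFloat32;
      if (shorty[0] == 'D') return RetRegClass::kFloat64;
      if (shorty[0] == 'J') return RetRegClass::kInt64;
      if (shorty[0] == 'V') return RetRegClass::kNone;
      return RetRegClass::kInt32;  // Z, B, C, S, I and references.
    }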
diff --git a/compiler/jni/quick/arm64/calling_convention_arm64.h b/compiler/jni/quick/arm64/calling_convention_arm64.h
index f29eb15fa8..4ba1fb75ef 100644
--- a/compiler/jni/quick/arm64/calling_convention_arm64.h
+++ b/compiler/jni/quick/arm64/calling_convention_arm64.h
@@ -26,7 +26,8 @@ namespace arm64 {
class Arm64ManagedRuntimeCallingConvention final : public ManagedRuntimeCallingConvention {
public:
- Arm64ManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, const char* shorty)
+ Arm64ManagedRuntimeCallingConvention(
+ bool is_static, bool is_synchronized, std::string_view shorty)
: ManagedRuntimeCallingConvention(is_static,
is_synchronized,
shorty,
@@ -52,7 +53,7 @@ class Arm64JniCallingConvention final : public JniCallingConvention {
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty);
+ std::string_view shorty);
~Arm64JniCallingConvention() override {}
// Calling convention
ManagedRegister ReturnRegister() const override;
diff --git a/compiler/jni/quick/calling_convention.cc b/compiler/jni/quick/calling_convention.cc
index 459beb0c67..14d8c9f61d 100644
--- a/compiler/jni/quick/calling_convention.cc
+++ b/compiler/jni/quick/calling_convention.cc
@@ -49,7 +49,7 @@ std::unique_ptr<ManagedRuntimeCallingConvention> ManagedRuntimeCallingConvention
ArenaAllocator* allocator,
bool is_static,
bool is_synchronized,
- const char* shorty,
+ std::string_view shorty,
InstructionSet instruction_set) {
switch (instruction_set) {
#ifdef ART_ENABLE_CODEGEN_arm
@@ -150,7 +150,7 @@ std::unique_ptr<JniCallingConvention> JniCallingConvention::Create(ArenaAllocato
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty,
+ std::string_view shorty,
InstructionSet instruction_set) {
switch (instruction_set) {
#ifdef ART_ENABLE_CODEGEN_arm
diff --git a/compiler/jni/quick/calling_convention.h b/compiler/jni/quick/calling_convention.h
index b8b4cc14b1..c0aba57b3a 100644
--- a/compiler/jni/quick/calling_convention.h
+++ b/compiler/jni/quick/calling_convention.h
@@ -76,19 +76,19 @@ class CallingConvention : public DeletableArenaObject<kArenaAllocCallingConventi
protected:
CallingConvention(bool is_static,
bool is_synchronized,
- const char* shorty,
+ std::string_view shorty,
PointerSize frame_pointer_size)
: itr_slots_(0), itr_refs_(0), itr_args_(0), itr_longs_and_doubles_(0),
itr_float_and_doubles_(0), displacement_(0),
frame_pointer_size_(frame_pointer_size),
is_static_(is_static), is_synchronized_(is_synchronized),
shorty_(shorty) {
- num_args_ = (is_static ? 0 : 1) + strlen(shorty) - 1;
+ num_args_ = (is_static ? 0 : 1) + shorty.length() - 1;
num_ref_args_ = is_static ? 0 : 1; // The implicit this pointer.
num_float_or_double_args_ = 0;
num_long_or_double_args_ = 0;
- for (size_t i = 1; i < strlen(shorty); i++) {
- char ch = shorty_[i];
+ for (size_t i = 1; i < shorty.length(); i++) {
+ char ch = shorty[i];
switch (ch) {
case 'L':
num_ref_args_++;
@@ -195,8 +195,8 @@ class CallingConvention : public DeletableArenaObject<kArenaAllocCallingConventi
}
return result;
}
- const char* GetShorty() const {
- return shorty_.c_str();
+ std::string_view GetShorty() const {
+ return shorty_;
}
// The slot number for current calling_convention argument.
// Note that each slot is 32-bit. When the current argument is bigger
@@ -238,7 +238,7 @@ class ManagedRuntimeCallingConvention : public CallingConvention {
static std::unique_ptr<ManagedRuntimeCallingConvention> Create(ArenaAllocator* allocator,
bool is_static,
bool is_synchronized,
- const char* shorty,
+ std::string_view shorty,
InstructionSet instruction_set);
// Offset of Method within the managed frame.
@@ -277,7 +277,7 @@ class ManagedRuntimeCallingConvention : public CallingConvention {
protected:
ManagedRuntimeCallingConvention(bool is_static,
bool is_synchronized,
- const char* shorty,
+ std::string_view shorty,
PointerSize frame_pointer_size)
: CallingConvention(is_static, is_synchronized, shorty, frame_pointer_size) {}
};
@@ -303,7 +303,7 @@ class JniCallingConvention : public CallingConvention {
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty,
+ std::string_view shorty,
InstructionSet instruction_set);
// Size of frame excluding space for outgoing args (its assumed Method* is
@@ -403,7 +403,7 @@ class JniCallingConvention : public CallingConvention {
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty,
+ std::string_view shorty,
PointerSize frame_pointer_size)
: CallingConvention(is_static, is_synchronized, shorty, frame_pointer_size),
is_fast_native_(is_fast_native),
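Besides the type change, the constructor hunk above stops evaluating `strlen(shorty)` in the loop condition on every iteration; `std::string_view::length()` is a stored size. A self-contained sketch of the same counting logic (hypothetical struct, not the ART class; the 'D'/'F'/'J' cases follow the pattern begun by the 'L' case visible in the hunk):

    #include <cstddef>
    #include <string_view>

    struct ArgCounts {  // Hypothetical stand-in for CallingConvention's counters.
      std::size_t refs = 0;
      std::size_t float_or_double = 0;
      std::size_t long_or_double = 0;
    };

    ArgCounts CountArgs(std::string_view shorty, bool is_static) {
      ArgCounts c;
      c.refs = is_static ? 0u : 1u;  // The implicit `this` pointer.
      for (std::size_t i = 1; i < shorty.length(); ++i) {  // Skip the return type.
        switch (shorty[i]) {
          case 'L': c.refs++; break;
          case 'D': c.long_or_double++; [[fallthrough]];  // Doubles count as both.
          case 'F': c.float_or_double++; break;
          case 'J': c.long_or_double++; break;
          default: break;  // Z, B, C, S, I: 32-bit integral arguments.
        }
      }
      return c;
    }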
diff --git a/compiler/jni/quick/jni_compiler.cc b/compiler/jni/quick/jni_compiler.cc
index c721825683..73e888a79c 100644
--- a/compiler/jni/quick/jni_compiler.cc
+++ b/compiler/jni/quick/jni_compiler.cc
@@ -33,7 +33,6 @@
#include "calling_convention.h"
#include "class_linker.h"
#include "dwarf/debug_frame_opcode_writer.h"
-#include "dex/dex_file-inl.h"
#include "driver/compiler_options.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "instrumentation.h"
@@ -77,26 +76,19 @@ static std::unique_ptr<JNIMacroAssembler<kPointerSize>> GetMacroAssembler(
//
template <PointerSize kPointerSize>
static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& compiler_options,
+ std::string_view shorty,
uint32_t access_flags,
- uint32_t method_idx,
- const DexFile& dex_file,
ArenaAllocator* allocator) {
constexpr size_t kRawPointerSize = static_cast<size_t>(kPointerSize);
- const bool is_native = (access_flags & kAccNative) != 0;
- CHECK(is_native);
+ CHECK_NE(access_flags & kAccNative, 0u);
const bool is_static = (access_flags & kAccStatic) != 0;
const bool is_synchronized = (access_flags & kAccSynchronized) != 0;
- const char* shorty = dex_file.GetMethodShorty(dex_file.GetMethodId(method_idx));
- InstructionSet instruction_set = compiler_options.GetInstructionSet();
- const InstructionSetFeatures* instruction_set_features =
- compiler_options.GetInstructionSetFeatures();
-
- // i.e. if the method was annotated with @FastNative
const bool is_fast_native = (access_flags & kAccFastNative) != 0u;
-
- // i.e. if the method was annotated with @CriticalNative
const bool is_critical_native = (access_flags & kAccCriticalNative) != 0u;
+ InstructionSet instruction_set = compiler_options.GetInstructionSet();
+ const InstructionSetFeatures* instruction_set_features =
+ compiler_options.GetInstructionSetFeatures();
bool emit_read_barrier = compiler_options.EmitReadBarrier();
bool is_debuggable = compiler_options.GetDebuggable();
bool needs_entry_exit_hooks = is_debuggable && compiler_options.IsJitCompiler();
@@ -116,25 +108,18 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
// debuggable runtimes.
bool should_tag_sp = needs_entry_exit_hooks;
- VLOG(jni) << "JniCompile: Method :: "
- << dex_file.PrettyMethod(method_idx, /* with signature */ true)
- << " :: access_flags = " << std::hex << access_flags << std::dec;
-
- if (UNLIKELY(is_fast_native)) {
- VLOG(jni) << "JniCompile: Fast native method detected :: "
- << dex_file.PrettyMethod(method_idx, /* with signature */ true);
- }
-
- if (UNLIKELY(is_critical_native)) {
- VLOG(jni) << "JniCompile: Critical native method detected :: "
- << dex_file.PrettyMethod(method_idx, /* with signature */ true);
- }
+ VLOG(jni) << "JniCompile: shorty=\"" << shorty
+ << "\", access_flags=0x" << std::hex << access_flags
+ << (is_static ? " static" : "")
+ << (is_synchronized ? " synchronized" : "")
+ << (is_fast_native ? " @FastNative" : "")
+ << (is_critical_native ? " @CriticalNative" : "");
if (kIsDebugBuild) {
// Don't allow both @FastNative and @CriticalNative. They are mutually exclusive.
if (UNLIKELY(is_fast_native && is_critical_native)) {
- LOG(FATAL) << "JniCompile: Method cannot be both @CriticalNative and @FastNative"
- << dex_file.PrettyMethod(method_idx, /* with_signature= */ true);
+ LOG(FATAL) << "JniCompile: Method cannot be both @CriticalNative and @FastNative, \""
+ << shorty << "\", 0x" << std::hex << access_flags;
}
// @CriticalNative - extra checks:
@@ -144,16 +129,16 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
if (UNLIKELY(is_critical_native)) {
CHECK(is_static)
<< "@CriticalNative functions cannot be virtual since that would "
- << "require passing a reference parameter (this), which is illegal "
- << dex_file.PrettyMethod(method_idx, /* with_signature= */ true);
+ << "require passing a reference parameter (this), which is illegal, \""
+ << shorty << "\", 0x" << std::hex << access_flags;
CHECK(!is_synchronized)
<< "@CriticalNative functions cannot be synchronized since that would "
- << "require passing a (class and/or this) reference parameter, which is illegal "
- << dex_file.PrettyMethod(method_idx, /* with_signature= */ true);
- for (size_t i = 0; i < strlen(shorty); ++i) {
- CHECK_NE(Primitive::kPrimNot, Primitive::GetType(shorty[i]))
- << "@CriticalNative methods' shorty types must not have illegal references "
- << dex_file.PrettyMethod(method_idx, /* with_signature= */ true);
+ << "require passing a (class and/or this) reference parameter, which is illegal, \""
+ << shorty << "\", 0x" << std::hex << access_flags;
+ for (char c : shorty) {
+ CHECK_NE(Primitive::kPrimNot, Primitive::GetType(c))
+ << "@CriticalNative methods' shorty types must not have illegal references, \""
+ << shorty << "\", 0x" << std::hex << access_flags;
}
}
}
@@ -783,16 +768,15 @@ static void CallDecodeReferenceResult(JNIMacroAssembler<kPointerSize>* jni_asm,
}
JniCompiledMethod ArtQuickJniCompileMethod(const CompilerOptions& compiler_options,
+ std::string_view shorty,
uint32_t access_flags,
- uint32_t method_idx,
- const DexFile& dex_file,
ArenaAllocator* allocator) {
if (Is64BitInstructionSet(compiler_options.GetInstructionSet())) {
return ArtJniCompileMethodInternal<PointerSize::k64>(
- compiler_options, access_flags, method_idx, dex_file, allocator);
+ compiler_options, shorty, access_flags, allocator);
} else {
return ArtJniCompileMethodInternal<PointerSize::k32>(
- compiler_options, access_flags, method_idx, dex_file, allocator);
+ compiler_options, shorty, access_flags, allocator);
}
}
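The debug-build check above becomes a range-for over the view. Note that both the old and new loops start at index 0, so a reference return type ('L' in the return slot) is rejected alongside reference arguments. A sketch of the invariant being enforced (hypothetical helper, plain C++):

    #include <string_view>

    // @CriticalNative methods may not take or return references, so a
    // valid shorty contains no 'L' anywhere, return slot included.
    constexpr bool IsValidCriticalNativeShorty(std::string_view shorty) {
      for (char c : shorty) {
        if (c == 'L') {
          return false;
        }
      }
      return true;
    }

    static_assert(IsValidCriticalNativeShorty("IJF"));
    static_assert(!IsValidCriticalNativeShorty("VL"));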
diff --git a/compiler/jni/quick/jni_compiler.h b/compiler/jni/quick/jni_compiler.h
index d43b2a9917..94f0dacb38 100644
--- a/compiler/jni/quick/jni_compiler.h
+++ b/compiler/jni/quick/jni_compiler.h
@@ -65,9 +65,8 @@ class JniCompiledMethod {
};
JniCompiledMethod ArtQuickJniCompileMethod(const CompilerOptions& compiler_options,
+ std::string_view shorty,
uint32_t access_flags,
- uint32_t method_idx,
- const DexFile& dex_file,
ArenaAllocator* allocator);
} // namespace art
diff --git a/compiler/jni/quick/riscv64/calling_convention_riscv64.cc b/compiler/jni/quick/riscv64/calling_convention_riscv64.cc
index 195d7c1ec8..6d2fbed25e 100644
--- a/compiler/jni/quick/riscv64/calling_convention_riscv64.cc
+++ b/compiler/jni/quick/riscv64/calling_convention_riscv64.cc
@@ -135,7 +135,7 @@ static constexpr uint32_t kNativeCoreCalleeSpillMask =
static constexpr uint32_t kNativeFpCalleeSpillMask =
CalculateFpCalleeSpillMask(kNativeCalleeSaveRegisters);
-static ManagedRegister ReturnRegisterForShorty(const char* shorty) {
+static ManagedRegister ReturnRegisterForShorty(std::string_view shorty) {
if (shorty[0] == 'F' || shorty[0] == 'D') {
return Riscv64ManagedRegister::FromFRegister(FA0);
} else if (shorty[0] == 'V') {
@@ -198,7 +198,7 @@ Riscv64JniCallingConvention::Riscv64JniCallingConvention(bool is_static,
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty)
+ std::string_view shorty)
: JniCallingConvention(is_static,
is_synchronized,
is_fast_native,
@@ -256,7 +256,7 @@ size_t Riscv64JniCallingConvention::OutFrameSize() const {
}
size_t out_args_size = RoundUp(size, kNativeStackAlignment);
if (UNLIKELY(IsCriticalNative())) {
- DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty(), NumArgs() + 1u));
+ DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty()));
}
return out_args_size;
}
diff --git a/compiler/jni/quick/riscv64/calling_convention_riscv64.h b/compiler/jni/quick/riscv64/calling_convention_riscv64.h
index 5add183f72..f6193a005b 100644
--- a/compiler/jni/quick/riscv64/calling_convention_riscv64.h
+++ b/compiler/jni/quick/riscv64/calling_convention_riscv64.h
@@ -26,7 +26,8 @@ namespace riscv64 {
class Riscv64ManagedRuntimeCallingConvention final : public ManagedRuntimeCallingConvention {
public:
- Riscv64ManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, const char* shorty)
+ Riscv64ManagedRuntimeCallingConvention(
+ bool is_static, bool is_synchronized, std::string_view shorty)
: ManagedRuntimeCallingConvention(is_static,
is_synchronized,
shorty,
@@ -52,7 +53,7 @@ class Riscv64JniCallingConvention final : public JniCallingConvention {
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty);
+ std::string_view shorty);
~Riscv64JniCallingConvention() override {}
// Calling convention
ManagedRegister ReturnRegister() const override;
diff --git a/compiler/jni/quick/x86/calling_convention_x86.cc b/compiler/jni/quick/x86/calling_convention_x86.cc
index e692cff93f..da03e86e2c 100644
--- a/compiler/jni/quick/x86/calling_convention_x86.cc
+++ b/compiler/jni/quick/x86/calling_convention_x86.cc
@@ -102,7 +102,7 @@ ArrayRef<const ManagedRegister> X86JniCallingConvention::ArgumentScratchRegister
return scratch_regs;
}
-static ManagedRegister ReturnRegisterForShorty(const char* shorty, bool jni) {
+static ManagedRegister ReturnRegisterForShorty(std::string_view shorty, bool jni) {
if (shorty[0] == 'F' || shorty[0] == 'D') {
if (jni) {
return X86ManagedRegister::FromX87Register(ST0);
@@ -205,7 +205,7 @@ X86JniCallingConvention::X86JniCallingConvention(bool is_static,
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty)
+ std::string_view shorty)
: JniCallingConvention(is_static,
is_synchronized,
is_fast_native,
@@ -267,14 +267,14 @@ size_t X86JniCallingConvention::OutFrameSize() const {
static_assert(kFramePointerSize < kNativeStackAlignment);
// The stub frame size is considered 0 in the callee where the return PC is a part of
// the callee frame but it is kPointerSize in the compiled stub before the tail call.
- DCHECK_EQ(0u, GetCriticalNativeStubFrameSize(GetShorty(), NumArgs() + 1u));
+ DCHECK_EQ(0u, GetCriticalNativeStubFrameSize(GetShorty()));
return kFramePointerSize;
}
}
size_t out_args_size = RoundUp(size, kNativeStackAlignment);
if (UNLIKELY(IsCriticalNative())) {
- DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty(), NumArgs() + 1u));
+ DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty()));
}
return out_args_size;
}
diff --git a/compiler/jni/quick/x86/calling_convention_x86.h b/compiler/jni/quick/x86/calling_convention_x86.h
index f0d663dd98..f7a453eeac 100644
--- a/compiler/jni/quick/x86/calling_convention_x86.h
+++ b/compiler/jni/quick/x86/calling_convention_x86.h
@@ -26,7 +26,7 @@ namespace x86 {
class X86ManagedRuntimeCallingConvention final : public ManagedRuntimeCallingConvention {
public:
- X86ManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, const char* shorty)
+ X86ManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, std::string_view shorty)
: ManagedRuntimeCallingConvention(is_static,
is_synchronized,
shorty,
@@ -57,7 +57,7 @@ class X86JniCallingConvention final : public JniCallingConvention {
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty);
+ std::string_view shorty);
~X86JniCallingConvention() override {}
// Calling convention
ManagedRegister ReturnRegister() const override;
diff --git a/compiler/jni/quick/x86_64/calling_convention_x86_64.cc b/compiler/jni/quick/x86_64/calling_convention_x86_64.cc
index f0aa07e255..5342f52d3b 100644
--- a/compiler/jni/quick/x86_64/calling_convention_x86_64.cc
+++ b/compiler/jni/quick/x86_64/calling_convention_x86_64.cc
@@ -115,7 +115,7 @@ ArrayRef<const ManagedRegister> X86_64JniCallingConvention::ArgumentScratchRegis
return scratch_regs;
}
-static ManagedRegister ReturnRegisterForShorty(const char* shorty, [[maybe_unused]] bool jni) {
+static ManagedRegister ReturnRegisterForShorty(std::string_view shorty, [[maybe_unused]] bool jni) {
if (shorty[0] == 'F' || shorty[0] == 'D') {
return X86_64ManagedRegister::FromXmmRegister(XMM0);
} else if (shorty[0] == 'J') {
@@ -186,7 +186,7 @@ X86_64JniCallingConvention::X86_64JniCallingConvention(bool is_static,
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty)
+ std::string_view shorty)
: JniCallingConvention(is_static,
is_synchronized,
is_fast_native,
@@ -247,7 +247,7 @@ size_t X86_64JniCallingConvention::OutFrameSize() const {
size_t out_args_size = RoundUp(size, kNativeStackAlignment);
if (UNLIKELY(IsCriticalNative())) {
- DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty(), NumArgs() + 1u));
+ DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty()));
}
return out_args_size;
}
diff --git a/compiler/jni/quick/x86_64/calling_convention_x86_64.h b/compiler/jni/quick/x86_64/calling_convention_x86_64.h
index 859a277c60..9c1f4eda9d 100644
--- a/compiler/jni/quick/x86_64/calling_convention_x86_64.h
+++ b/compiler/jni/quick/x86_64/calling_convention_x86_64.h
@@ -26,7 +26,8 @@ namespace x86_64 {
class X86_64ManagedRuntimeCallingConvention final : public ManagedRuntimeCallingConvention {
public:
- X86_64ManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, const char* shorty)
+ X86_64ManagedRuntimeCallingConvention(
+ bool is_static, bool is_synchronized, std::string_view shorty)
: ManagedRuntimeCallingConvention(is_static,
is_synchronized,
shorty,
@@ -52,7 +53,7 @@ class X86_64JniCallingConvention final : public JniCallingConvention {
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty);
+ std::string_view shorty);
~X86_64JniCallingConvention() override {}
// Calling convention
ManagedRegister ReturnRegister() const override;
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index c734922268..88bd818b0c 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -493,11 +493,10 @@ void CodeGenerator::FinishCriticalNativeFrameSetup(size_t out_frame_size,
GetMoveResolver()->EmitNativeCode(parallel_move);
}
-const char* CodeGenerator::GetCriticalNativeShorty(HInvokeStaticOrDirect* invoke,
- uint32_t* shorty_len) {
+std::string_view CodeGenerator::GetCriticalNativeShorty(HInvokeStaticOrDirect* invoke) {
ScopedObjectAccess soa(Thread::Current());
DCHECK(invoke->GetResolvedMethod()->IsCriticalNative());
- return invoke->GetResolvedMethod()->GetShorty(shorty_len);
+ return invoke->GetResolvedMethod()->GetShortyView();
}
void CodeGenerator::GenerateInvokeStaticOrDirectRuntimeCall(
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index c54c96c40f..73059313e2 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -597,7 +597,7 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
template <typename CriticalNativeCallingConventionVisitor,
size_t kNativeStackAlignment,
- size_t GetCriticalNativeDirectCallFrameSize(const char* shorty, uint32_t shorty_len)>
+ size_t GetCriticalNativeDirectCallFrameSize(std::string_view shorty)>
size_t PrepareCriticalNativeCall(HInvokeStaticOrDirect* invoke) {
DCHECK(!invoke->GetLocations()->Intrinsified());
CriticalNativeCallingConventionVisitor calling_convention_visitor(
@@ -607,9 +607,8 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
size_t out_frame_size =
RoundUp(calling_convention_visitor.GetStackOffset(), kNativeStackAlignment);
if (kIsDebugBuild) {
- uint32_t shorty_len;
- const char* shorty = GetCriticalNativeShorty(invoke, &shorty_len);
- CHECK_EQ(GetCriticalNativeDirectCallFrameSize(shorty, shorty_len), out_frame_size);
+ std::string_view shorty = GetCriticalNativeShorty(invoke);
+ CHECK_EQ(GetCriticalNativeDirectCallFrameSize(shorty), out_frame_size);
}
if (out_frame_size != 0u) {
FinishCriticalNativeFrameSetup(out_frame_size, &parallel_move);
@@ -882,7 +881,7 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
void FinishCriticalNativeFrameSetup(size_t out_frame_size, /*inout*/HParallelMove* parallel_move);
- static const char* GetCriticalNativeShorty(HInvokeStaticOrDirect* invoke, uint32_t* shorty_len);
+ static std::string_view GetCriticalNativeShorty(HInvokeStaticOrDirect* invoke);
OptimizingCompilerStats* stats_;
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 65e8e51712..0e5de00f97 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -1224,7 +1224,7 @@ CompiledMethod* OptimizingCompiler::JniCompile(uint32_t access_flags,
}
JniCompiledMethod jni_compiled_method = ArtQuickJniCompileMethod(
- compiler_options, access_flags, method_idx, dex_file, &allocator);
+ compiler_options, dex_file.GetMethodShortyView(method_idx), access_flags, &allocator);
MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledNativeStub);
ScopedArenaAllocator stack_map_allocator(&arena_stack); // Will hold the stack map.
@@ -1291,7 +1291,7 @@ bool OptimizingCompiler::JitCompile(Thread* self,
DCHECK_IMPLIES(method->IsCriticalNative(), !runtime->IsJavaDebuggable());
JniCompiledMethod jni_compiled_method = ArtQuickJniCompileMethod(
- compiler_options, access_flags, method_idx, *dex_file, &allocator);
+ compiler_options, dex_file->GetMethodShortyView(method_idx), access_flags, &allocator);
std::vector<Handle<mirror::Object>> roots;
ArenaSet<ArtMethod*, std::less<ArtMethod*>> cha_single_implementation_list(
allocator.Adapter(kArenaAllocCHA));
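One caveat implied by the new `GetMethodShortyView()` call sites: a `std::string_view` does not own its bytes, so the `DexFile` backing the view must outlive the compilation that uses it, which holds at both call sites here. A minimal illustration of the ownership rule in plain C++, with a `std::string` standing in for the dex file:

    #include <string>
    #include <string_view>

    // The view borrows from `storage`; it is only valid while storage lives.
    std::string_view GetShortyView(const std::string& storage) {
      return storage;
    }

    int main() {
      std::string dex_backed_shorty = "IJL";  // Stand-in for DexFile-owned data.
      std::string_view shorty = GetShortyView(dex_backed_shorty);
      // Safe here: dex_backed_shorty is still alive. Keeping `shorty`
      // past its owner's lifetime would leave a dangling view.
      return shorty.length() == 3u ? 0 : 1;
    }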