summary refs log tree commit diff
diff options
context:
space:
mode:
author Vladimir Marko <vmarko@google.com> 2024-03-07 16:11:09 +0100
committer Treehugger Robot <android-test-infra-autosubmit@system.gserviceaccount.com> 2024-03-08 13:36:51 +0000
commit 0f6befa0f064d482cc252913f46ba8264b7e0384 (patch)
tree d88a7f7c4ce76ada7e6c14b5ab9368eb25d32575
parent43e1c7db46d176da1326f354bc69ef38a5f65754 (diff)
Pass only shorty to `ArtQuickJniCompileMethod()`.
Passing a `dex_file` and `method_idx` makes testing unnecessarily difficult.

Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Bug: 288983053
Change-Id: Ice79423ec568e254547acd4448fb82e2ad11b79c
-rw-r--r--compiler/jni/quick/arm/calling_convention_arm.cc4
-rw-r--r--compiler/jni/quick/arm/calling_convention_arm.h4
-rw-r--r--compiler/jni/quick/arm64/calling_convention_arm64.cc6
-rw-r--r--compiler/jni/quick/arm64/calling_convention_arm64.h5
-rw-r--r--compiler/jni/quick/calling_convention.cc4
-rw-r--r--compiler/jni/quick/calling_convention.h20
-rw-r--r--compiler/jni/quick/jni_compiler.cc64
-rw-r--r--compiler/jni/quick/jni_compiler.h3
-rw-r--r--compiler/jni/quick/riscv64/calling_convention_riscv64.cc6
-rw-r--r--compiler/jni/quick/riscv64/calling_convention_riscv64.h5
-rw-r--r--compiler/jni/quick/x86/calling_convention_x86.cc8
-rw-r--r--compiler/jni/quick/x86/calling_convention_x86.h4
-rw-r--r--compiler/jni/quick/x86_64/calling_convention_x86_64.cc6
-rw-r--r--compiler/jni/quick/x86_64/calling_convention_x86_64.h5
-rw-r--r--compiler/optimizing/code_generator.cc5
-rw-r--r--compiler/optimizing/code_generator.h9
-rw-r--r--compiler/optimizing/optimizing_compiler.cc4
-rw-r--r--libdexfile/dex/dex_file-inl.h6
-rw-r--r--libdexfile/dex/dex_file.h1
-rw-r--r--runtime/arch/arm/jni_frame_arm.h14
-rw-r--r--runtime/arch/arm64/jni_frame_arm64.h16
-rw-r--r--runtime/arch/riscv64/jni_frame_riscv64.h16
-rw-r--r--runtime/arch/x86/jni_frame_x86.h16
-rw-r--r--runtime/arch/x86_64/jni_frame_x86_64.h16
-rw-r--r--runtime/entrypoints/jni/jni_entrypoints.cc26
25 files changed, 125 insertions, 148 deletions
diff --git a/compiler/jni/quick/arm/calling_convention_arm.cc b/compiler/jni/quick/arm/calling_convention_arm.cc
index 826474f4f2..80abe919a0 100644
--- a/compiler/jni/quick/arm/calling_convention_arm.cc
+++ b/compiler/jni/quick/arm/calling_convention_arm.cc
@@ -296,7 +296,7 @@ ArmJniCallingConvention::ArmJniCallingConvention(bool is_static,
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty)
+ std::string_view shorty)
: JniCallingConvention(is_static,
is_synchronized,
is_fast_native,
@@ -462,7 +462,7 @@ size_t ArmJniCallingConvention::OutFrameSize() const {
}
size_t out_args_size = RoundUp(size, kAapcsStackAlignment);
if (UNLIKELY(IsCriticalNative())) {
- DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty(), NumArgs() + 1u));
+ DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty()));
}
return out_args_size;
}
diff --git a/compiler/jni/quick/arm/calling_convention_arm.h b/compiler/jni/quick/arm/calling_convention_arm.h
index 3a09d4eaad..f74f02362b 100644
--- a/compiler/jni/quick/arm/calling_convention_arm.h
+++ b/compiler/jni/quick/arm/calling_convention_arm.h
@@ -26,7 +26,7 @@ namespace arm {
class ArmManagedRuntimeCallingConvention final : public ManagedRuntimeCallingConvention {
public:
- ArmManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, const char* shorty)
+ ArmManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, std::string_view shorty)
: ManagedRuntimeCallingConvention(is_static,
is_synchronized,
shorty,
@@ -60,7 +60,7 @@ class ArmJniCallingConvention final : public JniCallingConvention {
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty);
+ std::string_view shorty);
~ArmJniCallingConvention() override {}
// Calling convention
ManagedRegister ReturnRegister() const override;
diff --git a/compiler/jni/quick/arm64/calling_convention_arm64.cc b/compiler/jni/quick/arm64/calling_convention_arm64.cc
index 3ccbb71d0c..921ad4590e 100644
--- a/compiler/jni/quick/arm64/calling_convention_arm64.cc
+++ b/compiler/jni/quick/arm64/calling_convention_arm64.cc
@@ -142,7 +142,7 @@ static constexpr uint32_t kAapcs64FpCalleeSpillMask =
CalculateFpCalleeSpillMask(kAapcs64CalleeSaveRegisters);
// Calling convention
-static ManagedRegister ReturnRegisterForShorty(const char* shorty) {
+static ManagedRegister ReturnRegisterForShorty(std::string_view shorty) {
if (shorty[0] == 'F') {
return Arm64ManagedRegister::FromSRegister(S0);
} else if (shorty[0] == 'D') {
@@ -222,7 +222,7 @@ Arm64JniCallingConvention::Arm64JniCallingConvention(bool is_static,
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty)
+ std::string_view shorty)
: JniCallingConvention(is_static,
is_synchronized,
is_fast_native,
@@ -306,7 +306,7 @@ size_t Arm64JniCallingConvention::OutFrameSize() const {
}
size_t out_args_size = RoundUp(size, kAapcs64StackAlignment);
if (UNLIKELY(IsCriticalNative())) {
- DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty(), NumArgs() + 1u));
+ DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty()));
}
return out_args_size;
}
diff --git a/compiler/jni/quick/arm64/calling_convention_arm64.h b/compiler/jni/quick/arm64/calling_convention_arm64.h
index f29eb15fa8..4ba1fb75ef 100644
--- a/compiler/jni/quick/arm64/calling_convention_arm64.h
+++ b/compiler/jni/quick/arm64/calling_convention_arm64.h
@@ -26,7 +26,8 @@ namespace arm64 {
class Arm64ManagedRuntimeCallingConvention final : public ManagedRuntimeCallingConvention {
public:
- Arm64ManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, const char* shorty)
+ Arm64ManagedRuntimeCallingConvention(
+ bool is_static, bool is_synchronized, std::string_view shorty)
: ManagedRuntimeCallingConvention(is_static,
is_synchronized,
shorty,
@@ -52,7 +53,7 @@ class Arm64JniCallingConvention final : public JniCallingConvention {
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty);
+ std::string_view shorty);
~Arm64JniCallingConvention() override {}
// Calling convention
ManagedRegister ReturnRegister() const override;
diff --git a/compiler/jni/quick/calling_convention.cc b/compiler/jni/quick/calling_convention.cc
index 459beb0c67..14d8c9f61d 100644
--- a/compiler/jni/quick/calling_convention.cc
+++ b/compiler/jni/quick/calling_convention.cc
@@ -49,7 +49,7 @@ std::unique_ptr<ManagedRuntimeCallingConvention> ManagedRuntimeCallingConvention
ArenaAllocator* allocator,
bool is_static,
bool is_synchronized,
- const char* shorty,
+ std::string_view shorty,
InstructionSet instruction_set) {
switch (instruction_set) {
#ifdef ART_ENABLE_CODEGEN_arm
@@ -150,7 +150,7 @@ std::unique_ptr<JniCallingConvention> JniCallingConvention::Create(ArenaAllocato
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty,
+ std::string_view shorty,
InstructionSet instruction_set) {
switch (instruction_set) {
#ifdef ART_ENABLE_CODEGEN_arm
diff --git a/compiler/jni/quick/calling_convention.h b/compiler/jni/quick/calling_convention.h
index b8b4cc14b1..c0aba57b3a 100644
--- a/compiler/jni/quick/calling_convention.h
+++ b/compiler/jni/quick/calling_convention.h
@@ -76,19 +76,19 @@ class CallingConvention : public DeletableArenaObject<kArenaAllocCallingConventi
protected:
CallingConvention(bool is_static,
bool is_synchronized,
- const char* shorty,
+ std::string_view shorty,
PointerSize frame_pointer_size)
: itr_slots_(0), itr_refs_(0), itr_args_(0), itr_longs_and_doubles_(0),
itr_float_and_doubles_(0), displacement_(0),
frame_pointer_size_(frame_pointer_size),
is_static_(is_static), is_synchronized_(is_synchronized),
shorty_(shorty) {
- num_args_ = (is_static ? 0 : 1) + strlen(shorty) - 1;
+ num_args_ = (is_static ? 0 : 1) + shorty.length() - 1;
num_ref_args_ = is_static ? 0 : 1; // The implicit this pointer.
num_float_or_double_args_ = 0;
num_long_or_double_args_ = 0;
- for (size_t i = 1; i < strlen(shorty); i++) {
- char ch = shorty_[i];
+ for (size_t i = 1; i < shorty.length(); i++) {
+ char ch = shorty[i];
switch (ch) {
case 'L':
num_ref_args_++;
@@ -195,8 +195,8 @@ class CallingConvention : public DeletableArenaObject<kArenaAllocCallingConventi
}
return result;
}
- const char* GetShorty() const {
- return shorty_.c_str();
+ std::string_view GetShorty() const {
+ return shorty_;
}
// The slot number for current calling_convention argument.
// Note that each slot is 32-bit. When the current argument is bigger
@@ -238,7 +238,7 @@ class ManagedRuntimeCallingConvention : public CallingConvention {
static std::unique_ptr<ManagedRuntimeCallingConvention> Create(ArenaAllocator* allocator,
bool is_static,
bool is_synchronized,
- const char* shorty,
+ std::string_view shorty,
InstructionSet instruction_set);
// Offset of Method within the managed frame.
@@ -277,7 +277,7 @@ class ManagedRuntimeCallingConvention : public CallingConvention {
protected:
ManagedRuntimeCallingConvention(bool is_static,
bool is_synchronized,
- const char* shorty,
+ std::string_view shorty,
PointerSize frame_pointer_size)
: CallingConvention(is_static, is_synchronized, shorty, frame_pointer_size) {}
};
@@ -303,7 +303,7 @@ class JniCallingConvention : public CallingConvention {
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty,
+ std::string_view shorty,
InstructionSet instruction_set);
// Size of frame excluding space for outgoing args (its assumed Method* is
@@ -403,7 +403,7 @@ class JniCallingConvention : public CallingConvention {
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty,
+ std::string_view shorty,
PointerSize frame_pointer_size)
: CallingConvention(is_static, is_synchronized, shorty, frame_pointer_size),
is_fast_native_(is_fast_native),
diff --git a/compiler/jni/quick/jni_compiler.cc b/compiler/jni/quick/jni_compiler.cc
index c721825683..73e888a79c 100644
--- a/compiler/jni/quick/jni_compiler.cc
+++ b/compiler/jni/quick/jni_compiler.cc
@@ -33,7 +33,6 @@
#include "calling_convention.h"
#include "class_linker.h"
#include "dwarf/debug_frame_opcode_writer.h"
-#include "dex/dex_file-inl.h"
#include "driver/compiler_options.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "instrumentation.h"
@@ -77,26 +76,19 @@ static std::unique_ptr<JNIMacroAssembler<kPointerSize>> GetMacroAssembler(
//
template <PointerSize kPointerSize>
static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& compiler_options,
+ std::string_view shorty,
uint32_t access_flags,
- uint32_t method_idx,
- const DexFile& dex_file,
ArenaAllocator* allocator) {
constexpr size_t kRawPointerSize = static_cast<size_t>(kPointerSize);
- const bool is_native = (access_flags & kAccNative) != 0;
- CHECK(is_native);
+ CHECK_NE(access_flags & kAccNative, 0u);
const bool is_static = (access_flags & kAccStatic) != 0;
const bool is_synchronized = (access_flags & kAccSynchronized) != 0;
- const char* shorty = dex_file.GetMethodShorty(dex_file.GetMethodId(method_idx));
- InstructionSet instruction_set = compiler_options.GetInstructionSet();
- const InstructionSetFeatures* instruction_set_features =
- compiler_options.GetInstructionSetFeatures();
-
- // i.e. if the method was annotated with @FastNative
const bool is_fast_native = (access_flags & kAccFastNative) != 0u;
-
- // i.e. if the method was annotated with @CriticalNative
const bool is_critical_native = (access_flags & kAccCriticalNative) != 0u;
+ InstructionSet instruction_set = compiler_options.GetInstructionSet();
+ const InstructionSetFeatures* instruction_set_features =
+ compiler_options.GetInstructionSetFeatures();
bool emit_read_barrier = compiler_options.EmitReadBarrier();
bool is_debuggable = compiler_options.GetDebuggable();
bool needs_entry_exit_hooks = is_debuggable && compiler_options.IsJitCompiler();
@@ -116,25 +108,18 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
// debuggable runtimes.
bool should_tag_sp = needs_entry_exit_hooks;
- VLOG(jni) << "JniCompile: Method :: "
- << dex_file.PrettyMethod(method_idx, /* with signature */ true)
- << " :: access_flags = " << std::hex << access_flags << std::dec;
-
- if (UNLIKELY(is_fast_native)) {
- VLOG(jni) << "JniCompile: Fast native method detected :: "
- << dex_file.PrettyMethod(method_idx, /* with signature */ true);
- }
-
- if (UNLIKELY(is_critical_native)) {
- VLOG(jni) << "JniCompile: Critical native method detected :: "
- << dex_file.PrettyMethod(method_idx, /* with signature */ true);
- }
+ VLOG(jni) << "JniCompile: shorty=\"" << shorty
+ << "\", access_flags=0x" << std::hex << access_flags
+ << (is_static ? " static" : "")
+ << (is_synchronized ? " synchronized" : "")
+ << (is_fast_native ? " @FastNative" : "")
+ << (is_critical_native ? " @CriticalNative" : "");
if (kIsDebugBuild) {
// Don't allow both @FastNative and @CriticalNative. They are mutually exclusive.
if (UNLIKELY(is_fast_native && is_critical_native)) {
- LOG(FATAL) << "JniCompile: Method cannot be both @CriticalNative and @FastNative"
- << dex_file.PrettyMethod(method_idx, /* with_signature= */ true);
+ LOG(FATAL) << "JniCompile: Method cannot be both @CriticalNative and @FastNative, \""
+ << shorty << "\", 0x" << std::hex << access_flags;
}
// @CriticalNative - extra checks:
@@ -144,16 +129,16 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
if (UNLIKELY(is_critical_native)) {
CHECK(is_static)
<< "@CriticalNative functions cannot be virtual since that would "
- << "require passing a reference parameter (this), which is illegal "
- << dex_file.PrettyMethod(method_idx, /* with_signature= */ true);
+ << "require passing a reference parameter (this), which is illegal, \""
+ << shorty << "\", 0x" << std::hex << access_flags;
CHECK(!is_synchronized)
<< "@CriticalNative functions cannot be synchronized since that would "
- << "require passing a (class and/or this) reference parameter, which is illegal "
- << dex_file.PrettyMethod(method_idx, /* with_signature= */ true);
- for (size_t i = 0; i < strlen(shorty); ++i) {
- CHECK_NE(Primitive::kPrimNot, Primitive::GetType(shorty[i]))
- << "@CriticalNative methods' shorty types must not have illegal references "
- << dex_file.PrettyMethod(method_idx, /* with_signature= */ true);
+ << "require passing a (class and/or this) reference parameter, which is illegal, \""
+ << shorty << "\", 0x" << std::hex << access_flags;
+ for (char c : shorty) {
+ CHECK_NE(Primitive::kPrimNot, Primitive::GetType(c))
+ << "@CriticalNative methods' shorty types must not have illegal references, \""
+ << shorty << "\", 0x" << std::hex << access_flags;
}
}
}
@@ -783,16 +768,15 @@ static void CallDecodeReferenceResult(JNIMacroAssembler<kPointerSize>* jni_asm,
}
JniCompiledMethod ArtQuickJniCompileMethod(const CompilerOptions& compiler_options,
+ std::string_view shorty,
uint32_t access_flags,
- uint32_t method_idx,
- const DexFile& dex_file,
ArenaAllocator* allocator) {
if (Is64BitInstructionSet(compiler_options.GetInstructionSet())) {
return ArtJniCompileMethodInternal<PointerSize::k64>(
- compiler_options, access_flags, method_idx, dex_file, allocator);
+ compiler_options, shorty, access_flags, allocator);
} else {
return ArtJniCompileMethodInternal<PointerSize::k32>(
- compiler_options, access_flags, method_idx, dex_file, allocator);
+ compiler_options, shorty, access_flags, allocator);
}
}
diff --git a/compiler/jni/quick/jni_compiler.h b/compiler/jni/quick/jni_compiler.h
index d43b2a9917..94f0dacb38 100644
--- a/compiler/jni/quick/jni_compiler.h
+++ b/compiler/jni/quick/jni_compiler.h
@@ -65,9 +65,8 @@ class JniCompiledMethod {
};
JniCompiledMethod ArtQuickJniCompileMethod(const CompilerOptions& compiler_options,
+ std::string_view shorty,
uint32_t access_flags,
- uint32_t method_idx,
- const DexFile& dex_file,
ArenaAllocator* allocator);
} // namespace art
diff --git a/compiler/jni/quick/riscv64/calling_convention_riscv64.cc b/compiler/jni/quick/riscv64/calling_convention_riscv64.cc
index 195d7c1ec8..6d2fbed25e 100644
--- a/compiler/jni/quick/riscv64/calling_convention_riscv64.cc
+++ b/compiler/jni/quick/riscv64/calling_convention_riscv64.cc
@@ -135,7 +135,7 @@ static constexpr uint32_t kNativeCoreCalleeSpillMask =
static constexpr uint32_t kNativeFpCalleeSpillMask =
CalculateFpCalleeSpillMask(kNativeCalleeSaveRegisters);
-static ManagedRegister ReturnRegisterForShorty(const char* shorty) {
+static ManagedRegister ReturnRegisterForShorty(std::string_view shorty) {
if (shorty[0] == 'F' || shorty[0] == 'D') {
return Riscv64ManagedRegister::FromFRegister(FA0);
} else if (shorty[0] == 'V') {
@@ -198,7 +198,7 @@ Riscv64JniCallingConvention::Riscv64JniCallingConvention(bool is_static,
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty)
+ std::string_view shorty)
: JniCallingConvention(is_static,
is_synchronized,
is_fast_native,
@@ -256,7 +256,7 @@ size_t Riscv64JniCallingConvention::OutFrameSize() const {
}
size_t out_args_size = RoundUp(size, kNativeStackAlignment);
if (UNLIKELY(IsCriticalNative())) {
- DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty(), NumArgs() + 1u));
+ DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty()));
}
return out_args_size;
}
diff --git a/compiler/jni/quick/riscv64/calling_convention_riscv64.h b/compiler/jni/quick/riscv64/calling_convention_riscv64.h
index 5add183f72..f6193a005b 100644
--- a/compiler/jni/quick/riscv64/calling_convention_riscv64.h
+++ b/compiler/jni/quick/riscv64/calling_convention_riscv64.h
@@ -26,7 +26,8 @@ namespace riscv64 {
class Riscv64ManagedRuntimeCallingConvention final : public ManagedRuntimeCallingConvention {
public:
- Riscv64ManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, const char* shorty)
+ Riscv64ManagedRuntimeCallingConvention(
+ bool is_static, bool is_synchronized, std::string_view shorty)
: ManagedRuntimeCallingConvention(is_static,
is_synchronized,
shorty,
@@ -52,7 +53,7 @@ class Riscv64JniCallingConvention final : public JniCallingConvention {
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty);
+ std::string_view shorty);
~Riscv64JniCallingConvention() override {}
// Calling convention
ManagedRegister ReturnRegister() const override;
diff --git a/compiler/jni/quick/x86/calling_convention_x86.cc b/compiler/jni/quick/x86/calling_convention_x86.cc
index e692cff93f..da03e86e2c 100644
--- a/compiler/jni/quick/x86/calling_convention_x86.cc
+++ b/compiler/jni/quick/x86/calling_convention_x86.cc
@@ -102,7 +102,7 @@ ArrayRef<const ManagedRegister> X86JniCallingConvention::ArgumentScratchRegister
return scratch_regs;
}
-static ManagedRegister ReturnRegisterForShorty(const char* shorty, bool jni) {
+static ManagedRegister ReturnRegisterForShorty(std::string_view shorty, bool jni) {
if (shorty[0] == 'F' || shorty[0] == 'D') {
if (jni) {
return X86ManagedRegister::FromX87Register(ST0);
@@ -205,7 +205,7 @@ X86JniCallingConvention::X86JniCallingConvention(bool is_static,
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty)
+ std::string_view shorty)
: JniCallingConvention(is_static,
is_synchronized,
is_fast_native,
@@ -267,14 +267,14 @@ size_t X86JniCallingConvention::OutFrameSize() const {
static_assert(kFramePointerSize < kNativeStackAlignment);
// The stub frame size is considered 0 in the callee where the return PC is a part of
// the callee frame but it is kPointerSize in the compiled stub before the tail call.
- DCHECK_EQ(0u, GetCriticalNativeStubFrameSize(GetShorty(), NumArgs() + 1u));
+ DCHECK_EQ(0u, GetCriticalNativeStubFrameSize(GetShorty()));
return kFramePointerSize;
}
}
size_t out_args_size = RoundUp(size, kNativeStackAlignment);
if (UNLIKELY(IsCriticalNative())) {
- DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty(), NumArgs() + 1u));
+ DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty()));
}
return out_args_size;
}
diff --git a/compiler/jni/quick/x86/calling_convention_x86.h b/compiler/jni/quick/x86/calling_convention_x86.h
index f0d663dd98..f7a453eeac 100644
--- a/compiler/jni/quick/x86/calling_convention_x86.h
+++ b/compiler/jni/quick/x86/calling_convention_x86.h
@@ -26,7 +26,7 @@ namespace x86 {
class X86ManagedRuntimeCallingConvention final : public ManagedRuntimeCallingConvention {
public:
- X86ManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, const char* shorty)
+ X86ManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, std::string_view shorty)
: ManagedRuntimeCallingConvention(is_static,
is_synchronized,
shorty,
@@ -57,7 +57,7 @@ class X86JniCallingConvention final : public JniCallingConvention {
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty);
+ std::string_view shorty);
~X86JniCallingConvention() override {}
// Calling convention
ManagedRegister ReturnRegister() const override;
diff --git a/compiler/jni/quick/x86_64/calling_convention_x86_64.cc b/compiler/jni/quick/x86_64/calling_convention_x86_64.cc
index f0aa07e255..5342f52d3b 100644
--- a/compiler/jni/quick/x86_64/calling_convention_x86_64.cc
+++ b/compiler/jni/quick/x86_64/calling_convention_x86_64.cc
@@ -115,7 +115,7 @@ ArrayRef<const ManagedRegister> X86_64JniCallingConvention::ArgumentScratchRegis
return scratch_regs;
}
-static ManagedRegister ReturnRegisterForShorty(const char* shorty, [[maybe_unused]] bool jni) {
+static ManagedRegister ReturnRegisterForShorty(std::string_view shorty, [[maybe_unused]] bool jni) {
if (shorty[0] == 'F' || shorty[0] == 'D') {
return X86_64ManagedRegister::FromXmmRegister(XMM0);
} else if (shorty[0] == 'J') {
@@ -186,7 +186,7 @@ X86_64JniCallingConvention::X86_64JniCallingConvention(bool is_static,
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty)
+ std::string_view shorty)
: JniCallingConvention(is_static,
is_synchronized,
is_fast_native,
@@ -247,7 +247,7 @@ size_t X86_64JniCallingConvention::OutFrameSize() const {
size_t out_args_size = RoundUp(size, kNativeStackAlignment);
if (UNLIKELY(IsCriticalNative())) {
- DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty(), NumArgs() + 1u));
+ DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty()));
}
return out_args_size;
}
diff --git a/compiler/jni/quick/x86_64/calling_convention_x86_64.h b/compiler/jni/quick/x86_64/calling_convention_x86_64.h
index 859a277c60..9c1f4eda9d 100644
--- a/compiler/jni/quick/x86_64/calling_convention_x86_64.h
+++ b/compiler/jni/quick/x86_64/calling_convention_x86_64.h
@@ -26,7 +26,8 @@ namespace x86_64 {
class X86_64ManagedRuntimeCallingConvention final : public ManagedRuntimeCallingConvention {
public:
- X86_64ManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, const char* shorty)
+ X86_64ManagedRuntimeCallingConvention(
+ bool is_static, bool is_synchronized, std::string_view shorty)
: ManagedRuntimeCallingConvention(is_static,
is_synchronized,
shorty,
@@ -52,7 +53,7 @@ class X86_64JniCallingConvention final : public JniCallingConvention {
bool is_synchronized,
bool is_fast_native,
bool is_critical_native,
- const char* shorty);
+ std::string_view shorty);
~X86_64JniCallingConvention() override {}
// Calling convention
ManagedRegister ReturnRegister() const override;
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index c734922268..88bd818b0c 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -493,11 +493,10 @@ void CodeGenerator::FinishCriticalNativeFrameSetup(size_t out_frame_size,
GetMoveResolver()->EmitNativeCode(parallel_move);
}
-const char* CodeGenerator::GetCriticalNativeShorty(HInvokeStaticOrDirect* invoke,
- uint32_t* shorty_len) {
+std::string_view CodeGenerator::GetCriticalNativeShorty(HInvokeStaticOrDirect* invoke) {
ScopedObjectAccess soa(Thread::Current());
DCHECK(invoke->GetResolvedMethod()->IsCriticalNative());
- return invoke->GetResolvedMethod()->GetShorty(shorty_len);
+ return invoke->GetResolvedMethod()->GetShortyView();
}
void CodeGenerator::GenerateInvokeStaticOrDirectRuntimeCall(
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index c54c96c40f..73059313e2 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -597,7 +597,7 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
template <typename CriticalNativeCallingConventionVisitor,
size_t kNativeStackAlignment,
- size_t GetCriticalNativeDirectCallFrameSize(const char* shorty, uint32_t shorty_len)>
+ size_t GetCriticalNativeDirectCallFrameSize(std::string_view shorty)>
size_t PrepareCriticalNativeCall(HInvokeStaticOrDirect* invoke) {
DCHECK(!invoke->GetLocations()->Intrinsified());
CriticalNativeCallingConventionVisitor calling_convention_visitor(
@@ -607,9 +607,8 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
size_t out_frame_size =
RoundUp(calling_convention_visitor.GetStackOffset(), kNativeStackAlignment);
if (kIsDebugBuild) {
- uint32_t shorty_len;
- const char* shorty = GetCriticalNativeShorty(invoke, &shorty_len);
- CHECK_EQ(GetCriticalNativeDirectCallFrameSize(shorty, shorty_len), out_frame_size);
+ std::string_view shorty = GetCriticalNativeShorty(invoke);
+ CHECK_EQ(GetCriticalNativeDirectCallFrameSize(shorty), out_frame_size);
}
if (out_frame_size != 0u) {
FinishCriticalNativeFrameSetup(out_frame_size, &parallel_move);
@@ -882,7 +881,7 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
void FinishCriticalNativeFrameSetup(size_t out_frame_size, /*inout*/HParallelMove* parallel_move);
- static const char* GetCriticalNativeShorty(HInvokeStaticOrDirect* invoke, uint32_t* shorty_len);
+ static std::string_view GetCriticalNativeShorty(HInvokeStaticOrDirect* invoke);
OptimizingCompilerStats* stats_;
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 65e8e51712..0e5de00f97 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -1224,7 +1224,7 @@ CompiledMethod* OptimizingCompiler::JniCompile(uint32_t access_flags,
}
JniCompiledMethod jni_compiled_method = ArtQuickJniCompileMethod(
- compiler_options, access_flags, method_idx, dex_file, &allocator);
+ compiler_options, dex_file.GetMethodShortyView(method_idx), access_flags, &allocator);
MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledNativeStub);
ScopedArenaAllocator stack_map_allocator(&arena_stack); // Will hold the stack map.
@@ -1291,7 +1291,7 @@ bool OptimizingCompiler::JitCompile(Thread* self,
DCHECK_IMPLIES(method->IsCriticalNative(), !runtime->IsJavaDebuggable());
JniCompiledMethod jni_compiled_method = ArtQuickJniCompileMethod(
- compiler_options, access_flags, method_idx, *dex_file, &allocator);
+ compiler_options, dex_file->GetMethodShortyView(method_idx), access_flags, &allocator);
std::vector<Handle<mirror::Object>> roots;
ArenaSet<ArtMethod*, std::less<ArtMethod*>> cha_single_implementation_list(
allocator.Adapter(kArenaAllocCHA));
diff --git a/libdexfile/dex/dex_file-inl.h b/libdexfile/dex/dex_file-inl.h
index b01b004e5b..2b3eb4ae69 100644
--- a/libdexfile/dex/dex_file-inl.h
+++ b/libdexfile/dex/dex_file-inl.h
@@ -167,7 +167,11 @@ inline std::string_view DexFile::GetMethodNameView(uint32_t idx) const {
}
inline const char* DexFile::GetMethodShorty(uint32_t idx) const {
- return StringDataByIdx(GetProtoId(GetMethodId(idx).proto_idx_).shorty_idx_);
+ return GetMethodShorty(GetMethodId(idx));
+}
+
+inline std::string_view DexFile::GetMethodShortyView(uint32_t idx) const {
+ return GetMethodShortyView(GetMethodId(idx));
}
inline const char* DexFile::GetMethodShorty(const dex::MethodId& method_id) const {
diff --git a/libdexfile/dex/dex_file.h b/libdexfile/dex/dex_file.h
index 6bc6e4f4ca..8e399af812 100644
--- a/libdexfile/dex/dex_file.h
+++ b/libdexfile/dex/dex_file.h
@@ -500,6 +500,7 @@ class DexFile {
// Returns the shorty of a method by its index.
const char* GetMethodShorty(uint32_t idx) const;
+ std::string_view GetMethodShortyView(uint32_t idx) const;
// Returns the shorty of a method id.
const char* GetMethodShorty(const dex::MethodId& method_id) const;
diff --git a/runtime/arch/arm/jni_frame_arm.h b/runtime/arch/arm/jni_frame_arm.h
index 5410950efb..b905b43706 100644
--- a/runtime/arch/arm/jni_frame_arm.h
+++ b/runtime/arch/arm/jni_frame_arm.h
@@ -40,11 +40,9 @@ static_assert(kAapcsStackAlignment < kStackAlignment);
constexpr size_t kJniArgumentRegisterCount = 4u;
// Get stack args size for @CriticalNative method calls.
-inline size_t GetCriticalNativeCallArgsSize(const char* shorty, uint32_t shorty_len) {
- DCHECK_EQ(shorty_len, strlen(shorty));
-
+inline size_t GetCriticalNativeCallArgsSize(std::string_view shorty) {
size_t reg = 0; // Register for the current argument; if reg >= 4, we shall use stack.
- for (size_t i = 1; i != shorty_len; ++i) {
+ for (size_t i = 1; i != shorty.length(); ++i) {
if (shorty[i] == 'J' || shorty[i] == 'D') {
// 8-byte args need to start in even-numbered register or at aligned stack position.
reg += (reg & 1);
@@ -59,9 +57,9 @@ inline size_t GetCriticalNativeCallArgsSize(const char* shorty, uint32_t shorty_
// Get the frame size for @CriticalNative method stub.
// This must match the size of the frame emitted by the JNI compiler at the native call site.
-inline size_t GetCriticalNativeStubFrameSize(const char* shorty, uint32_t shorty_len) {
+inline size_t GetCriticalNativeStubFrameSize(std::string_view shorty) {
// The size of outgoing arguments.
- size_t size = GetCriticalNativeCallArgsSize(shorty, shorty_len);
+ size_t size = GetCriticalNativeCallArgsSize(shorty);
// Check if this is a tail call, i.e. there are no stack args and the return type
// is not an FP type (otherwise we need to move the result to FP register).
@@ -74,9 +72,9 @@ inline size_t GetCriticalNativeStubFrameSize(const char* shorty, uint32_t shorty
// Get the frame size for direct call to a @CriticalNative method.
// This must match the size of the extra frame emitted by the compiler at the native call site.
-inline size_t GetCriticalNativeDirectCallFrameSize(const char* shorty, uint32_t shorty_len) {
+inline size_t GetCriticalNativeDirectCallFrameSize(std::string_view shorty) {
// The size of outgoing arguments.
- size_t size = GetCriticalNativeCallArgsSize(shorty, shorty_len);
+ size_t size = GetCriticalNativeCallArgsSize(shorty);
// No return PC to save, zero- and sign-extension and FP value moves are handled by the caller.
return RoundUp(size, kAapcsStackAlignment);
diff --git a/runtime/arch/arm64/jni_frame_arm64.h b/runtime/arch/arm64/jni_frame_arm64.h
index 245f84808b..c99b569335 100644
--- a/runtime/arch/arm64/jni_frame_arm64.h
+++ b/runtime/arch/arm64/jni_frame_arm64.h
@@ -55,21 +55,19 @@ inline size_t GetNativeOutArgsSize(size_t num_fp_args, size_t num_non_fp_args) {
}
// Get stack args size for @CriticalNative method calls.
-inline size_t GetCriticalNativeCallArgsSize(const char* shorty, uint32_t shorty_len) {
- DCHECK_EQ(shorty_len, strlen(shorty));
-
+inline size_t GetCriticalNativeCallArgsSize(std::string_view shorty) {
size_t num_fp_args =
- std::count_if(shorty + 1, shorty + shorty_len, [](char c) { return c == 'F' || c == 'D'; });
- size_t num_non_fp_args = shorty_len - 1u - num_fp_args;
+ std::count_if(shorty.begin() + 1, shorty.end(), [](char c) { return c == 'F' || c == 'D'; });
+ size_t num_non_fp_args = shorty.length() - 1u - num_fp_args;
return GetNativeOutArgsSize(num_fp_args, num_non_fp_args);
}
// Get the frame size for @CriticalNative method stub.
// This must match the size of the extra frame emitted by the compiler at the native call site.
-inline size_t GetCriticalNativeStubFrameSize(const char* shorty, uint32_t shorty_len) {
+inline size_t GetCriticalNativeStubFrameSize(std::string_view shorty) {
// The size of outgoing arguments.
- size_t size = GetCriticalNativeCallArgsSize(shorty, shorty_len);
+ size_t size = GetCriticalNativeCallArgsSize(shorty);
// We can make a tail call if there are no stack args and we do not need
// to extend the result. Otherwise, add space for return PC.
@@ -81,9 +79,9 @@ inline size_t GetCriticalNativeStubFrameSize(const char* shorty, uint32_t shorty
// Get the frame size for direct call to a @CriticalNative method.
// This must match the size of the frame emitted by the JNI compiler at the native call site.
-inline size_t GetCriticalNativeDirectCallFrameSize(const char* shorty, uint32_t shorty_len) {
+inline size_t GetCriticalNativeDirectCallFrameSize(std::string_view shorty) {
// The size of outgoing arguments.
- size_t size = GetCriticalNativeCallArgsSize(shorty, shorty_len);
+ size_t size = GetCriticalNativeCallArgsSize(shorty);
// No return PC to save, zero- and sign-extension are handled by the caller.
return RoundUp(size, kAapcs64StackAlignment);
diff --git a/runtime/arch/riscv64/jni_frame_riscv64.h b/runtime/arch/riscv64/jni_frame_riscv64.h
index 29fb6d0deb..65ef10aae9 100644
--- a/runtime/arch/riscv64/jni_frame_riscv64.h
+++ b/runtime/arch/riscv64/jni_frame_riscv64.h
@@ -58,21 +58,19 @@ inline size_t GetNativeOutArgsSize(size_t num_fp_args, size_t num_non_fp_args) {
}
// Get stack args size for @CriticalNative method calls.
-inline size_t GetCriticalNativeCallArgsSize(const char* shorty, uint32_t shorty_len) {
- DCHECK_EQ(shorty_len, strlen(shorty));
-
+inline size_t GetCriticalNativeCallArgsSize(std::string_view shorty) {
size_t num_fp_args =
- std::count_if(shorty + 1, shorty + shorty_len, [](char c) { return c == 'F' || c == 'D'; });
- size_t num_non_fp_args = shorty_len - 1u - num_fp_args;
+ std::count_if(shorty.begin() + 1, shorty.end(), [](char c) { return c == 'F' || c == 'D'; });
+ size_t num_non_fp_args = shorty.length() - 1u - num_fp_args;
return GetNativeOutArgsSize(num_fp_args, num_non_fp_args);
}
// Get the frame size for @CriticalNative method stub.
// This must match the size of the extra frame emitted by the compiler at the native call site.
-inline size_t GetCriticalNativeStubFrameSize(const char* shorty, uint32_t shorty_len) {
+inline size_t GetCriticalNativeStubFrameSize(std::string_view shorty) {
// The size of outgoing arguments.
- size_t size = GetCriticalNativeCallArgsSize(shorty, shorty_len);
+ size_t size = GetCriticalNativeCallArgsSize(shorty);
// We can make a tail call if there are no stack args. Otherwise, add space for return PC.
// Note: Result does not neeed to be zero- or sign-extended.
@@ -84,9 +82,9 @@ inline size_t GetCriticalNativeStubFrameSize(const char* shorty, uint32_t shorty
// Get the frame size for direct call to a @CriticalNative method.
// This must match the size of the frame emitted by the JNI compiler at the native call site.
-inline size_t GetCriticalNativeDirectCallFrameSize(const char* shorty, uint32_t shorty_len) {
+inline size_t GetCriticalNativeDirectCallFrameSize(std::string_view shorty) {
// The size of outgoing arguments.
- size_t size = GetCriticalNativeCallArgsSize(shorty, shorty_len);
+ size_t size = GetCriticalNativeCallArgsSize(shorty);
// No return PC to save.
return RoundUp(size, kNativeStackAlignment);
diff --git a/runtime/arch/x86/jni_frame_x86.h b/runtime/arch/x86/jni_frame_x86.h
index 0d95f50d7a..94fc80d410 100644
--- a/runtime/arch/x86/jni_frame_x86.h
+++ b/runtime/arch/x86/jni_frame_x86.h
@@ -41,20 +41,18 @@ inline size_t GetNativeOutArgsSize(size_t num_args, size_t num_long_or_double_ar
}
// Get stack args size for @CriticalNative method calls.
-inline size_t GetCriticalNativeCallArgsSize(const char* shorty, uint32_t shorty_len) {
- DCHECK_EQ(shorty_len, strlen(shorty));
-
+inline size_t GetCriticalNativeCallArgsSize(std::string_view shorty) {
size_t num_long_or_double_args =
- std::count_if(shorty + 1, shorty + shorty_len, [](char c) { return c == 'J' || c == 'D'; });
+ std::count_if(shorty.begin() + 1, shorty.end(), [](char c) { return c == 'J' || c == 'D'; });
- return GetNativeOutArgsSize(/*num_args=*/ shorty_len - 1u, num_long_or_double_args);
+ return GetNativeOutArgsSize(/*num_args=*/ shorty.length() - 1u, num_long_or_double_args);
}
// Get the frame size for @CriticalNative method stub.
// This must match the size of the frame emitted by the JNI compiler at the native call site.
-inline size_t GetCriticalNativeStubFrameSize(const char* shorty, uint32_t shorty_len) {
+inline size_t GetCriticalNativeStubFrameSize(std::string_view shorty) {
// The size of outgoing arguments.
- size_t size = GetCriticalNativeCallArgsSize(shorty, shorty_len);
+ size_t size = GetCriticalNativeCallArgsSize(shorty);
// We can make a tail call if there are no stack args and the return type is not
// FP type (needs moving from ST0 to MMX0) and we do not need to extend the result.
@@ -70,9 +68,9 @@ inline size_t GetCriticalNativeStubFrameSize(const char* shorty, uint32_t shorty
// Get the frame size for direct call to a @CriticalNative method.
// This must match the size of the extra frame emitted by the compiler at the native call site.
-inline size_t GetCriticalNativeDirectCallFrameSize(const char* shorty, uint32_t shorty_len) {
+inline size_t GetCriticalNativeDirectCallFrameSize(std::string_view shorty) {
// The size of outgoing arguments.
- size_t size = GetCriticalNativeCallArgsSize(shorty, shorty_len);
+ size_t size = GetCriticalNativeCallArgsSize(shorty);
// No return PC to save, zero- and sign-extension and FP value moves are handled by the caller.
return RoundUp(size, kNativeStackAlignment);
diff --git a/runtime/arch/x86_64/jni_frame_x86_64.h b/runtime/arch/x86_64/jni_frame_x86_64.h
index 1e16f5f016..b934bfdd3b 100644
--- a/runtime/arch/x86_64/jni_frame_x86_64.h
+++ b/runtime/arch/x86_64/jni_frame_x86_64.h
@@ -60,21 +60,19 @@ inline size_t GetNativeOutArgsSize(size_t num_fp_args, size_t num_non_fp_args) {
}
// Get stack args size for @CriticalNative method calls.
-inline size_t GetCriticalNativeCallArgsSize(const char* shorty, uint32_t shorty_len) {
- DCHECK_EQ(shorty_len, strlen(shorty));
-
+inline size_t GetCriticalNativeCallArgsSize(std::string_view shorty) {
size_t num_fp_args =
- std::count_if(shorty + 1, shorty + shorty_len, [](char c) { return c == 'F' || c == 'D'; });
- size_t num_non_fp_args = shorty_len - 1u - num_fp_args;
+ std::count_if(shorty.begin() + 1, shorty.end(), [](char c) { return c == 'F' || c == 'D'; });
+ size_t num_non_fp_args = shorty.length() - 1u - num_fp_args;
return GetNativeOutArgsSize(num_fp_args, num_non_fp_args);
}
// Get the frame size for @CriticalNative method stub.
// This must match the size of the frame emitted by the JNI compiler at the native call site.
-inline size_t GetCriticalNativeStubFrameSize(const char* shorty, uint32_t shorty_len) {
+inline size_t GetCriticalNativeStubFrameSize(std::string_view shorty) {
// The size of outgoing arguments.
- size_t size = GetCriticalNativeCallArgsSize(shorty, shorty_len);
+ size_t size = GetCriticalNativeCallArgsSize(shorty);
// We always need to spill xmm12-xmm15 as they are managed callee-saves
// but not native callee-saves.
@@ -87,9 +85,9 @@ inline size_t GetCriticalNativeStubFrameSize(const char* shorty, uint32_t shorty
// Get the frame size for direct call to a @CriticalNative method.
// This must match the size of the extra frame emitted by the compiler at the native call site.
-inline size_t GetCriticalNativeDirectCallFrameSize(const char* shorty, uint32_t shorty_len) {
+inline size_t GetCriticalNativeDirectCallFrameSize(std::string_view shorty) {
// The size of outgoing arguments.
- size_t size = GetCriticalNativeCallArgsSize(shorty, shorty_len);
+ size_t size = GetCriticalNativeCallArgsSize(shorty);
// No return PC to save, zero- and sign-extension are handled by the caller.
return RoundUp(size, kNativeStackAlignment);
diff --git a/runtime/entrypoints/jni/jni_entrypoints.cc b/runtime/entrypoints/jni/jni_entrypoints.cc
index 6e6763b1e1..fc18269e7b 100644
--- a/runtime/entrypoints/jni/jni_entrypoints.cc
+++ b/runtime/entrypoints/jni/jni_entrypoints.cc
@@ -138,22 +138,21 @@ extern "C" size_t artCriticalNativeFrameSize(ArtMethod* method, uintptr_t caller
if (method->IsNative()) {
// Get the method's shorty.
DCHECK(method->IsCriticalNative());
- uint32_t shorty_len;
- const char* shorty = method->GetShorty(&shorty_len);
+ std::string_view shorty = method->GetShortyView();
// Return the platform-dependent stub frame size.
switch (kRuntimeISA) {
case InstructionSet::kArm:
case InstructionSet::kThumb2:
- return arm::GetCriticalNativeStubFrameSize(shorty, shorty_len);
+ return arm::GetCriticalNativeStubFrameSize(shorty);
case InstructionSet::kArm64:
- return arm64::GetCriticalNativeStubFrameSize(shorty, shorty_len);
+ return arm64::GetCriticalNativeStubFrameSize(shorty);
case InstructionSet::kRiscv64:
- return riscv64::GetCriticalNativeStubFrameSize(shorty, shorty_len);
+ return riscv64::GetCriticalNativeStubFrameSize(shorty);
case InstructionSet::kX86:
- return x86::GetCriticalNativeStubFrameSize(shorty, shorty_len);
+ return x86::GetCriticalNativeStubFrameSize(shorty);
case InstructionSet::kX86_64:
- return x86_64::GetCriticalNativeStubFrameSize(shorty, shorty_len);
+ return x86_64::GetCriticalNativeStubFrameSize(shorty);
default:
UNIMPLEMENTED(FATAL) << kRuntimeISA;
UNREACHABLE();
@@ -176,22 +175,21 @@ extern "C" size_t artCriticalNativeFrameSize(ArtMethod* method, uintptr_t caller
// Get the callee shorty.
const DexFile* dex_file = caller->GetDexFile();
uint32_t method_idx = GetInvokeStaticMethodIndex(caller, dex_pc);
- uint32_t shorty_len;
- const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(method_idx), &shorty_len);
+ std::string_view shorty = dex_file->GetMethodShortyView(method_idx);
// Return the platform-dependent direct call frame size.
switch (kRuntimeISA) {
case InstructionSet::kArm:
case InstructionSet::kThumb2:
- return arm::GetCriticalNativeDirectCallFrameSize(shorty, shorty_len);
+ return arm::GetCriticalNativeDirectCallFrameSize(shorty);
case InstructionSet::kArm64:
- return arm64::GetCriticalNativeDirectCallFrameSize(shorty, shorty_len);
+ return arm64::GetCriticalNativeDirectCallFrameSize(shorty);
case InstructionSet::kRiscv64:
- return riscv64::GetCriticalNativeDirectCallFrameSize(shorty, shorty_len);
+ return riscv64::GetCriticalNativeDirectCallFrameSize(shorty);
case InstructionSet::kX86:
- return x86::GetCriticalNativeDirectCallFrameSize(shorty, shorty_len);
+ return x86::GetCriticalNativeDirectCallFrameSize(shorty);
case InstructionSet::kX86_64:
- return x86_64::GetCriticalNativeDirectCallFrameSize(shorty, shorty_len);
+ return x86_64::GetCriticalNativeDirectCallFrameSize(shorty);
default:
UNIMPLEMENTED(FATAL) << kRuntimeISA;
UNREACHABLE();