ART: Use StackReference in Quick Stack Frame
The method reference at the bottom of a quick frame is a stack
reference and not a native pointer. This is important for 64b
architectures, where the two differ in size: a stack reference is
32 bits while a native pointer is 64 bits.
Change key methods to have StackReference<mirror::ArtMethod>*
parameter instead of mirror::ArtMethod**. Make changes to
invoke stubs for 64b archs, change the frame setup for JNI code
(both generic JNI and compilers), tie up loose ends.
Tested on x86 and x86-64 with host tests. On x86-64, tests succeed
with jni compiler activated. x86-64 QCG was not tested.
Tested on ARM32 with device tests.
Fix ARM64 not saving x19 (used for wSUSPEND) on upcalls.
Tested on ARM64 in interpreter-only + generic-jni mode.
Fix ARM64 JNI Compiler to work with the CL.
Tested on ARM64 in interpreter-only + jni compiler.
Change-Id: I77931a0cbadd04d163b3eb8d6f6a6f8740578f13
diff --git a/compiler/jni/jni_compiler_test.cc b/compiler/jni/jni_compiler_test.cc
index 9927fe1..8f4eddb 100644
--- a/compiler/jni/jni_compiler_test.cc
+++ b/compiler/jni/jni_compiler_test.cc
@@ -60,7 +60,7 @@
} else {
method = c->FindVirtualMethod(method_name, method_sig);
}
- ASSERT_TRUE(method != NULL) << method_name << " " << method_sig;
+ ASSERT_TRUE(method != nullptr) << method_name << " " << method_sig;
if (method->GetEntryPointFromQuickCompiledCode() == nullptr) {
ASSERT_TRUE(method->GetEntryPointFromPortableCompiledCode() == nullptr);
CompileMethod(method);
@@ -88,16 +88,16 @@
// JNI operations after runtime start.
env_ = Thread::Current()->GetJniEnv();
jklass_ = env_->FindClass("MyClassNatives");
- ASSERT_TRUE(jklass_ != NULL) << method_name << " " << method_sig;
+ ASSERT_TRUE(jklass_ != nullptr) << method_name << " " << method_sig;
if (direct) {
jmethod_ = env_->GetStaticMethodID(jklass_, method_name, method_sig);
} else {
jmethod_ = env_->GetMethodID(jklass_, method_name, method_sig);
}
- ASSERT_TRUE(jmethod_ != NULL) << method_name << " " << method_sig;
+ ASSERT_TRUE(jmethod_ != nullptr) << method_name << " " << method_sig;
- if (native_fnptr != NULL) {
+ if (native_fnptr != nullptr) {
JNINativeMethod methods[] = { { method_name, method_sig, native_fnptr } };
ASSERT_EQ(JNI_OK, env_->RegisterNatives(jklass_, methods, 1))
<< method_name << " " << method_sig;
@@ -107,7 +107,7 @@
jmethodID constructor = env_->GetMethodID(jklass_, "<init>", "()V");
jobj_ = env_->NewObject(jklass_, constructor);
- ASSERT_TRUE(jobj_ != NULL) << method_name << " " << method_sig;
+ ASSERT_TRUE(jobj_ != nullptr) << method_name << " " << method_sig;
}
public:
@@ -125,13 +125,14 @@
jobject JniCompilerTest::jobj_;
jobject JniCompilerTest::class_loader_;
+
int gJava_MyClassNatives_foo_calls = 0;
void Java_MyClassNatives_foo(JNIEnv* env, jobject thisObj) {
// 1 = thisObj
EXPECT_EQ(kNative, Thread::Current()->GetState());
Locks::mutator_lock_->AssertNotHeld(Thread::Current());
EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
- EXPECT_TRUE(thisObj != NULL);
+ EXPECT_TRUE(thisObj != nullptr);
EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
gJava_MyClassNatives_foo_calls++;
ScopedObjectAccess soa(Thread::Current());
@@ -151,8 +152,8 @@
TEST_F(JniCompilerTest, CompileAndRunIntMethodThroughStub) {
TEST_DISABLED_FOR_PORTABLE();
- SetUpForTest(false, "bar", "(I)I",
- NULL /* calling through stub will link with &Java_MyClassNatives_bar */);
+ SetUpForTest(false, "bar", "(I)I", nullptr);
+ // calling through stub will link with &Java_MyClassNatives_bar
ScopedObjectAccess soa(Thread::Current());
std::string reason;
@@ -168,8 +169,8 @@
TEST_F(JniCompilerTest, CompileAndRunStaticIntMethodThroughStub) {
TEST_DISABLED_FOR_PORTABLE();
- SetUpForTest(true, "sbar", "(I)I",
- NULL /* calling through stub will link with &Java_MyClassNatives_sbar */);
+ SetUpForTest(true, "sbar", "(I)I", nullptr);
+ // calling through stub will link with &Java_MyClassNatives_sbar
ScopedObjectAccess soa(Thread::Current());
std::string reason;
@@ -188,7 +189,7 @@
// 1 = thisObj
EXPECT_EQ(kNative, Thread::Current()->GetState());
EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
- EXPECT_TRUE(thisObj != NULL);
+ EXPECT_TRUE(thisObj != nullptr);
EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
gJava_MyClassNatives_fooI_calls++;
ScopedObjectAccess soa(Thread::Current());
@@ -215,7 +216,7 @@
// 1 = thisObj
EXPECT_EQ(kNative, Thread::Current()->GetState());
EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
- EXPECT_TRUE(thisObj != NULL);
+ EXPECT_TRUE(thisObj != nullptr);
EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
gJava_MyClassNatives_fooII_calls++;
ScopedObjectAccess soa(Thread::Current());
@@ -243,7 +244,7 @@
// 1 = thisObj
EXPECT_EQ(kNative, Thread::Current()->GetState());
EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
- EXPECT_TRUE(thisObj != NULL);
+ EXPECT_TRUE(thisObj != nullptr);
EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
gJava_MyClassNatives_fooJJ_calls++;
ScopedObjectAccess soa(Thread::Current());
@@ -272,7 +273,7 @@
// 1 = thisObj
EXPECT_EQ(kNative, Thread::Current()->GetState());
EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
- EXPECT_TRUE(thisObj != NULL);
+ EXPECT_TRUE(thisObj != nullptr);
EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
gJava_MyClassNatives_fooDD_calls++;
ScopedObjectAccess soa(Thread::Current());
@@ -302,7 +303,7 @@
// 1 = thisObj
EXPECT_EQ(kNative, Thread::Current()->GetState());
EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
- EXPECT_TRUE(thisObj != NULL);
+ EXPECT_TRUE(thisObj != nullptr);
EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
gJava_MyClassNatives_fooJJ_synchronized_calls++;
ScopedObjectAccess soa(Thread::Current());
@@ -329,7 +330,7 @@
// 3 = this + y + z
EXPECT_EQ(kNative, Thread::Current()->GetState());
EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
- EXPECT_TRUE(thisObj != NULL);
+ EXPECT_TRUE(thisObj != nullptr);
EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
gJava_MyClassNatives_fooIOO_calls++;
ScopedObjectAccess soa(Thread::Current());
@@ -353,28 +354,28 @@
reinterpret_cast<void*>(&Java_MyClassNatives_fooIOO));
EXPECT_EQ(0, gJava_MyClassNatives_fooIOO_calls);
- jobject result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 0, NULL, NULL);
+ jobject result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 0, nullptr, nullptr);
EXPECT_TRUE(env_->IsSameObject(jobj_, result));
EXPECT_EQ(1, gJava_MyClassNatives_fooIOO_calls);
- result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 0, NULL, jklass_);
+ result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 0, nullptr, jklass_);
EXPECT_TRUE(env_->IsSameObject(jobj_, result));
EXPECT_EQ(2, gJava_MyClassNatives_fooIOO_calls);
- result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 1, NULL, jklass_);
- EXPECT_TRUE(env_->IsSameObject(NULL, result));
+ result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 1, nullptr, jklass_);
+ EXPECT_TRUE(env_->IsSameObject(nullptr, result));
EXPECT_EQ(3, gJava_MyClassNatives_fooIOO_calls);
- result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 2, NULL, jklass_);
+ result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 2, nullptr, jklass_);
EXPECT_TRUE(env_->IsSameObject(jklass_, result));
EXPECT_EQ(4, gJava_MyClassNatives_fooIOO_calls);
- result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 0, jklass_, NULL);
+ result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 0, jklass_, nullptr);
EXPECT_TRUE(env_->IsSameObject(jobj_, result));
EXPECT_EQ(5, gJava_MyClassNatives_fooIOO_calls);
- result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 1, jklass_, NULL);
+ result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 1, jklass_, nullptr);
EXPECT_TRUE(env_->IsSameObject(jklass_, result));
EXPECT_EQ(6, gJava_MyClassNatives_fooIOO_calls);
- result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 2, jklass_, NULL);
- EXPECT_TRUE(env_->IsSameObject(NULL, result));
+ result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 2, jklass_, nullptr);
+ EXPECT_TRUE(env_->IsSameObject(nullptr, result));
EXPECT_EQ(7, gJava_MyClassNatives_fooIOO_calls);
}
@@ -383,7 +384,7 @@
// 1 = klass
EXPECT_EQ(kNative, Thread::Current()->GetState());
EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
- EXPECT_TRUE(klass != NULL);
+ EXPECT_TRUE(klass != nullptr);
EXPECT_TRUE(env->IsInstanceOf(JniCompilerTest::jobj_, klass));
gJava_MyClassNatives_fooSII_calls++;
ScopedObjectAccess soa(Thread::Current());
@@ -407,7 +408,7 @@
// 1 = klass
EXPECT_EQ(kNative, Thread::Current()->GetState());
EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
- EXPECT_TRUE(klass != NULL);
+ EXPECT_TRUE(klass != nullptr);
EXPECT_TRUE(env->IsInstanceOf(JniCompilerTest::jobj_, klass));
gJava_MyClassNatives_fooSDD_calls++;
ScopedObjectAccess soa(Thread::Current());
@@ -437,7 +438,7 @@
// 3 = klass + y + z
EXPECT_EQ(kNative, Thread::Current()->GetState());
EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
- EXPECT_TRUE(klass != NULL);
+ EXPECT_TRUE(klass != nullptr);
EXPECT_TRUE(env->IsInstanceOf(JniCompilerTest::jobj_, klass));
gJava_MyClassNatives_fooSIOO_calls++;
ScopedObjectAccess soa(Thread::Current());
@@ -462,28 +463,28 @@
reinterpret_cast<void*>(&Java_MyClassNatives_fooSIOO));
EXPECT_EQ(0, gJava_MyClassNatives_fooSIOO_calls);
- jobject result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, NULL, NULL);
+ jobject result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, nullptr, nullptr);
EXPECT_TRUE(env_->IsSameObject(jklass_, result));
EXPECT_EQ(1, gJava_MyClassNatives_fooSIOO_calls);
- result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, NULL, jobj_);
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, nullptr, jobj_);
EXPECT_TRUE(env_->IsSameObject(jklass_, result));
EXPECT_EQ(2, gJava_MyClassNatives_fooSIOO_calls);
- result = env_->CallStaticObjectMethod(jklass_, jmethod_, 1, NULL, jobj_);
- EXPECT_TRUE(env_->IsSameObject(NULL, result));
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 1, nullptr, jobj_);
+ EXPECT_TRUE(env_->IsSameObject(nullptr, result));
EXPECT_EQ(3, gJava_MyClassNatives_fooSIOO_calls);
- result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, NULL, jobj_);
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, nullptr, jobj_);
EXPECT_TRUE(env_->IsSameObject(jobj_, result));
EXPECT_EQ(4, gJava_MyClassNatives_fooSIOO_calls);
- result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, jobj_, NULL);
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, jobj_, nullptr);
EXPECT_TRUE(env_->IsSameObject(jklass_, result));
EXPECT_EQ(5, gJava_MyClassNatives_fooSIOO_calls);
- result = env_->CallStaticObjectMethod(jklass_, jmethod_, 1, jobj_, NULL);
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 1, jobj_, nullptr);
EXPECT_TRUE(env_->IsSameObject(jobj_, result));
EXPECT_EQ(6, gJava_MyClassNatives_fooSIOO_calls);
- result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, jobj_, NULL);
- EXPECT_TRUE(env_->IsSameObject(NULL, result));
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, jobj_, nullptr);
+ EXPECT_TRUE(env_->IsSameObject(nullptr, result));
EXPECT_EQ(7, gJava_MyClassNatives_fooSIOO_calls);
}
@@ -492,7 +493,7 @@
// 3 = klass + y + z
EXPECT_EQ(kNative, Thread::Current()->GetState());
EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
- EXPECT_TRUE(klass != NULL);
+ EXPECT_TRUE(klass != nullptr);
EXPECT_TRUE(env->IsInstanceOf(JniCompilerTest::jobj_, klass));
gJava_MyClassNatives_fooSSIOO_calls++;
ScopedObjectAccess soa(Thread::Current());
@@ -516,28 +517,28 @@
reinterpret_cast<void*>(&Java_MyClassNatives_fooSSIOO));
EXPECT_EQ(0, gJava_MyClassNatives_fooSSIOO_calls);
- jobject result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, NULL, NULL);
+ jobject result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, nullptr, nullptr);
EXPECT_TRUE(env_->IsSameObject(jklass_, result));
EXPECT_EQ(1, gJava_MyClassNatives_fooSSIOO_calls);
- result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, NULL, jobj_);
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, nullptr, jobj_);
EXPECT_TRUE(env_->IsSameObject(jklass_, result));
EXPECT_EQ(2, gJava_MyClassNatives_fooSSIOO_calls);
- result = env_->CallStaticObjectMethod(jklass_, jmethod_, 1, NULL, jobj_);
- EXPECT_TRUE(env_->IsSameObject(NULL, result));
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 1, nullptr, jobj_);
+ EXPECT_TRUE(env_->IsSameObject(nullptr, result));
EXPECT_EQ(3, gJava_MyClassNatives_fooSSIOO_calls);
- result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, NULL, jobj_);
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, nullptr, jobj_);
EXPECT_TRUE(env_->IsSameObject(jobj_, result));
EXPECT_EQ(4, gJava_MyClassNatives_fooSSIOO_calls);
- result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, jobj_, NULL);
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, jobj_, nullptr);
EXPECT_TRUE(env_->IsSameObject(jklass_, result));
EXPECT_EQ(5, gJava_MyClassNatives_fooSSIOO_calls);
- result = env_->CallStaticObjectMethod(jklass_, jmethod_, 1, jobj_, NULL);
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 1, jobj_, nullptr);
EXPECT_TRUE(env_->IsSameObject(jobj_, result));
EXPECT_EQ(6, gJava_MyClassNatives_fooSSIOO_calls);
- result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, jobj_, NULL);
- EXPECT_TRUE(env_->IsSameObject(NULL, result));
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, jobj_, nullptr);
+ EXPECT_TRUE(env_->IsSameObject(nullptr, result));
EXPECT_EQ(7, gJava_MyClassNatives_fooSSIOO_calls);
}
@@ -591,7 +592,7 @@
jint Java_MyClassNatives_nativeUpCall(JNIEnv* env, jobject thisObj, jint i) {
if (i <= 0) {
- // We want to check raw Object*/Array* below
+ // We want to check raw Object* / Array* below
ScopedObjectAccess soa(env);
// Build stack trace
@@ -599,7 +600,7 @@
jobjectArray ste_array = Thread::InternalStackTraceToStackTraceElementArray(soa, internal);
mirror::ObjectArray<mirror::StackTraceElement>* trace_array =
soa.Decode<mirror::ObjectArray<mirror::StackTraceElement>*>(ste_array);
- EXPECT_TRUE(trace_array != NULL);
+ EXPECT_TRUE(trace_array != nullptr);
EXPECT_EQ(11, trace_array->GetLength());
// Check stack trace entries have expected values
@@ -615,9 +616,9 @@
return 0;
} else {
jclass jklass = env->FindClass("MyClassNatives");
- EXPECT_TRUE(jklass != NULL);
+ EXPECT_TRUE(jklass != nullptr);
jmethodID jmethod = env->GetMethodID(jklass, "fooI", "(I)I");
- EXPECT_TRUE(jmethod != NULL);
+ EXPECT_TRUE(jmethod != nullptr);
// Recurse with i - 1
jint result = env->CallNonvirtualIntMethod(thisObj, jklass, jmethod, i - 1);
@@ -721,7 +722,7 @@
TEST_F(JniCompilerTest, GetSinkPropertiesNative) {
TEST_DISABLED_FOR_PORTABLE();
- SetUpForTest(false, "getSinkPropertiesNative", "(Ljava/lang/String;)[Ljava/lang/Object;", NULL);
+ SetUpForTest(false, "getSinkPropertiesNative", "(Ljava/lang/String;)[Ljava/lang/Object;", nullptr);
// This space intentionally left blank. Just testing compilation succeeds.
}
@@ -804,7 +805,7 @@
jfloat Java_MyClassNatives_checkFloats(JNIEnv* env, jobject thisObj, jfloat f1, jfloat f2) {
EXPECT_EQ(kNative, Thread::Current()->GetState());
EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
- EXPECT_TRUE(thisObj != NULL);
+ EXPECT_TRUE(thisObj != nullptr);
EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
ScopedObjectAccess soa(Thread::Current());
EXPECT_EQ(1U, Thread::Current()->NumStackReferences());
@@ -826,12 +827,12 @@
}
void Java_MyClassNatives_checkParameterAlign(JNIEnv* env, jobject thisObj, jint i1, jlong l1) {
- /*EXPECT_EQ(kNative, Thread::Current()->GetState());
- EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
- EXPECT_TRUE(thisObj != NULL);
- EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
- ScopedObjectAccess soa(Thread::Current());
- EXPECT_EQ(1U, Thread::Current()->NumStackReferences());*/
+// EXPECT_EQ(kNative, Thread::Current()->GetState());
+// EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
+// EXPECT_TRUE(thisObj != nullptr);
+// EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
+// ScopedObjectAccess soa(Thread::Current());
+// EXPECT_EQ(1U, Thread::Current()->NumStackReferences());
EXPECT_EQ(i1, 1234);
EXPECT_EQ(l1, INT64_C(0x12345678ABCDEF0));
}
@@ -879,7 +880,7 @@
jobject o248, jobject o249, jobject o250, jobject o251, jobject o252, jobject o253) {
EXPECT_EQ(kNative, Thread::Current()->GetState());
EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
- EXPECT_TRUE(thisObj != NULL);
+ EXPECT_TRUE(thisObj != nullptr);
EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
ScopedObjectAccess soa(Thread::Current());
EXPECT_GE(255U, Thread::Current()->NumStackReferences());
diff --git a/compiler/jni/quick/arm/calling_convention_arm.cc b/compiler/jni/quick/arm/calling_convention_arm.cc
index 649a80f..f0c0ed7 100644
--- a/compiler/jni/quick/arm/calling_convention_arm.cc
+++ b/compiler/jni/quick/arm/calling_convention_arm.cc
@@ -143,9 +143,10 @@
size_t ArmJniCallingConvention::FrameSize() {
// Method*, LR and callee save area size, local reference segment state
- size_t frame_data_size = (3 + CalleeSaveRegisters().size()) * kFramePointerSize;
+ size_t frame_data_size = sizeof(StackReference<mirror::ArtMethod>) +
+ (2 + CalleeSaveRegisters().size()) * kFramePointerSize;
// References plus 2 words for HandleScope header
- size_t handle_scope_size = HandleScope::GetAlignedHandleScopeSizeTarget(kFramePointerSize, ReferenceCount());
+ size_t handle_scope_size = HandleScope::SizeOf(kFramePointerSize, ReferenceCount());
// Plus return value spill area size
return RoundUp(frame_data_size + handle_scope_size + SizeOfReturnValue(), kStackAlignment);
}
diff --git a/compiler/jni/quick/arm64/calling_convention_arm64.cc b/compiler/jni/quick/arm64/calling_convention_arm64.cc
index ffd27ee..0a00d7d 100644
--- a/compiler/jni/quick/arm64/calling_convention_arm64.cc
+++ b/compiler/jni/quick/arm64/calling_convention_arm64.cc
@@ -95,7 +95,7 @@
CHECK(IsCurrentParamOnStack());
FrameOffset result =
FrameOffset(displacement_.Int32Value() + // displacement
- kFramePointerSize + // Method*
+ sizeof(StackReference<mirror::ArtMethod>) + // Method ref
(itr_slots_ * sizeof(uint32_t))); // offset into in args
return result;
}
@@ -196,9 +196,10 @@
size_t Arm64JniCallingConvention::FrameSize() {
// Method*, callee save area size, local reference segment state
- size_t frame_data_size = ((1 + CalleeSaveRegisters().size()) * kFramePointerSize) + sizeof(uint32_t);
+ size_t frame_data_size = sizeof(StackReference<mirror::ArtMethod>) +
+ CalleeSaveRegisters().size() * kFramePointerSize + sizeof(uint32_t);
// References plus 2 words for HandleScope header
- size_t handle_scope_size = HandleScope::GetAlignedHandleScopeSizeTarget(kFramePointerSize, ReferenceCount());
+ size_t handle_scope_size = HandleScope::SizeOf(kFramePointerSize, ReferenceCount());
// Plus return value spill area size
return RoundUp(frame_data_size + handle_scope_size + SizeOfReturnValue(), kStackAlignment);
}
diff --git a/compiler/jni/quick/calling_convention.h b/compiler/jni/quick/calling_convention.h
index 2a6e7d9..efc0b42 100644
--- a/compiler/jni/quick/calling_convention.h
+++ b/compiler/jni/quick/calling_convention.h
@@ -319,7 +319,8 @@
// Position of handle scope and interior fields
FrameOffset HandleScopeOffset() const {
- return FrameOffset(this->displacement_.Int32Value() + frame_pointer_size_); // above Method*
+ return FrameOffset(this->displacement_.Int32Value() + sizeof(StackReference<mirror::ArtMethod>));
+ // above Method reference
}
FrameOffset HandleScopeLinkOffset() const {
diff --git a/compiler/jni/quick/mips/calling_convention_mips.cc b/compiler/jni/quick/mips/calling_convention_mips.cc
index 0402fe6..f7a7be7 100644
--- a/compiler/jni/quick/mips/calling_convention_mips.cc
+++ b/compiler/jni/quick/mips/calling_convention_mips.cc
@@ -147,9 +147,10 @@
size_t MipsJniCallingConvention::FrameSize() {
// Method*, LR and callee save area size, local reference segment state
- size_t frame_data_size = (3 + CalleeSaveRegisters().size()) * kFramePointerSize;
+ size_t frame_data_size = sizeof(StackReference<mirror::ArtMethod>) +
+ (2 + CalleeSaveRegisters().size()) * kFramePointerSize;
// References plus 2 words for HandleScope header
- size_t handle_scope_size = HandleScope::GetAlignedHandleScopeSizeTarget(kFramePointerSize, ReferenceCount());
+ size_t handle_scope_size = HandleScope::SizeOf(kFramePointerSize, ReferenceCount());
// Plus return value spill area size
return RoundUp(frame_data_size + handle_scope_size + SizeOfReturnValue(), kStackAlignment);
}
diff --git a/compiler/jni/quick/x86/calling_convention_x86.cc b/compiler/jni/quick/x86/calling_convention_x86.cc
index 97b4cdf..9bf7d0f 100644
--- a/compiler/jni/quick/x86/calling_convention_x86.cc
+++ b/compiler/jni/quick/x86/calling_convention_x86.cc
@@ -124,9 +124,10 @@
size_t X86JniCallingConvention::FrameSize() {
// Method*, return address and callee save area size, local reference segment state
- size_t frame_data_size = (3 + CalleeSaveRegisters().size()) * kFramePointerSize;
+ size_t frame_data_size = sizeof(StackReference<mirror::ArtMethod>) +
+ (2 + CalleeSaveRegisters().size()) * kFramePointerSize;
// References plus 2 words for HandleScope header
- size_t handle_scope_size = HandleScope::GetAlignedHandleScopeSizeTarget(kFramePointerSize, ReferenceCount());
+ size_t handle_scope_size = HandleScope::SizeOf(kFramePointerSize, ReferenceCount());
// Plus return value spill area size
return RoundUp(frame_data_size + handle_scope_size + SizeOfReturnValue(), kStackAlignment);
}
diff --git a/compiler/jni/quick/x86_64/calling_convention_x86_64.cc b/compiler/jni/quick/x86_64/calling_convention_x86_64.cc
index 4871c87..5febed2 100644
--- a/compiler/jni/quick/x86_64/calling_convention_x86_64.cc
+++ b/compiler/jni/quick/x86_64/calling_convention_x86_64.cc
@@ -96,7 +96,7 @@
FrameOffset X86_64ManagedRuntimeCallingConvention::CurrentParamStackOffset() {
return FrameOffset(displacement_.Int32Value() + // displacement
- kFramePointerSize + // Method*
+ sizeof(StackReference<mirror::ArtMethod>) + // Method ref
(itr_slots_ * sizeof(uint32_t))); // offset into in args
}
@@ -139,9 +139,10 @@
size_t X86_64JniCallingConvention::FrameSize() {
// Method*, return address and callee save area size, local reference segment state
- size_t frame_data_size = (3 + CalleeSaveRegisters().size()) * kFramePointerSize;
+ size_t frame_data_size = sizeof(StackReference<mirror::ArtMethod>) +
+ (2 + CalleeSaveRegisters().size()) * kFramePointerSize;
// References plus link_ (pointer) and number_of_references_ (uint32_t) for HandleScope header
- size_t handle_scope_size = HandleScope::GetAlignedHandleScopeSizeTarget(kFramePointerSize, ReferenceCount());
+ size_t handle_scope_size = HandleScope::SizeOf(kFramePointerSize, ReferenceCount());
// Plus return value spill area size
return RoundUp(frame_data_size + handle_scope_size + SizeOfReturnValue(), kStackAlignment);
}
diff --git a/compiler/utils/arm64/assembler_arm64.cc b/compiler/utils/arm64/assembler_arm64.cc
index 27188b2..009b227 100644
--- a/compiler/utils/arm64/assembler_arm64.cc
+++ b/compiler/utils/arm64/assembler_arm64.cc
@@ -530,7 +530,7 @@
Arm64ManagedRegister scratch = m_scratch.AsArm64();
CHECK(scratch.IsCoreRegister()) << scratch;
// Call *(*(SP + base) + offset)
- LoadFromOffset(scratch.AsCoreRegister(), SP, base.Int32Value());
+ LoadWFromOffset(kLoadWord, scratch.AsOverlappingCoreRegisterLow(), SP, base.Int32Value());
LoadFromOffset(scratch.AsCoreRegister(), scratch.AsCoreRegister(), offs.Int32Value());
___ Blr(reg_x(scratch.AsCoreRegister()));
}
@@ -656,16 +656,17 @@
// trashed by native code.
___ Mov(reg_x(ETR), reg_x(TR));
- // Increate frame to required size - must be at least space to push Method*.
+ // Increase frame to required size - must be at least space to push StackReference<Method>.
CHECK_GT(frame_size, kCalleeSavedRegsSize * kFramePointerSize);
size_t adjust = frame_size - (kCalleeSavedRegsSize * kFramePointerSize);
IncreaseFrameSize(adjust);
- // Write Method*.
- StoreToOffset(X0, SP, 0);
+ // Write StackReference<Method>.
+ DCHECK_EQ(4U, sizeof(StackReference<mirror::ArtMethod>));
+ StoreWToOffset(StoreOperandType::kStoreWord, W0, SP, 0);
// Write out entry spills
- int32_t offset = frame_size + kFramePointerSize;
+ int32_t offset = frame_size + sizeof(StackReference<mirror::ArtMethod>);
for (size_t i = 0; i < entry_spills.size(); ++i) {
Arm64ManagedRegister reg = entry_spills.at(i).AsArm64();
if (reg.IsNoRegister()) {
diff --git a/compiler/utils/x86/assembler_x86.cc b/compiler/utils/x86/assembler_x86.cc
index 0791c63..56c6536 100644
--- a/compiler/utils/x86/assembler_x86.cc
+++ b/compiler/utils/x86/assembler_x86.cc
@@ -1411,10 +1411,12 @@
}
// return address then method on stack
addl(ESP, Immediate(-frame_size + (spill_regs.size() * kFramePointerSize) +
- kFramePointerSize /*method*/ + kFramePointerSize /*return address*/));
+ sizeof(StackReference<mirror::ArtMethod>) /*method*/ +
+ kFramePointerSize /*return address*/));
pushl(method_reg.AsX86().AsCpuRegister());
for (size_t i = 0; i < entry_spills.size(); ++i) {
- movl(Address(ESP, frame_size + kFramePointerSize + (i * kFramePointerSize)),
+ movl(Address(ESP, frame_size + sizeof(StackReference<mirror::ArtMethod>) +
+ (i * kFramePointerSize)),
entry_spills.at(i).AsX86().AsCpuRegister());
}
}
@@ -1422,7 +1424,8 @@
void X86Assembler::RemoveFrame(size_t frame_size,
const std::vector<ManagedRegister>& spill_regs) {
CHECK_ALIGNED(frame_size, kStackAlignment);
- addl(ESP, Immediate(frame_size - (spill_regs.size() * kFramePointerSize) - kFramePointerSize));
+ addl(ESP, Immediate(frame_size - (spill_regs.size() * kFramePointerSize) -
+ sizeof(StackReference<mirror::ArtMethod>)));
for (size_t i = 0; i < spill_regs.size(); ++i) {
popl(spill_regs.at(i).AsX86().AsCpuRegister());
}
diff --git a/compiler/utils/x86_64/assembler_x86_64.cc b/compiler/utils/x86_64/assembler_x86_64.cc
index 0ede875..a14551c 100644
--- a/compiler/utils/x86_64/assembler_x86_64.cc
+++ b/compiler/utils/x86_64/assembler_x86_64.cc
@@ -59,7 +59,6 @@
EmitLabel(label, kSize);
}
-
void X86_64Assembler::pushq(CpuRegister reg) {
AssemblerBuffer::EnsureCapacity ensured(&buffer_);
EmitOptionalRex32(reg);
@@ -1652,8 +1651,12 @@
}
// return address then method on stack
addq(CpuRegister(RSP), Immediate(-frame_size + (spill_regs.size() * kFramePointerSize) +
- kFramePointerSize /*method*/ + kFramePointerSize /*return address*/));
- pushq(method_reg.AsX86_64().AsCpuRegister());
+ sizeof(StackReference<mirror::ArtMethod>) /*method*/ +
+ kFramePointerSize /*return address*/));
+
+ DCHECK_EQ(4U, sizeof(StackReference<mirror::ArtMethod>));
+ subq(CpuRegister(RSP), Immediate(4));
+ movl(Address(CpuRegister(RSP), 0), method_reg.AsX86_64().AsCpuRegister());
for (size_t i = 0; i < entry_spills.size(); ++i) {
ManagedRegisterSpill spill = entry_spills.at(i);
@@ -1732,7 +1735,7 @@
void X86_64Assembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
X86_64ManagedRegister src = msrc.AsX86_64();
CHECK(src.IsCpuRegister());
- movq(Address(CpuRegister(RSP), dest), src.AsCpuRegister());
+ movl(Address(CpuRegister(RSP), dest), src.AsCpuRegister());
}
void X86_64Assembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
@@ -2070,7 +2073,7 @@
void X86_64Assembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
CpuRegister scratch = mscratch.AsX86_64().AsCpuRegister();
- movq(scratch, Address(CpuRegister(RSP), base));
+ movl(scratch, Address(CpuRegister(RSP), base));
call(Address(scratch, offset));
}