Add tests for `art_jni_{lock,unlock}_object`.
Exercise various branches to the slow paths.
Test: m test-art-host-gtest
Test: run-gtests.sh
Test: Manual - reintroduce bug from
https://android-review.googlesource.com/1897061
and `run-gtests.sh` to see it fail.
Bug: 172332525
Bug: 208876897
Change-Id: I3ae9000a61fae9d4a318655d0a1ebc2f32390ef3
diff --git a/compiler/jni/jni_compiler_test.cc b/compiler/jni/jni_compiler_test.cc
index d63ae17..a222ff3 100644
--- a/compiler/jni/jni_compiler_test.cc
+++ b/compiler/jni/jni_compiler_test.cc
@@ -337,6 +337,8 @@
static jobject jobj_;
static jobject class_loader_;
+ static LockWord GetLockWord(jobject obj);
+
protected:
// We have to list the methods here so we can share them between default and generic JNI.
void CompileAndRunNoArgMethodImpl();
@@ -476,6 +478,11 @@
return raw_result;
}
+// Reads the current lock word of `obj` under ScopedObjectAccess (non-volatile read),
+// so tests can assert on the lock state (unlocked / thin-locked / fat-locked).
+LockWord JniCompilerTest::GetLockWord(jobject obj) {
+  ScopedObjectAccess soa(Thread::Current());
+  return soa.Decode<mirror::Object>(obj)->GetLockWord(/*as_volatile=*/ false);
+}
+
// Test the normal compiler and normal generic JNI only.
// The following features are unsupported in @FastNative:
// 1) synchronized keyword
@@ -896,6 +903,48 @@
EXPECT_EQ(a | b, result);
EXPECT_EQ(1, gJava_MyClassNatives_fooJJ_synchronized_calls[gCurrentJni]);
+ // Exercise recursive thin locking/unlocking.
+ // Note: Thin lock count 0 means locked once.
+ env_->MonitorEnter(jobj_);
+ LockWord lock_word = GetLockWord(jobj_);
+ ASSERT_EQ(lock_word.GetState(), LockWord::kThinLocked);
+ ASSERT_EQ(lock_word.ThinLockCount(), 0u);
+ result = env_->CallNonvirtualLongMethod(jobj_, jklass_, jmethod_, a, b);
+ EXPECT_EQ(a | b, result);
+ EXPECT_EQ(2, gJava_MyClassNatives_fooJJ_synchronized_calls[gCurrentJni]);
+ lock_word = GetLockWord(jobj_);
+ ASSERT_EQ(lock_word.GetState(), LockWord::kThinLocked);
+ ASSERT_EQ(lock_word.ThinLockCount(), 0u);
+ env_->MonitorExit(jobj_);
+ lock_word = GetLockWord(jobj_);
+ ASSERT_EQ(lock_word.GetState(), LockWord::kUnlocked);
+
+ // Exercise lock inflation due to thin lock count overflow.
+ constexpr uint32_t kMaxThinLockRecursiveLocks = 1u << LockWord::kThinLockCountSize;
+ for (uint32_t i = 0; i != kMaxThinLockRecursiveLocks; ++i) {
+ env_->MonitorEnter(jobj_);
+ lock_word = GetLockWord(jobj_);
+ ASSERT_EQ(lock_word.GetState(), LockWord::kThinLocked);
+ ASSERT_EQ(lock_word.ThinLockCount(), i);
+ }
+ result = env_->CallNonvirtualLongMethod(jobj_, jklass_, jmethod_, a, b);
+ EXPECT_EQ(a | b, result);
+ EXPECT_EQ(3, gJava_MyClassNatives_fooJJ_synchronized_calls[gCurrentJni]);
+ lock_word = GetLockWord(jobj_);
+ ASSERT_EQ(lock_word.GetState(), LockWord::kFatLocked);
+ for (uint32_t i = 0; i != kMaxThinLockRecursiveLocks; ++i) {
+ env_->MonitorExit(jobj_); // Remains "fat-locked" even if actually unlocked.
+ }
+
+ // Exercise locking for "fat-locked".
+ lock_word = GetLockWord(jobj_);
+ ASSERT_EQ(lock_word.GetState(), LockWord::kFatLocked);
+ result = env_->CallNonvirtualLongMethod(jobj_, jklass_, jmethod_, a, b);
+ EXPECT_EQ(a | b, result);
+ EXPECT_EQ(4, gJava_MyClassNatives_fooJJ_synchronized_calls[gCurrentJni]);
+ lock_word = GetLockWord(jobj_);
+ ASSERT_EQ(lock_word.GetState(), LockWord::kFatLocked);
+
gJava_MyClassNatives_fooJJ_synchronized_calls[gCurrentJni] = 0;
}
@@ -1214,6 +1263,47 @@
EXPECT_TRUE(env_->IsSameObject(nullptr, result));
EXPECT_EQ(7, gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni]);
+ // Exercise recursive thin locking/unlocking.
+ // Note: Thin lock count 0 means locked once.
+ env_->MonitorEnter(jklass_);
+ LockWord lock_word = GetLockWord(jklass_);
+ ASSERT_EQ(lock_word.GetState(), LockWord::kThinLocked);
+ ASSERT_EQ(lock_word.ThinLockCount(), 0u);
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, jobj_, nullptr);
+ EXPECT_TRUE(env_->IsSameObject(nullptr, result));
+ EXPECT_EQ(8, gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni]);
+ lock_word = GetLockWord(jklass_);
+ ASSERT_EQ(lock_word.GetState(), LockWord::kThinLocked);
+ ASSERT_EQ(lock_word.ThinLockCount(), 0u);
+ env_->MonitorExit(jklass_);
+ lock_word = GetLockWord(jklass_);
+ ASSERT_EQ(lock_word.GetState(), LockWord::kUnlocked);
+
+ // Exercise lock inflation due to thin lock count overflow.
+ constexpr uint32_t kMaxThinLockRecursiveLocks = 1u << LockWord::kThinLockCountSize;
+ for (uint32_t i = 0; i != kMaxThinLockRecursiveLocks; ++i) {
+ env_->MonitorEnter(jklass_);
+ lock_word = GetLockWord(jklass_);
+ ASSERT_EQ(lock_word.GetState(), LockWord::kThinLocked);
+ ASSERT_EQ(lock_word.ThinLockCount(), i);
+ }
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, jobj_, nullptr);
+ EXPECT_TRUE(env_->IsSameObject(nullptr, result));
+ EXPECT_EQ(9, gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni]);
+ lock_word = GetLockWord(jklass_);
+ ASSERT_EQ(lock_word.GetState(), LockWord::kFatLocked);
+ for (uint32_t i = 0; i != kMaxThinLockRecursiveLocks; ++i) {
+ env_->MonitorExit(jklass_); // Remains "fat-locked" even if actually unlocked.
+ }
+
+  // Exercise locking for "fat-locked".
+  lock_word = GetLockWord(jklass_);
+  // Precondition: the monitor must still be inflated from the overflow above.
+  ASSERT_EQ(lock_word.GetState(), LockWord::kFatLocked);
+  result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, jobj_, nullptr);
+  EXPECT_TRUE(env_->IsSameObject(nullptr, result));
+  EXPECT_EQ(10, gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni]);
+  lock_word = GetLockWord(jklass_);
+  ASSERT_EQ(lock_word.GetState(), LockWord::kFatLocked);
+
gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni] = 0;
}