diff options
author | 2021-12-03 15:55:26 +0000 | |
---|---|---|
committer | 2021-12-06 09:39:04 +0000 | |
commit | 3a50f34c84f79048dfa19b5a4a66e38a0d148a5c (patch) | |
tree | 7a283ec8ca7d401bc353e66458a05c09848f311d /compiler | |
parent | 8d011c7e70409efae542a2f518f336f4fe5ff97e (diff) |
Add tests for `art_jni_{lock,unlock}_object`.
Exercise various branches to the slow paths.
Test: m test-art-host-gtest
Test: run-gtests.sh
Test: Manual - reintroduce bug from
https://android-review.googlesource.com/1897061
and `run-gtests.sh` to see it fail.
Bug: 172332525
Bug: 208876897
Change-Id: I3ae9000a61fae9d4a318655d0a1ebc2f32390ef3
Diffstat (limited to 'compiler')
-rw-r--r-- | compiler/jni/jni_compiler_test.cc | 90 |
1 file changed, 90 insertions, 0 deletions
diff --git a/compiler/jni/jni_compiler_test.cc b/compiler/jni/jni_compiler_test.cc index d63ae1737f..a222ff3708 100644 --- a/compiler/jni/jni_compiler_test.cc +++ b/compiler/jni/jni_compiler_test.cc @@ -337,6 +337,8 @@ class JniCompilerTest : public CommonCompilerTest { static jobject jobj_; static jobject class_loader_; + static LockWord GetLockWord(jobject obj); + protected: // We have to list the methods here so we can share them between default and generic JNI. void CompileAndRunNoArgMethodImpl(); @@ -476,6 +478,11 @@ mirror::Object* JniCompilerTest::JniMethodEndWithReferenceSynchronizedOverride( return raw_result; } +LockWord JniCompilerTest::GetLockWord(jobject obj) { + ScopedObjectAccess soa(Thread::Current()); + return soa.Decode<mirror::Object>(obj)->GetLockWord(/*as_volatile=*/ false); +} + // Test the normal compiler and normal generic JNI only. // The following features are unsupported in @FastNative: // 1) synchronized keyword @@ -896,6 +903,48 @@ void JniCompilerTest::CompileAndRun_fooJJ_synchronizedImpl() { EXPECT_EQ(a | b, result); EXPECT_EQ(1, gJava_MyClassNatives_fooJJ_synchronized_calls[gCurrentJni]); + // Exercise recursive thin locking/unlocking. + // Note: Thin lock count 0 means locked once. + env_->MonitorEnter(jobj_); + LockWord lock_word = GetLockWord(jobj_); + ASSERT_EQ(lock_word.GetState(), LockWord::kThinLocked); + ASSERT_EQ(lock_word.ThinLockCount(), 0u); + result = env_->CallNonvirtualLongMethod(jobj_, jklass_, jmethod_, a, b); + EXPECT_EQ(a | b, result); + EXPECT_EQ(2, gJava_MyClassNatives_fooJJ_synchronized_calls[gCurrentJni]); + lock_word = GetLockWord(jobj_); + ASSERT_EQ(lock_word.GetState(), LockWord::kThinLocked); + ASSERT_EQ(lock_word.ThinLockCount(), 0u); + env_->MonitorExit(jobj_); + lock_word = GetLockWord(jobj_); + ASSERT_EQ(lock_word.GetState(), LockWord::kUnlocked); + + // Exercise lock inflation due to thin lock count overflow. 
+ constexpr uint32_t kMaxThinLockRecursiveLocks = 1u << LockWord::kThinLockCountSize; + for (uint32_t i = 0; i != kMaxThinLockRecursiveLocks; ++i) { + env_->MonitorEnter(jobj_); + lock_word = GetLockWord(jobj_); + ASSERT_EQ(lock_word.GetState(), LockWord::kThinLocked); + ASSERT_EQ(lock_word.ThinLockCount(), i); + } + result = env_->CallNonvirtualLongMethod(jobj_, jklass_, jmethod_, a, b); + EXPECT_EQ(a | b, result); + EXPECT_EQ(3, gJava_MyClassNatives_fooJJ_synchronized_calls[gCurrentJni]); + lock_word = GetLockWord(jobj_); + ASSERT_EQ(lock_word.GetState(), LockWord::kFatLocked); + for (uint32_t i = 0; i != kMaxThinLockRecursiveLocks; ++i) { + env_->MonitorExit(jobj_); // Remains "fat-locked" even if actually unlocked. + } + + // Exercise locking for "fat-locked". + lock_word = GetLockWord(jobj_); + ASSERT_EQ(lock_word.GetState(), LockWord::kFatLocked); + result = env_->CallNonvirtualLongMethod(jobj_, jklass_, jmethod_, a, b); + EXPECT_EQ(a | b, result); + EXPECT_EQ(4, gJava_MyClassNatives_fooJJ_synchronized_calls[gCurrentJni]); + lock_word = GetLockWord(jobj_); + ASSERT_EQ(lock_word.GetState(), LockWord::kFatLocked); + gJava_MyClassNatives_fooJJ_synchronized_calls[gCurrentJni] = 0; } @@ -1214,6 +1263,47 @@ void JniCompilerTest::CompileAndRunStaticSynchronizedIntObjectObjectMethodImpl() EXPECT_TRUE(env_->IsSameObject(nullptr, result)); EXPECT_EQ(7, gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni]); + // Exercise recursive thin locking/unlocking. + // Note: Thin lock count 0 means locked once. 
+ env_->MonitorEnter(jklass_); + LockWord lock_word = GetLockWord(jklass_); + ASSERT_EQ(lock_word.GetState(), LockWord::kThinLocked); + ASSERT_EQ(lock_word.ThinLockCount(), 0u); + result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, jobj_, nullptr); + EXPECT_TRUE(env_->IsSameObject(nullptr, result)); + EXPECT_EQ(8, gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni]); + lock_word = GetLockWord(jklass_); + ASSERT_EQ(lock_word.GetState(), LockWord::kThinLocked); + ASSERT_EQ(lock_word.ThinLockCount(), 0u); + env_->MonitorExit(jklass_); + lock_word = GetLockWord(jklass_); + ASSERT_EQ(lock_word.GetState(), LockWord::kUnlocked); + + // Exercise lock inflation due to thin lock count overflow. + constexpr uint32_t kMaxThinLockRecursiveLocks = 1u << LockWord::kThinLockCountSize; + for (uint32_t i = 0; i != kMaxThinLockRecursiveLocks; ++i) { + env_->MonitorEnter(jklass_); + lock_word = GetLockWord(jklass_); + ASSERT_EQ(lock_word.GetState(), LockWord::kThinLocked); + ASSERT_EQ(lock_word.ThinLockCount(), i); + } + result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, jobj_, nullptr); + EXPECT_TRUE(env_->IsSameObject(nullptr, result)); + EXPECT_EQ(9, gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni]); + lock_word = GetLockWord(jklass_); + ASSERT_EQ(lock_word.GetState(), LockWord::kFatLocked); + for (uint32_t i = 0; i != kMaxThinLockRecursiveLocks; ++i) { + env_->MonitorExit(jklass_); // Remains "fat-locked" even if actually unlocked. + } + + // Exercise locking for "fat-locked". + lock_word = GetLockWord(jklass_); + result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, jobj_, nullptr); + EXPECT_TRUE(env_->IsSameObject(nullptr, result)); + EXPECT_EQ(10, gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni]); + lock_word = GetLockWord(jklass_); + ASSERT_EQ(lock_word.GetState(), LockWord::kFatLocked); + gJava_MyClassNatives_fooSSIOO_calls[gCurrentJni] = 0; } |