| author    | 2021-06-01 09:26:55 +0100 |
|-----------|---------------------------|
| committer | 2021-06-01 12:50:50 +0000 |
| commit    | 1d326f94a3fdd6292ccdf0022cedfb2a2b8acfee (patch) |
| tree      | 17fde709095f46eff040585e694b621676e5d0c0 /runtime/handle_scope-inl.h |
| parent    | 654f01cd509ca11eae22177d4e764f1241fb3a53 (diff) |
Avoid race on Thread::tlsPtr_::top_handle_scope.
Require the mutator lock for that field and update tests to hold
the mutator lock when needed. This prevents a GC thread that
executes a thread roots flip on behalf of suspended threads
from racing against construction or destruction of handle
scopes by those threads and possibly seeing invalid values.
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Bug: 189439174
Change-Id: I268a0ef6e5aa838347956febca0d3b6e02fe3ae5
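For readers unfamiliar with the pattern, the sketch below shows the caller-side invariant this change enforces: a handle scope may only be constructed or destroyed while the shared mutator lock is held, which is normally arranged via `ScopedObjectAccess`. This is an illustrative sketch, not code from the patch; the function name `ExampleUseOfHandleScope` and the exact include paths are assumptions.

```cpp
// Illustrative sketch only (not part of this patch).
// Shows the invariant the new AssertSharedHeld() checks verify in debug builds:
// hold the shared mutator lock around handle scope construction and destruction.
#include "handle_scope-inl.h"                  // assumed include paths
#include "mirror/object-inl.h"
#include "scoped_thread_state_change-inl.h"

namespace art {

// Hypothetical helper: decode a jobject into a Handle while the shared mutator
// lock is held, so a GC thread flipping this thread's roots cannot observe a
// half-constructed or half-destroyed scope in Thread::tlsPtr_::top_handle_scope.
static void ExampleUseOfHandleScope(JNIEnv* env, jobject jobj) {
  ScopedObjectAccess soa(env);           // acquires the shared mutator lock
  StackHandleScope<1u> hs(soa.Self());   // ctor pushes the scope; asserts the lock
  Handle<mirror::Object> h = hs.NewHandle(soa.Decode<mirror::Object>(jobj));
  // Use `h` here; when `hs` goes out of scope its dtor pops it, again under the lock.
}

}  // namespace art
```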
Diffstat (limited to 'runtime/handle_scope-inl.h')
-rw-r--r--  runtime/handle_scope-inl.h | 29
1 file changed, 13 insertions(+), 16 deletions(-)
diff --git a/runtime/handle_scope-inl.h b/runtime/handle_scope-inl.h
index cb0333f2d8..3aa9e5221d 100644
--- a/runtime/handle_scope-inl.h
+++ b/runtime/handle_scope-inl.h
@@ -44,26 +44,22 @@ inline FixedSizeHandleScope<kNumReferences>::FixedSizeHandleScope(BaseHandleScop
 }
 
 template<size_t kNumReferences>
-inline FixedSizeHandleScope<kNumReferences>::FixedSizeHandleScope(BaseHandleScope* link)
-    : HandleScope(link, kNumReferences) {
-  static_assert(kNumReferences >= 1, "FixedSizeHandleScope must contain at least 1 reference");
-  DCHECK_EQ(&storage_[0], GetReferences());  // TODO: Figure out how to use a compile assert.
-  for (size_t i = 0; i < kNumReferences; ++i) {
-    SetReferenceToNull(i);
-  }
-}
-
-template<size_t kNumReferences>
 inline StackHandleScope<kNumReferences>::StackHandleScope(Thread* self,
                                                           ObjPtr<mirror::Object> fill_value)
     : FixedSizeHandleScope<kNumReferences>(self->GetTopHandleScope(), fill_value),
       self_(self) {
   DCHECK_EQ(self, Thread::Current());
+  if (kDebugLocking) {
+    Locks::mutator_lock_->AssertSharedHeld(self_);
+  }
   self_->PushHandleScope(this);
 }
 
 template<size_t kNumReferences>
 inline StackHandleScope<kNumReferences>::~StackHandleScope() {
+  if (kDebugLocking) {
+    Locks::mutator_lock_->AssertSharedHeld(self_);
+  }
   BaseHandleScope* top_handle_scope = self_->PopHandleScope();
   DCHECK_EQ(top_handle_scope, this);
 }
@@ -161,12 +157,6 @@ inline void FixedSizeHandleScope<kNumReferences>::SetReference(size_t i,
   GetReferences()[i].Assign(object);
 }
 
-template<size_t kNumReferences>
-inline void FixedSizeHandleScope<kNumReferences>::SetReferenceToNull(size_t i) {
-  DCHECK_LT(i, kNumReferences);
-  GetReferences()[i].Assign(nullptr);
-}
-
 // Number of references contained within this handle scope.
 inline uint32_t BaseHandleScope::NumberOfReferences() const {
   return LIKELY(!IsVariableSized())
@@ -227,10 +217,17 @@ inline VariableSizedHandleScope::VariableSizedHandleScope(Thread* const self)
       self_(self),
       current_scope_(&first_scope_),
       first_scope_(/*link=*/ nullptr) {
+  DCHECK_EQ(self, Thread::Current());
+  if (kDebugLocking) {
+    Locks::mutator_lock_->AssertSharedHeld(self_);
+  }
   self_->PushHandleScope(this);
 }
 
 inline VariableSizedHandleScope::~VariableSizedHandleScope() {
+  if (kDebugLocking) {
+    Locks::mutator_lock_->AssertSharedHeld(self_);
+  }
   BaseHandleScope* top_handle_scope = self_->PopHandleScope();
   DCHECK_EQ(top_handle_scope, this);
   // Don't delete first_scope_ since it is not heap allocated.
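The commit message also notes that tests were updated to hold the mutator lock where needed. The hedged sketch below shows what that pattern looks like on the test side; the fixture name `HandleScopeTest`, the test name, and the include paths are assumptions for illustration, not the actual test changes from this CL.

```cpp
// Illustrative gtest-style sketch only; not taken from the real test updates.
#include "common_runtime_test.h"               // assumed include paths
#include "handle_scope-inl.h"
#include "mirror/object-inl.h"
#include "scoped_thread_state_change-inl.h"

namespace art {

class HandleScopeTest : public CommonRuntimeTest {};  // hypothetical fixture

TEST_F(HandleScopeTest, CreateScopeUnderMutatorLock) {
  Thread* const self = Thread::Current();
  // Without this, the new AssertSharedHeld(self_) checks in the handle scope
  // constructor and destructor would fire in debug builds.
  ScopedObjectAccess soa(self);
  VariableSizedHandleScope hs(self);
  Handle<mirror::Object> handle = hs.NewHandle<mirror::Object>(nullptr);
  EXPECT_TRUE(handle.Get() == nullptr);
}

}  // namespace art
```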