author      2015-07-16 20:32:27 -0700
committer   2015-07-22 15:13:56 -0700
commit      90443477f9a0061581c420775ce3b7eeae7468bc (patch)
tree        8c74b81dfae162e0fd0ccf8d5ac50827ba815174 /runtime/scoped_thread_state_change.h
parent      6078aec213dfaf111c29969706e8e5967cfc9bea (diff)
Move to newer clang annotations
Also enable -Wthread-safety-negative.
Changes:
  Switch to capabilities and negative capabilities.

Future work:
  Use capabilities to implement uninterruptible annotations that work
  with AssertNoThreadSuspension.
Bug: 20072211
Change-Id: I42fcbe0300d98a831c89d1eff3ecd5a7e99ebf33
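
For context, a minimal, self-contained sketch (not ART source; SuspendLock,
suspend_count, and the function names below are hypothetical) of how the
capability annotations and negative capabilities adopted here behave under
clang++ -c -Wthread-safety -Wthread-safety-negative:

// Hedged illustration of Clang capability annotations with a negative capability.
class __attribute__((capability("mutex"))) SuspendLock {
 public:
  void Lock() __attribute__((acquire_capability()));
  void Unlock() __attribute__((release_capability()));
};

SuspendLock suspend_lock;
int suspend_count __attribute__((guarded_by(suspend_lock))) = 0;

// Old style: LOCKS_EXCLUDED(suspend_lock) was advisory -- the analysis only
// complained if a caller provably held the lock. New style:
// REQUIRES(!suspend_lock) declares a negative capability, and
// -Wthread-safety-negative makes callers prove the lock is not held,
// propagating the requirement up the call graph.
void IncrementSuspendCount() __attribute__((requires_capability(!suspend_lock))) {
  suspend_lock.Lock();  // OK: the negative capability shows it is not already held.
  ++suspend_count;
  suspend_lock.Unlock();
}

// OK: restates the negative requirement, so the analysis is satisfied.
void Caller() __attribute__((requires_capability(!suspend_lock))) {
  IncrementSuspendCount();
}

// Warns under -Wthread-safety-negative: the call requires the negative
// capability '!suspend_lock', which this function neither holds nor declares.
void BadCaller() {
  IncrementSuspendCount();
}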
Diffstat (limited to 'runtime/scoped_thread_state_change.h')
-rw-r--r--  runtime/scoped_thread_state_change.h  28
1 file changed, 14 insertions, 14 deletions
diff --git a/runtime/scoped_thread_state_change.h b/runtime/scoped_thread_state_change.h
index 1cc2df65ba..b90aa0ec0e 100644
--- a/runtime/scoped_thread_state_change.h
+++ b/runtime/scoped_thread_state_change.h
@@ -34,7 +34,7 @@ namespace art {
 class ScopedThreadStateChange {
  public:
   ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
-      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+      REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
       : self_(self), thread_state_(new_thread_state), expected_has_no_thread_(false) {
     if (UNLIKELY(self_ == nullptr)) {
       // Value chosen arbitrarily and won't be used in the destructor since thread_ == null.
@@ -59,7 +59,7 @@ class ScopedThreadStateChange {
     }
   }
 
-  ~ScopedThreadStateChange() LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE {
+  ~ScopedThreadStateChange() REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE {
     if (UNLIKELY(self_ == nullptr)) {
       if (!expected_has_no_thread_) {
         Runtime* runtime = Runtime::Current();
@@ -130,7 +130,7 @@ class ScopedObjectAccessAlreadyRunnable {
    * it's best if we don't grab a mutex.
    */
   template<typename T>
-  T AddLocalReference(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+  T AddLocalReference(mirror::Object* obj) const SHARED_REQUIRES(Locks::mutator_lock_) {
     Locks::mutator_lock_->AssertSharedHeld(Self());
     DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
     DCHECK_NE(obj, Runtime::Current()->GetClearedJniWeakGlobal());
@@ -139,32 +139,32 @@ class ScopedObjectAccessAlreadyRunnable {
 
   template<typename T>
   T Decode(jobject obj) const
-      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+      SHARED_REQUIRES(Locks::mutator_lock_) {
     Locks::mutator_lock_->AssertSharedHeld(Self());
     DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
     return down_cast<T>(Self()->DecodeJObject(obj));
   }
 
   ArtField* DecodeField(jfieldID fid) const
-      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+      SHARED_REQUIRES(Locks::mutator_lock_) {
     Locks::mutator_lock_->AssertSharedHeld(Self());
     DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
     return reinterpret_cast<ArtField*>(fid);
   }
 
-  jfieldID EncodeField(ArtField* field) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+  jfieldID EncodeField(ArtField* field) const SHARED_REQUIRES(Locks::mutator_lock_) {
     Locks::mutator_lock_->AssertSharedHeld(Self());
     DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
     return reinterpret_cast<jfieldID>(field);
   }
 
-  ArtMethod* DecodeMethod(jmethodID mid) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+  ArtMethod* DecodeMethod(jmethodID mid) const SHARED_REQUIRES(Locks::mutator_lock_) {
     Locks::mutator_lock_->AssertSharedHeld(Self());
     DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
     return reinterpret_cast<ArtMethod*>(mid);
   }
 
-  jmethodID EncodeMethod(ArtMethod* method) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+  jmethodID EncodeMethod(ArtMethod* method) const SHARED_REQUIRES(Locks::mutator_lock_) {
     Locks::mutator_lock_->AssertSharedHeld(Self());
     DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
     return reinterpret_cast<jmethodID>(method);
@@ -176,12 +176,12 @@ class ScopedObjectAccessAlreadyRunnable {
 
  protected:
   explicit ScopedObjectAccessAlreadyRunnable(JNIEnv* env)
-      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+      REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
       : self_(ThreadForEnv(env)), env_(down_cast<JNIEnvExt*>(env)), vm_(env_->vm) {
   }
 
   explicit ScopedObjectAccessAlreadyRunnable(Thread* self)
-      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+      REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
       : self_(self), env_(down_cast<JNIEnvExt*>(self->GetJniEnv())),
         vm_(env_ != nullptr ? env_->vm : nullptr) {
   }
@@ -220,14 +220,14 @@ class ScopedObjectAccessAlreadyRunnable {
 class ScopedObjectAccessUnchecked : public ScopedObjectAccessAlreadyRunnable {
  public:
   explicit ScopedObjectAccessUnchecked(JNIEnv* env)
-      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+      REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
       : ScopedObjectAccessAlreadyRunnable(env), tsc_(Self(), kRunnable) {
     Self()->VerifyStack();
     Locks::mutator_lock_->AssertSharedHeld(Self());
   }
 
   explicit ScopedObjectAccessUnchecked(Thread* self)
-      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+      REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
       : ScopedObjectAccessAlreadyRunnable(self), tsc_(self, kRunnable) {
     Self()->VerifyStack();
     Locks::mutator_lock_->AssertSharedHeld(Self());
@@ -250,13 +250,13 @@ class ScopedObjectAccessUnchecked : public ScopedObjectAccessAlreadyRunnable {
 class ScopedObjectAccess : public ScopedObjectAccessUnchecked {
  public:
   explicit ScopedObjectAccess(JNIEnv* env)
-      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
+      REQUIRES(!Locks::thread_suspend_count_lock_)
       SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
       : ScopedObjectAccessUnchecked(env) {
   }
 
   explicit ScopedObjectAccess(Thread* self)
-      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
+      REQUIRES(!Locks::thread_suspend_count_lock_)
       SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
       : ScopedObjectAccessUnchecked(self) {
   }
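
The mechanical renaming in this diff corresponds roughly to the following
attribute expansions, sketched from Clang's thread-safety documentation; the
real macro definitions live elsewhere in the ART tree and are guarded on
compiler support, so treat this as an approximation:

// Approximate expansions of the new-style macros (assumption, not ART's header).
#define REQUIRES(...)        __attribute__((requires_capability(__VA_ARGS__)))
#define SHARED_REQUIRES(...) __attribute__((requires_shared_capability(__VA_ARGS__)))

// Mapping applied throughout this file:
//   LOCKS_EXCLUDED(mu)        ->  REQUIRES(!mu)       // now a checkable negative capability
//   SHARED_LOCKS_REQUIRED(mu) ->  SHARED_REQUIRES(mu)
// SHARED_LOCK_FUNCTION(Locks::mutator_lock_) is left unchanged by this change.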