| author | 2016-09-27 18:43:30 -0700 |
|---|---|
| committer | 2016-09-29 17:31:09 -0700 |
| commit | 0795f23920ee9aabf28e45c63cd592dcccf00216 (patch) |
| tree | ff3f880c5e84f3316532b47d0e9a7729ade848ac /runtime/scoped_thread_state_change.h |
| parent | d1224dce59eb0019507e41da5e10f12dda66bee4 (diff) |
Clean up ScopedThreadStateChange to use ObjPtr
Also fixed inclusion of -inl.h files in .h files by adding
scoped_object_access-inl.h and scoped_fast_native_object_access-inl.h.
Changed AddLocalReference / Decode to use ObjPtr.
Changed libartbenchmark to be debug to avoid linkage errors.
Bug: 31113334
Test: test-art-host
Change-Id: I4d2e160483a29d21e1e0e440585ed328b9811483
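To make the AddLocalReference / Decode change above concrete, here is a minimal sketch of a native method written against the new declarations. It is illustrative only: the function name and body are invented, and callers now need one of the new -inl.h headers mentioned above for the out-of-line definitions.

```cpp
// Hypothetical native method, not part of this commit.
// Decode() now yields an ObjPtr<> instead of a raw mirror::Object*,
// and AddLocalReference() accepts that ObjPtr directly.
static jobject MyNativeMethod(JNIEnv* env, jobject java_this) {
  // Transition this thread to Runnable and share the mutator lock for the scope.
  ScopedObjectAccess soa(env);
  // Decode the JNI handle into an ObjPtr rather than a raw mirror pointer.
  ObjPtr<mirror::Object> obj = soa.Decode<mirror::Object>(java_this);
  // Hand the object back to managed code as a new local reference.
  return soa.AddLocalReference<jobject>(obj);
}
```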
Diffstat (limited to 'runtime/scoped_thread_state_change.h')
-rw-r--r-- | runtime/scoped_thread_state_change.h | 193 |
1 file changed, 51 insertions, 142 deletions
diff --git a/runtime/scoped_thread_state_change.h b/runtime/scoped_thread_state_change.h
index 8a1aca592f..175bec51d6 100644
--- a/runtime/scoped_thread_state_change.h
+++ b/runtime/scoped_thread_state_change.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012 The Android Open Source Project
+ * Copyright (C) 2016 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -17,85 +17,43 @@
 #ifndef ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_H_
 #define ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_H_
 
+#include "art_field.h"
 #include "base/casts.h"
+#include "base/value_object.h"
 #include "java_vm_ext.h"
-#include "jni_env_ext-inl.h"
-#include "art_field.h"
-#include "read_barrier.h"
-#include "thread-inl.h"
+#include "thread_state.h"
 #include "verify_object.h"
 
 namespace art {
 
+struct JNIEnvExt;
+template<class MirrorType, bool kPoison> class ObjPtr;
+
 // Scoped change into and out of a particular state. Handles Runnable transitions that require
 // more complicated suspension checking. The subclasses ScopedObjectAccessUnchecked and
 // ScopedObjectAccess are used to handle the change into Runnable to Get direct access to objects,
 // the unchecked variant doesn't aid annotalysis.
 class ScopedThreadStateChange : public ValueObject {
  public:
-  ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
-      REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
-      : self_(self), thread_state_(new_thread_state), expected_has_no_thread_(false) {
-    if (UNLIKELY(self_ == nullptr)) {
-      // Value chosen arbitrarily and won't be used in the destructor since thread_ == null.
-      old_thread_state_ = kTerminated;
-      Runtime* runtime = Runtime::Current();
-      CHECK(runtime == nullptr || !runtime->IsStarted() || runtime->IsShuttingDown(self_));
-    } else {
-      DCHECK_EQ(self, Thread::Current());
-      // Read state without locks, ok as state is effectively thread local and we're not interested
-      // in the suspend count (this will be handled in the runnable transitions).
-      old_thread_state_ = self->GetState();
-      if (old_thread_state_ != new_thread_state) {
-        if (new_thread_state == kRunnable) {
-          self_->TransitionFromSuspendedToRunnable();
-        } else if (old_thread_state_ == kRunnable) {
-          self_->TransitionFromRunnableToSuspended(new_thread_state);
-        } else {
-          // A suspended transition to another effectively suspended transition, ok to use Unsafe.
-          self_->SetState(new_thread_state);
-        }
-      }
-    }
-  }
+  ALWAYS_INLINE ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
+      REQUIRES(!Locks::thread_suspend_count_lock_);
 
-  ~ScopedThreadStateChange() REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE {
-    if (UNLIKELY(self_ == nullptr)) {
-      if (!expected_has_no_thread_) {
-        Runtime* runtime = Runtime::Current();
-        bool shutting_down = (runtime == nullptr) || runtime->IsShuttingDown(nullptr);
-        CHECK(shutting_down);
-      }
-    } else {
-      if (old_thread_state_ != thread_state_) {
-        if (old_thread_state_ == kRunnable) {
-          self_->TransitionFromSuspendedToRunnable();
-        } else if (thread_state_ == kRunnable) {
-          self_->TransitionFromRunnableToSuspended(old_thread_state_);
-        } else {
-          // A suspended transition to another effectively suspended transition, ok to use Unsafe.
-          self_->SetState(old_thread_state_);
-        }
-      }
-    }
-  }
+  ALWAYS_INLINE ~ScopedThreadStateChange() REQUIRES(!Locks::thread_suspend_count_lock_);
 
-  Thread* Self() const {
+  ALWAYS_INLINE Thread* Self() const {
     return self_;
   }
 
  protected:
   // Constructor used by ScopedJniThreadState for an unattached thread that has access to the VM*.
-  ScopedThreadStateChange()
-      : self_(nullptr), thread_state_(kTerminated), old_thread_state_(kTerminated),
-        expected_has_no_thread_(true) {}
+  ScopedThreadStateChange() {}
 
-  Thread* const self_;
-  const ThreadState thread_state_;
+  Thread* const self_ = nullptr;
+  const ThreadState thread_state_ = kTerminated;
 
  private:
-  ThreadState old_thread_state_;
-  const bool expected_has_no_thread_;
+  ThreadState old_thread_state_ = kTerminated;
+  const bool expected_has_no_thread_ = true;
 
   friend class ScopedObjectAccessUnchecked;
   DISALLOW_COPY_AND_ASSIGN(ScopedThreadStateChange);
@@ -129,62 +87,34 @@ class ScopedObjectAccessAlreadyRunnable : public ValueObject {
    * This will be called on otherwise unreferenced objects. We cannot do GC allocations here, and
    * it's best if we don't grab a mutex.
    */
-  template<typename T>
-  T AddLocalReference(mirror::Object* obj) const REQUIRES_SHARED(Locks::mutator_lock_) {
-    Locks::mutator_lock_->AssertSharedHeld(Self());
-    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
-    DCHECK_NE(obj, Runtime::Current()->GetClearedJniWeakGlobal());
-    return obj == nullptr ? nullptr : Env()->AddLocalReference<T>(obj);
-  }
+  template<typename T, typename MirrorType, bool kPoison = kIsDebugBuild>
+  T AddLocalReference(ObjPtr<MirrorType, kPoison> obj) const
+      REQUIRES_SHARED(Locks::mutator_lock_);
 
+  // TODO: Delete
   template<typename T>
-  T Decode(jobject obj) const
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    Locks::mutator_lock_->AssertSharedHeld(Self());
-    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
-    return down_cast<T>(Self()->DecodeJObject(obj));
-  }
+  T AddLocalReference(mirror::Object* obj) const
+      REQUIRES_SHARED(Locks::mutator_lock_);
 
-  ArtField* DecodeField(jfieldID fid) const
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    Locks::mutator_lock_->AssertSharedHeld(Self());
-    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
-    return reinterpret_cast<ArtField*>(fid);
-  }
+  template<typename T, bool kPoison = kIsDebugBuild>
+  ObjPtr<T, kPoison> Decode(jobject obj) const REQUIRES_SHARED(Locks::mutator_lock_);
 
-  jfieldID EncodeField(ArtField* field) const REQUIRES_SHARED(Locks::mutator_lock_) {
-    Locks::mutator_lock_->AssertSharedHeld(Self());
-    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
-    return reinterpret_cast<jfieldID>(field);
-  }
+  ArtField* DecodeField(jfieldID fid) const REQUIRES_SHARED(Locks::mutator_lock_);
 
-  ArtMethod* DecodeMethod(jmethodID mid) const REQUIRES_SHARED(Locks::mutator_lock_) {
-    Locks::mutator_lock_->AssertSharedHeld(Self());
-    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
-    return reinterpret_cast<ArtMethod*>(mid);
-  }
+  jfieldID EncodeField(ArtField* field) const REQUIRES_SHARED(Locks::mutator_lock_);
 
-  jmethodID EncodeMethod(ArtMethod* method) const REQUIRES_SHARED(Locks::mutator_lock_) {
-    Locks::mutator_lock_->AssertSharedHeld(Self());
-    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
-    return reinterpret_cast<jmethodID>(method);
-  }
+  ArtMethod* DecodeMethod(jmethodID mid) const REQUIRES_SHARED(Locks::mutator_lock_);
 
-  bool IsRunnable() const {
-    return self_->GetState() == kRunnable;
-  }
+  jmethodID EncodeMethod(ArtMethod* method) const REQUIRES_SHARED(Locks::mutator_lock_);
+
+  ALWAYS_INLINE bool IsRunnable() const;
 
  protected:
-  explicit ScopedObjectAccessAlreadyRunnable(JNIEnv* env)
-      REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
-      : self_(ThreadForEnv(env)), env_(down_cast<JNIEnvExt*>(env)), vm_(env_->vm) {
-  }
+  ALWAYS_INLINE explicit ScopedObjectAccessAlreadyRunnable(JNIEnv* env)
+      REQUIRES(!Locks::thread_suspend_count_lock_);
 
-  explicit ScopedObjectAccessAlreadyRunnable(Thread* self)
-      REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
-      : self_(self), env_(down_cast<JNIEnvExt*>(self->GetJniEnv())),
-        vm_(env_ != nullptr ? env_->vm : nullptr) {
-  }
+  ALWAYS_INLINE explicit ScopedObjectAccessAlreadyRunnable(Thread* self)
+      REQUIRES(!Locks::thread_suspend_count_lock_);
 
   // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
   // change into Runnable or acquire a share on the mutator_lock_.
@@ -192,8 +122,7 @@ class ScopedObjectAccessAlreadyRunnable : public ValueObject {
       : self_(nullptr), env_(nullptr), vm_(down_cast<JavaVMExt*>(vm)) {}
 
   // Here purely to force inlining.
-  ~ScopedObjectAccessAlreadyRunnable() ALWAYS_INLINE {
-  }
+  ALWAYS_INLINE ~ScopedObjectAccessAlreadyRunnable() {}
 
   // Self thread, can be null.
   Thread* const self_;
@@ -219,19 +148,11 @@
 // the mutator_lock_ will be acquired on construction.
 class ScopedObjectAccessUnchecked : public ScopedObjectAccessAlreadyRunnable {
  public:
-  explicit ScopedObjectAccessUnchecked(JNIEnv* env)
-      REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
-      : ScopedObjectAccessAlreadyRunnable(env), tsc_(Self(), kRunnable) {
-    Self()->VerifyStack();
-    Locks::mutator_lock_->AssertSharedHeld(Self());
-  }
+  ALWAYS_INLINE explicit ScopedObjectAccessUnchecked(JNIEnv* env)
+      REQUIRES(!Locks::thread_suspend_count_lock_);
 
-  explicit ScopedObjectAccessUnchecked(Thread* self)
-      REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
-      : ScopedObjectAccessAlreadyRunnable(self), tsc_(self, kRunnable) {
-    Self()->VerifyStack();
-    Locks::mutator_lock_->AssertSharedHeld(Self());
-  }
+  ALWAYS_INLINE explicit ScopedObjectAccessUnchecked(Thread* self)
+      REQUIRES(!Locks::thread_suspend_count_lock_);
 
   // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
   // change into Runnable or acquire a share on the mutator_lock_.
@@ -249,28 +170,24 @@ class ScopedObjectAccessUnchecked : public ScopedObjectAccessAlreadyRunnable {
 // Annotalysis helping variant of the above.
 class ScopedObjectAccess : public ScopedObjectAccessUnchecked {
  public:
-  explicit ScopedObjectAccess(JNIEnv* env)
+  ALWAYS_INLINE explicit ScopedObjectAccess(JNIEnv* env)
       REQUIRES(!Locks::thread_suspend_count_lock_)
-      SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
-      : ScopedObjectAccessUnchecked(env) {
-  }
+      SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
+      : ScopedObjectAccessUnchecked(env) {}
 
-  explicit ScopedObjectAccess(Thread* self)
+  ALWAYS_INLINE explicit ScopedObjectAccess(Thread* self)
      REQUIRES(!Locks::thread_suspend_count_lock_)
-      SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
-      : ScopedObjectAccessUnchecked(self) {
-  }
+      SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
+      : ScopedObjectAccessUnchecked(self) {}
 
-  ~ScopedObjectAccess() UNLOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE {
-    // Base class will release share of lock. Invoked after this destructor.
-  }
+  // Base class will release share of lock. Invoked after this destructor.
+  ~ScopedObjectAccess() UNLOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE {}
 
  private:
   // TODO: remove this constructor. It is used by check JNI's ScopedCheck to make it believe that
   //       routines operating with just a VM are sound, they are not, but when you have just a VM
   //       you cannot call the unsound routines.
-  explicit ScopedObjectAccess(JavaVM* vm)
-      SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
+  explicit ScopedObjectAccess(JavaVM* vm) SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
       : ScopedObjectAccessUnchecked(vm) {}
 
   friend class ScopedCheck;
@@ -280,19 +197,11 @@ class ScopedObjectAccess : public ScopedObjectAccessUnchecked {
 // Annotalysis helper for going to a suspended state from runnable.
 class ScopedThreadSuspension : public ValueObject {
  public:
-  explicit ScopedThreadSuspension(Thread* self, ThreadState suspended_state)
+  ALWAYS_INLINE explicit ScopedThreadSuspension(Thread* self, ThreadState suspended_state)
       REQUIRES(!Locks::thread_suspend_count_lock_, !Roles::uninterruptible_)
-      UNLOCK_FUNCTION(Locks::mutator_lock_)
-      ALWAYS_INLINE
-      : self_(self), suspended_state_(suspended_state) {
-    DCHECK(self_ != nullptr);
-    self_->TransitionFromRunnableToSuspended(suspended_state);
-  }
+      UNLOCK_FUNCTION(Locks::mutator_lock_);
 
-  ~ScopedThreadSuspension() SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE {
-    DCHECK_EQ(self_->GetState(), suspended_state_);
-    self_->TransitionFromSuspendedToRunnable();
-  }
+  ALWAYS_INLINE ~ScopedThreadSuspension() SHARED_LOCK_FUNCTION(Locks::mutator_lock_);
 
  private:
   Thread* const self_;
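For context on how the scoped classes in this header are meant to nest, a rough usage sketch follows. It is illustrative only: the function and the blocking work are invented, not taken from this commit.

```cpp
// Illustrative only: shows the intended pairing of the RAII helpers above.
void WaitForGcThenUse(Thread* self, jobject target) {
  // Runnable + shared mutator lock for the enclosing scope.
  ScopedObjectAccess soa(self);
  {
    // Drop to a suspended state around a blocking operation so the GC can run;
    // no raw object pointers may be held live across this region.
    ScopedThreadSuspension sts(self, kWaitingForGcToComplete);
    // ... block here (e.g. wait on a condition variable) ...
  }  // Destructor transitions back to Runnable.
  // Safe to work with objects again while Runnable.
  ObjPtr<mirror::Object> obj = soa.Decode<mirror::Object>(target);
  // ... use obj ...
}
```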