Diffstat (limited to 'runtime/scoped_thread_state_change-inl.h')
-rw-r--r--  runtime/scoped_thread_state_change-inl.h  156
1 file changed, 156 insertions, 0 deletions
diff --git a/runtime/scoped_thread_state_change-inl.h b/runtime/scoped_thread_state_change-inl.h
new file mode 100644
index 0000000000..cf020d0617
--- /dev/null
+++ b/runtime/scoped_thread_state_change-inl.h
@@ -0,0 +1,156 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_INL_H_
+#define ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_INL_H_
+
+#include "scoped_thread_state_change.h"
+
+#include "jni_env_ext-inl.h"
+#include "obj_ptr-inl.h"
+#include "thread-inl.h"
+
+namespace art {
+
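+// Change the current thread's state to |new_thread_state| for the duration of the scope, and
+// restore the previous state in the destructor. A null |self| is tolerated only when the runtime
+// is not yet created, not started, or shutting down.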
+inline ScopedThreadStateChange::ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
+    : self_(self), thread_state_(new_thread_state), expected_has_no_thread_(false) {
+  if (UNLIKELY(self_ == nullptr)) {
+    // Value chosen arbitrarily and won't be used in the destructor since self_ == null.
+    old_thread_state_ = kTerminated;
+    Runtime* runtime = Runtime::Current();
+    CHECK(runtime == nullptr || !runtime->IsStarted() || runtime->IsShuttingDown(self_));
+  } else {
+    DCHECK_EQ(self, Thread::Current());
+    // Read state without locks, ok as state is effectively thread local and we're not interested
+    // in the suspend count (this will be handled in the runnable transitions).
+    old_thread_state_ = self->GetState();
+    if (old_thread_state_ != new_thread_state) {
+      if (new_thread_state == kRunnable) {
+        self_->TransitionFromSuspendedToRunnable();
+      } else if (old_thread_state_ == kRunnable) {
+        self_->TransitionFromRunnableToSuspended(new_thread_state);
+      } else {
+        // Transitioning between two effectively suspended states, ok to use Unsafe.
+        self_->SetState(new_thread_state);
+      }
+    }
+  }
+}
+
+inline ScopedThreadStateChange::~ScopedThreadStateChange() {
+  if (UNLIKELY(self_ == nullptr)) {
+    if (!expected_has_no_thread_) {
+      Runtime* runtime = Runtime::Current();
+      bool shutting_down = (runtime == nullptr) || runtime->IsShuttingDown(nullptr);
+      CHECK(shutting_down);
+    }
+  } else {
+    if (old_thread_state_ != thread_state_) {
+      if (old_thread_state_ == kRunnable) {
+        self_->TransitionFromSuspendedToRunnable();
+      } else if (thread_state_ == kRunnable) {
+        self_->TransitionFromRunnableToSuspended(old_thread_state_);
+      } else {
+        // Transitioning between two effectively suspended states, ok to use Unsafe.
+        self_->SetState(old_thread_state_);
+      }
+    }
+  }
+}
+
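+// Create a JNI local reference of type T (e.g. jobject, jclass) for a raw mirror::Object*.
+// Requires the shared mutator lock and a runnable thread; a null object yields a null reference.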
+template<typename T>
+inline T ScopedObjectAccessAlreadyRunnable::AddLocalReference(mirror::Object* obj) const {
+  Locks::mutator_lock_->AssertSharedHeld(Self());
+  DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
+  DCHECK_NE(obj, Runtime::Current()->GetClearedJniWeakGlobal());
+  return obj == nullptr ? nullptr : Env()->AddLocalReference<T>(obj);
+}
+
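+// Convenience overload: decode the ObjPtr and add a local reference for the raw object.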
+template<typename T, typename MirrorType, bool kPoison>
+inline T ScopedObjectAccessAlreadyRunnable::AddLocalReference(
+    ObjPtr<MirrorType, kPoison> obj) const {
+  return AddLocalReference<T>(obj.Decode());
+}
+
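+// Decode a JNI reference back to a managed object pointer, down-cast to T.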
+template<typename T, bool kPoison>
+inline ObjPtr<T, kPoison> ScopedObjectAccessAlreadyRunnable::Decode(jobject obj) const {
+  Locks::mutator_lock_->AssertSharedHeld(Self());
+  DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
+  return down_cast<T*>(Self()->DecodeJObject(obj));
+}
+
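+// jfieldID and jmethodID are direct pointer encodings of ArtField* and ArtMethod*, so the
+// Encode/Decode helpers below reduce to reinterpret_casts.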
+inline ArtField* ScopedObjectAccessAlreadyRunnable::DecodeField(jfieldID fid) const {
+  Locks::mutator_lock_->AssertSharedHeld(Self());
+  DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
+  return reinterpret_cast<ArtField*>(fid);
+}
+
+inline jfieldID ScopedObjectAccessAlreadyRunnable::EncodeField(ArtField* field) const {
+  Locks::mutator_lock_->AssertSharedHeld(Self());
+  DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
+  return reinterpret_cast<jfieldID>(field);
+}
+
+inline ArtMethod* ScopedObjectAccessAlreadyRunnable::DecodeMethod(jmethodID mid) const {
+  Locks::mutator_lock_->AssertSharedHeld(Self());
+  DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
+  return reinterpret_cast<ArtMethod*>(mid);
+}
+
+inline jmethodID ScopedObjectAccessAlreadyRunnable::EncodeMethod(ArtMethod* method) const {
+  Locks::mutator_lock_->AssertSharedHeld(Self());
+  DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
+  return reinterpret_cast<jmethodID>(method);
+}
+
+inline bool ScopedObjectAccessAlreadyRunnable::IsRunnable() const {
+  return self_->GetState() == kRunnable;
+}
+
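+// The ScopedObjectAccessAlreadyRunnable constructors perform no state transition themselves;
+// ScopedObjectAccessUnchecked (below) layers a ScopedThreadStateChange on top to enter kRunnable.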
+inline ScopedObjectAccessAlreadyRunnable::ScopedObjectAccessAlreadyRunnable(JNIEnv* env)
+    : self_(ThreadForEnv(env)), env_(down_cast<JNIEnvExt*>(env)), vm_(env_->vm) {}
+
+inline ScopedObjectAccessAlreadyRunnable::ScopedObjectAccessAlreadyRunnable(Thread* self)
+    : self_(self),
+      env_(down_cast<JNIEnvExt*>(self->GetJniEnv())),
+      vm_(env_ != nullptr ? env_->vm : nullptr) {}
+
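+// Enter a runnable scope: the tsc_ member transitions the thread to kRunnable, after which the
+// stack is verified and the shared mutator lock is asserted held.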
+inline ScopedObjectAccessUnchecked::ScopedObjectAccessUnchecked(JNIEnv* env)
+    : ScopedObjectAccessAlreadyRunnable(env), tsc_(Self(), kRunnable) {
+  Self()->VerifyStack();
+  Locks::mutator_lock_->AssertSharedHeld(Self());
+}
+
+inline ScopedObjectAccessUnchecked::ScopedObjectAccessUnchecked(Thread* self)
+    : ScopedObjectAccessAlreadyRunnable(self), tsc_(self, kRunnable) {
+  Self()->VerifyStack();
+  Locks::mutator_lock_->AssertSharedHeld(Self());
+}
+
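+// Temporarily leave the runnable state: the constructor transitions from kRunnable to
+// |suspended_state| and the destructor transitions back to kRunnable. A minimal caller sketch
+// (illustrative only; the function, mutex, and condition variable names are hypothetical):
+//
+//   void WaitForWork(Thread* self, Mutex& lock, ConditionVariable& cond) {
+//     ScopedThreadSuspension sts(self, kWaiting);  // kRunnable -> kWaiting.
+//     MutexLock mu(self, lock);
+//     cond.Wait(self);  // Safe to block; this thread is no longer runnable.
+//   }  // ~ScopedThreadSuspension: back to kRunnable.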
+inline ScopedThreadSuspension::ScopedThreadSuspension(Thread* self, ThreadState suspended_state)
+    : self_(self), suspended_state_(suspended_state) {
+  DCHECK(self_ != nullptr);
+  self_->TransitionFromRunnableToSuspended(suspended_state);
+}
+
+inline ScopedThreadSuspension::~ScopedThreadSuspension() {
+  DCHECK_EQ(self_->GetState(), suspended_state_);
+  self_->TransitionFromSuspendedToRunnable();
+}
+
+} // namespace art
+
+#endif // ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_INL_H_