summary refs log tree commit diff
path: root/runtime/scoped_thread_state_change.h
diff options
context:
space:
mode:
Diffstat (limited to 'runtime/scoped_thread_state_change.h')
-rw-r--r--  runtime/scoped_thread_state_change.h  29
1 files changed, 10 insertions, 19 deletions
diff --git a/runtime/scoped_thread_state_change.h b/runtime/scoped_thread_state_change.h
index c39cdb2679..1ca6c4e4fa 100644
--- a/runtime/scoped_thread_state_change.h
+++ b/runtime/scoped_thread_state_change.h
@@ -34,9 +34,8 @@ class ScopedThreadStateChange {
if (UNLIKELY(self_ == NULL)) {
// Value chosen arbitrarily and won't be used in the destructor since thread_ == NULL.
old_thread_state_ = kTerminated;
- MutexLock mu(NULL, *Locks::runtime_shutdown_lock_);
Runtime* runtime = Runtime::Current();
- CHECK(runtime == NULL || !runtime->IsStarted() || runtime->IsShuttingDown());
+ CHECK(runtime == NULL || !runtime->IsStarted() || runtime->IsShuttingDown(self_));
} else {
bool runnable_transition;
DCHECK_EQ(self, Thread::Current());
@@ -63,9 +62,8 @@ class ScopedThreadStateChange {
~ScopedThreadStateChange() LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE {
if (UNLIKELY(self_ == NULL)) {
if (!expected_has_no_thread_) {
- MutexLock mu(NULL, *Locks::runtime_shutdown_lock_);
Runtime* runtime = Runtime::Current();
- bool shutting_down = (runtime == NULL) || runtime->IsShuttingDown();
+ bool shutting_down = (runtime == NULL) || runtime->IsShuttingDown(nullptr);
CHECK(shutting_down);
}
} else {
@@ -167,6 +165,10 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
return NULL;
}
+ if (kIsDebugBuild) {
+ Runtime::Current()->GetHeap()->VerifyObject(obj);
+ }
+
DCHECK_NE((reinterpret_cast<uintptr_t>(obj) & 0xffff0000), 0xebad0000);
IndirectReferenceTable& locals = Env()->locals;
@@ -185,7 +187,6 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
}
}
#endif
-
if (Vm()->work_around_app_jni_bugs) {
// Hand out direct pointers to support broken old apps.
return reinterpret_cast<T>(obj);
@@ -206,10 +207,7 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Locks::mutator_lock_->AssertSharedHeld(Self());
DCHECK_EQ(thread_state_, kRunnable); // Don't work with raw objects in non-runnable states.
-#ifdef MOVING_GARBAGE_COLLECTOR
- // TODO: we should make these unique weak globals if Field instances can ever move.
- UNIMPLEMENTED(WARNING);
-#endif
+ CHECK(!kMovingFields);
return reinterpret_cast<mirror::ArtField*>(fid);
}
@@ -217,9 +215,7 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Locks::mutator_lock_->AssertSharedHeld(Self());
DCHECK_EQ(thread_state_, kRunnable); // Don't work with raw objects in non-runnable states.
-#ifdef MOVING_GARBAGE_COLLECTOR
- UNIMPLEMENTED(WARNING);
-#endif
+ CHECK(!kMovingFields);
return reinterpret_cast<jfieldID>(field);
}
@@ -227,10 +223,7 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Locks::mutator_lock_->AssertSharedHeld(Self());
DCHECK_EQ(thread_state_, kRunnable); // Don't work with raw objects in non-runnable states.
-#ifdef MOVING_GARBAGE_COLLECTOR
- // TODO: we should make these unique weak globals if Method instances can ever move.
- UNIMPLEMENTED(WARNING);
-#endif
+ CHECK(!kMovingMethods);
return reinterpret_cast<mirror::ArtMethod*>(mid);
}
@@ -238,9 +231,7 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Locks::mutator_lock_->AssertSharedHeld(Self());
DCHECK_EQ(thread_state_, kRunnable); // Don't work with raw objects in non-runnable states.
-#ifdef MOVING_GARBAGE_COLLECTOR
- UNIMPLEMENTED(WARNING);
-#endif
+ CHECK(!kMovingMethods);
return reinterpret_cast<jmethodID>(method);
}