| author | 2012-06-28 00:08:30 -0700 |
|---|---|
| committer | 2012-06-28 00:08:30 -0700 |
| commit | ea9aff71aafeab5177ec6e32441704288afb290b (patch) |
| tree | 1bd648149d71bdfa370afde690d2200deb3f39a1 |
| parent | 089e2aa42a1f42251f92c3f41c41f12d25d18662 (diff) |
| parent | 52673ffae0025d86f4023735581f19ebcc477487 (diff) |
am 52673ffa: Move IsDaemon to native code.
* commit '52673ffae0025d86f4023735581f19ebcc477487':
Move IsDaemon to native code.
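The functional change is small but the pattern is worth noting: before this commit, `Thread::IsDaemon()` decoded the `java.lang.Thread.daemon` field from the managed peer through a `ScopedJniThreadState` on every call; afterwards the daemon flag is passed down through `Thread_nativeCreate` / `Thread::CreateNativeThread` and cached in a `const bool daemon_` member, so `IsDaemon()` becomes a trivial accessor. A minimal standalone sketch of that "capture at construction" pattern follows; `NativeThread` and `ManagedPeer` are illustrative stand-ins, not ART's real types.

```cpp
// Simplified, standalone model of the pattern this commit adopts: the daemon
// flag is captured once when the native thread object is constructed instead
// of being re-read from the managed peer (via a JNI field decode) every time
// IsDaemon() is called. NativeThread and ManagedPeer are illustrative names.
#include <iostream>

// Stand-in for the managed java.lang.Thread peer; in ART the old code reached
// it through ScopedJniThreadState and WellKnownClasses::java_lang_Thread_daemon.
struct ManagedPeer {
  bool daemon;
};

class NativeThread {
 public:
  // The flag arrives at creation time, mirroring the extra jboolean parameter
  // added to Thread_nativeCreate / Thread::CreateNativeThread.
  explicit NativeThread(bool daemon) : daemon_(daemon) {}

  // After the change this is a plain accessor: no JNI round trip needed.
  bool IsDaemon() const { return daemon_; }

 private:
  const bool daemon_;
};

int main() {
  ManagedPeer peer{true};            // the managed side knows the flag
  NativeThread thread(peer.daemon);  // the native side caches it at creation
  std::cout << std::boolalpha << thread.IsDaemon() << "\n";  // prints "true"
  return 0;
}
```

The `byte pad_[3]` added next to `daemon_` in `thread.h` keeps the fields of the packed `Thread` object 4-byte aligned after the new bool, as the accompanying comment in the diff notes.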
| -rw-r--r-- | src/native/java_lang_Thread.cc | 7 |
|---|---|---|
| -rw-r--r-- | src/scoped_jni_thread_state.h | 30 |
| -rw-r--r-- | src/thread.cc | 46 |
| -rw-r--r-- | src/thread.h | 45 |

4 files changed, 90 insertions, 38 deletions
diff --git a/src/native/java_lang_Thread.cc b/src/native/java_lang_Thread.cc
index 86b3a204f5..626255e267 100644
--- a/src/native/java_lang_Thread.cc
+++ b/src/native/java_lang_Thread.cc
@@ -42,8 +42,9 @@ static jboolean Thread_isInterrupted(JNIEnv* env, jobject java_thread) {
   return (thread != NULL) ? thread->IsInterrupted() : JNI_FALSE;
 }
 
-static void Thread_nativeCreate(JNIEnv* env, jclass, jobject java_thread, jlong stack_size) {
-  Thread::CreateNativeThread(env, java_thread, stack_size);
+static void Thread_nativeCreate(JNIEnv* env, jclass, jobject java_thread, jlong stack_size,
+                                jboolean daemon) {
+  Thread::CreateNativeThread(env, java_thread, stack_size, daemon == JNI_TRUE);
 }
 
 static jint Thread_nativeGetStatus(JNIEnv* env, jobject java_thread, jboolean has_been_started) {
@@ -140,7 +141,7 @@ static JNINativeMethod gMethods[] = {
   NATIVE_METHOD(Thread, currentThread, "()Ljava/lang/Thread;"),
   NATIVE_METHOD(Thread, interrupted, "()Z"),
   NATIVE_METHOD(Thread, isInterrupted, "()Z"),
-  NATIVE_METHOD(Thread, nativeCreate, "(Ljava/lang/Thread;J)V"),
+  NATIVE_METHOD(Thread, nativeCreate, "(Ljava/lang/Thread;JZ)V"),
   NATIVE_METHOD(Thread, nativeGetStatus, "(Z)I"),
   NATIVE_METHOD(Thread, nativeHoldsLock, "(Ljava/lang/Object;)Z"),
   NATIVE_METHOD(Thread, nativeInterrupt, "()V"),
diff --git a/src/scoped_jni_thread_state.h b/src/scoped_jni_thread_state.h
index 42ed19c258..1c9ab2ce86 100644
--- a/src/scoped_jni_thread_state.h
+++ b/src/scoped_jni_thread_state.h
@@ -34,22 +34,24 @@ namespace art {
 class ScopedJniThreadState {
  public:
   explicit ScopedJniThreadState(JNIEnv* env, ThreadState new_state = kRunnable)
-    : env_(reinterpret_cast<JNIEnvExt*>(env)), vm_(env_->vm), self_(ThreadForEnv(env)),
-      old_thread_state_(self_->SetState(new_state)), thread_state_(new_state) {
+      : env_(reinterpret_cast<JNIEnvExt*>(env)), vm_(env_->vm), self_(ThreadForEnv(env)),
+        old_thread_state_(self_->SetState(new_state)), thread_state_(new_state) {
     self_->VerifyStack();
   }
 
   explicit ScopedJniThreadState(Thread* self, ThreadState new_state = kRunnable)
-    : env_(reinterpret_cast<JNIEnvExt*>(self->GetJniEnv())), vm_(env_->vm), self_(self),
-      old_thread_state_(self_->SetState(new_state)), thread_state_(new_state) {
+      : env_(reinterpret_cast<JNIEnvExt*>(self->GetJniEnv())), vm_(env_->vm), self_(self),
+        old_thread_state_(self_->SetState(new_state)), thread_state_(new_state) {
+    if (!Vm()->work_around_app_jni_bugs && self != Thread::Current()) {
+      UnexpectedThreads(self, Thread::Current());
+    }
     self_->VerifyStack();
   }
 
-  // Used when we want a scoped jni thread state but have no thread/JNIEnv.
+  // Used when we want a scoped JNI thread state but have no thread/JNIEnv.
   explicit ScopedJniThreadState(JavaVM* vm)
-    : env_(NULL), vm_(reinterpret_cast<JavaVMExt*>(vm)), self_(NULL),
-      old_thread_state_(kTerminated), thread_state_(kTerminated) {
-  }
+      : env_(NULL), vm_(reinterpret_cast<JavaVMExt*>(vm)), self_(NULL),
+        old_thread_state_(kTerminated), thread_state_(kTerminated) {}
 
   ~ScopedJniThreadState() {
     if (self_ != NULL) {
@@ -164,13 +166,19 @@ class ScopedJniThreadState {
     Thread* env_self = full_env->self;
     Thread* self = work_around_app_jni_bugs ? Thread::Current() : env_self;
     if (!work_around_app_jni_bugs && self != env_self) {
-      // TODO: pass through function name so we can use it here instead of NULL...
-      JniAbortF(NULL, "JNIEnv for %s used on %s",
-                ToStr<Thread>(*env_self).c_str(), ToStr<Thread>(*self).c_str());
+      UnexpectedThreads(env_self, self);
     }
     return self;
   }
 
+  static void UnexpectedThreads(Thread* found_self, Thread* expected_self) {
+    // TODO: pass through function name so we can use it here instead of NULL...
+    JniAbortF(NULL, "JNIEnv for %s used on %s",
+              found_self != NULL ? ToStr<Thread>(*found_self).c_str() : "NULL",
+              expected_self != NULL ? ToStr<Thread>(*expected_self).c_str() : "NULL");
+
+  }
+
   // The full JNIEnv.
   JNIEnvExt* const env_;
   // The full JavaVM.
diff --git a/src/thread.cc b/src/thread.cc
index 8fe016030a..104b9b9ccd 100644
--- a/src/thread.cc
+++ b/src/thread.cc
@@ -207,8 +207,8 @@ static void TearDownAlternateSignalStack() {
   delete[] allocated_signal_stack;
 }
 
-void Thread::CreateNativeThread(JNIEnv* env, jobject java_peer, size_t stack_size) {
-  Thread* native_thread = new Thread;
+void Thread::CreateNativeThread(JNIEnv* env, jobject java_peer, size_t stack_size, bool daemon) {
+  Thread* native_thread = new Thread(daemon);
   {
     ScopedJniThreadState ts(env);
     Object* peer = ts.Decode<Object*>(java_peer);
@@ -277,7 +277,7 @@ void Thread::Init() {
 }
 
 Thread* Thread::Attach(const char* thread_name, bool as_daemon, jobject thread_group) {
-  Thread* self = new Thread;
+  Thread* self = new Thread(as_daemon);
   self->Init();
 
   self->SetState(kNative);
@@ -599,12 +599,20 @@ void Thread::DumpStack(std::ostream& os) const {
 }
 
 void Thread::SetStateWithoutSuspendCheck(ThreadState new_state) {
+  DCHECK_EQ(this, Thread::Current());
   volatile void* raw = reinterpret_cast<volatile void*>(&state_);
   volatile int32_t* addr = reinterpret_cast<volatile int32_t*>(raw);
   android_atomic_release_store(new_state, addr);
 }
 
 ThreadState Thread::SetState(ThreadState new_state) {
+  if (new_state != kVmWait && new_state != kTerminated) {
+    // TODO: kVmWait is set by the parent thread to a child thread to indicate it can go. Similarly
+    // kTerminated may be set by a parent thread to its child if pthread creation fails. This
+    // overloaded use of the state variable means we cannot fully assert that only threads
+    // themselves modify their state.
+    DCHECK_EQ(this, Thread::Current());
+  }
   ThreadState old_state = state_;
   if (old_state == kRunnable) {
     // Non-runnable states are points where we expect thread suspension can occur.
@@ -764,7 +772,7 @@ void Thread::Shutdown() {
   CHECK_PTHREAD_CALL(pthread_key_delete, (Thread::pthread_key_self_), "self key");
 }
 
-Thread::Thread()
+Thread::Thread(bool daemon)
     : suspend_count_(0),
       card_table_(NULL),
       exception_(NULL),
@@ -793,7 +801,9 @@ Thread::Thread()
       debug_invoke_req_(new DebugInvokeReq),
       trace_stack_(new std::vector<TraceStackFrame>),
       name_(new std::string(kThreadNameDuringStartup)),
-      no_thread_suspension_(0) {
+      daemon_(daemon),
+      no_thread_suspension_(0),
+      last_no_thread_suspension_cause_(NULL) {
   CHECK_EQ((sizeof(Thread) % 4), 0U) << sizeof(Thread);
   memset(&held_mutexes_[0], 0, sizeof(held_mutexes_));
 }
@@ -1065,14 +1075,18 @@ class BuildInternalStackTraceVisitor : public StackVisitor {
     // object graph.
     method_trace->Set(depth, dex_pc_trace);
     // Set the Object*s and assert that no thread suspension is now possible.
-    ts.Self()->StartAssertNoThreadSuspension();
+    const char* last_no_suspend_cause =
+        ts.Self()->StartAssertNoThreadSuspension("Building internal stack trace");
+    CHECK(last_no_suspend_cause == NULL) << last_no_suspend_cause;
     method_trace_ = method_trace.get();
     dex_pc_trace_ = dex_pc_trace;
     return true;
   }
 
   virtual ~BuildInternalStackTraceVisitor() {
-    Thread::Current()->EndAssertNoThreadSuspension();
+    if (method_trace_ != NULL) {
+      Thread::Current()->EndAssertNoThreadSuspension(NULL);
+    }
   }
 
   bool VisitFrame() {
@@ -1436,7 +1450,12 @@ class CatchBlockStackVisitor : public StackVisitor {
         throw_frame_id_(0), throw_dex_pc_(0), handler_quick_frame_(NULL),
         handler_quick_frame_pc_(0), handler_dex_pc_(0), native_method_count_(0),
         method_tracing_active_(Runtime::Current()->IsMethodTracingActive()) {
-    self->StartAssertNoThreadSuspension();  // Exception not in root sets, can't allow GC.
+    // Exception not in root sets, can't allow GC.
+    last_no_assert_suspension_cause_ = self->StartAssertNoThreadSuspension("Finding catch block");
+  }
+
+  ~CatchBlockStackVisitor() {
+    LOG(FATAL) << "UNREACHABLE";  // Expected to take long jump.
   }
 
   bool VisitFrame() {
@@ -1496,8 +1515,8 @@ class CatchBlockStackVisitor : public StackVisitor {
         LOG(INFO) << "Handler: " << PrettyMethod(catch_method) << " (line: " << line_number << ")";
       }
     }
-    self_->SetException(exception_);
-    self_->EndAssertNoThreadSuspension();  // Exception back in root set.
+    self_->SetException(exception_);  // Exception back in root set.
+    self_->EndAssertNoThreadSuspension(last_no_assert_suspension_cause_);
     // Place context back on thread so it will be available when we continue.
     self_->ReleaseLongJumpContext(context_);
     context_->SetSP(reinterpret_cast<uintptr_t>(handler_quick_frame_));
@@ -1525,6 +1544,8 @@ class CatchBlockStackVisitor : public StackVisitor {
   uint32_t native_method_count_;
   // Is method tracing active?
   const bool method_tracing_active_;
+  // Support for nesting no thread suspension checks.
+  const char* last_no_assert_suspension_cause_;
 };
 
 void Thread::DeliverException() {
@@ -1595,11 +1616,6 @@ bool Thread::HoldsLock(Object* object) {
   return object->GetThinLockId() == thin_lock_id_;
 }
 
-bool Thread::IsDaemon() {
-  ScopedJniThreadState ts(this);
-  return ts.DecodeField(WellKnownClasses::java_lang_Thread_daemon)->GetBoolean(peer_);
-}
-
 class ReferenceMapVisitor : public StackVisitor {
  public:
   ReferenceMapVisitor(const ManagedStack* stack, const std::vector<TraceStackFrame>* trace_stack,
diff --git a/src/thread.h b/src/thread.h
index 7cd55a357a..5e6063774b 100644
--- a/src/thread.h
+++ b/src/thread.h
@@ -94,7 +94,7 @@ class PACKED Thread {
 
   // Creates a new native thread corresponding to the given managed peer.
   // Used to implement Thread.start.
-  static void CreateNativeThread(JNIEnv* env, jobject peer, size_t stack_size);
+  static void CreateNativeThread(JNIEnv* env, jobject peer, size_t stack_size, bool daemon);
 
   // Attaches the calling native thread to the runtime, returning the new native peer.
   // Used to implement JNI AttachCurrentThread and AttachCurrentThreadAsDaemon calls.
@@ -131,27 +131,45 @@ class PACKED Thread {
   ThreadState SetState(ThreadState new_state);
   void SetStateWithoutSuspendCheck(ThreadState new_state);
 
-  bool IsDaemon();
+  bool IsDaemon() const {
+    return daemon_;
+  }
+
   bool IsSuspended();
 
   void WaitUntilSuspended();
 
   // Once called thread suspension will cause an assertion failure.
-  void StartAssertNoThreadSuspension() {
 #ifndef NDEBUG
+  const char* StartAssertNoThreadSuspension(const char* cause) {
+    CHECK(cause != NULL);
+    const char* previous_cause = last_no_thread_suspension_cause_;
     no_thread_suspension_++;
-#endif
+    last_no_thread_suspension_cause_ = cause;
+    return previous_cause;
   }
+#else
+  const char* StartAssertNoThreadSuspension(const char* cause) {
+    CHECK(cause != NULL);
+    return NULL;
+  }
+#endif
 
   // End region where no thread suspension is expected.
-  void EndAssertNoThreadSuspension() {
 #ifndef NDEBUG
-    DCHECK_GT(no_thread_suspension_, 0U);
+  void EndAssertNoThreadSuspension(const char* old_cause) {
+    CHECK(old_cause != NULL || no_thread_suspension_ == 1);
+    CHECK_GT(no_thread_suspension_, 0U);
     no_thread_suspension_--;
-#endif
+    last_no_thread_suspension_cause_ = old_cause;
+  }
+#else
+  void EndAssertNoThreadSuspension(const char*) {
   }
+#endif
 
   void AssertThreadSuspensionIsAllowable() const {
-    DCHECK_EQ(0u, no_thread_suspension_);
+    DCHECK_EQ(0u, no_thread_suspension_) << last_no_thread_suspension_cause_;
   }
 
   bool CanAccessDirectReferences() const {
@@ -494,7 +512,7 @@ class PACKED Thread {
   void CheckSafeToWait(MutexRank rank);
 
 private:
-  Thread();
+  explicit Thread(bool daemon);
   ~Thread();
   void Destroy();
   friend class ThreadList;  // For ~Thread and Destroy.
@@ -626,6 +644,12 @@ class PACKED Thread {
   // A cached copy of the java.lang.Thread's name.
   std::string* name_;
 
+  // Is the thread a daemon?
+  const bool daemon_;
+
+  // Keep data fields within Thread 4 byte aligned.
+  byte pad_[3];
+
   // A cached pthread_t for the pthread underlying this Thread*.
   pthread_t pthread_self_;
 
@@ -634,6 +658,9 @@ class PACKED Thread {
 
   // A positive value implies we're in a region where thread suspension isn't expected.
   uint32_t no_thread_suspension_;
+
+  // Cause for last suspension.
+  const char* last_no_thread_suspension_cause_;
 public:
   // Runtime support function pointers
   EntryPoints entrypoints_;
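The other half of the diff reworks the no-thread-suspension assertions so that regions can nest and report a human-readable cause: `StartAssertNoThreadSuspension(cause)` now returns the previously recorded cause, and `EndAssertNoThreadSuspension(old_cause)` restores it. Below is a minimal standalone sketch of that save/restore discipline; `SuspensionGuard` is a simplified stand-in rather than ART's `Thread` class, and plain asserts stand in for the runtime's `CHECK` macros.

```cpp
// Minimal standalone sketch of the nested "assert no thread suspension"
// discipline introduced by the diff: Start...() records a cause string and
// returns whatever cause was active before, End...() restores it. This mirrors
// the shape of the new ART API but is not the real implementation.
#include <cassert>
#include <cstdint>
#include <cstdio>

class SuspensionGuard {
 public:
  // Returns the previously recorded cause so the caller can hand it back later.
  const char* StartAssertNoThreadSuspension(const char* cause) {
    assert(cause != nullptr);
    const char* previous_cause = last_cause_;
    ++depth_;
    last_cause_ = cause;
    return previous_cause;
  }

  // Pass in the value returned by the matching Start call.
  void EndAssertNoThreadSuspension(const char* old_cause) {
    assert(old_cause != nullptr || depth_ == 1);
    assert(depth_ > 0);
    --depth_;
    last_cause_ = old_cause;
  }

  // A failed check can now say *why* suspension was forbidden.
  void AssertThreadSuspensionIsAllowable() const {
    if (depth_ != 0) {
      std::fprintf(stderr, "suspension forbidden: %s\n", last_cause_);
    }
    assert(depth_ == 0);
  }

 private:
  uint32_t depth_ = 0;
  const char* last_cause_ = nullptr;
};

int main() {
  SuspensionGuard guard;
  const char* outer = guard.StartAssertNoThreadSuspension("Finding catch block");
  const char* inner = guard.StartAssertNoThreadSuspension("Building internal stack trace");
  guard.EndAssertNoThreadSuspension(inner);   // restores "Finding catch block"
  guard.EndAssertNoThreadSuspension(outer);   // restores the empty state
  guard.AssertThreadSuspensionIsAllowable();  // passes: no region is active
  return 0;
}
```

In the diff itself, `CatchBlockStackVisitor` stashes the returned cause in `last_no_assert_suspension_cause_` and hands it back to `EndAssertNoThreadSuspension()`, while `BuildInternalStackTraceVisitor` instead `CHECK`s that no outer region was active when it starts.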