author      2017-12-11 20:47:56 -0800
committer   2018-01-08 18:48:47 -0800
commit      fb6b0b1b04081f9ef7a240f702d8ce4e61a02e9f (patch)
tree        ae24c7d7bf4493ab9dc881c7b3b95ef8f706974d
parent      680e88ba30d1c599c5eaab4a207db3e39bf2d57f (diff)
ART: Add support for VMStack.getAnnotatedStackTrace
Add Thread::CreateAnnotatedStackTrace, which returns an array with an
AnnotatedStackTraceElement for each stack frame. Each element holds
the StackTraceElement describing the frame and an array of all
objects locked at that location; for the top frame it also optionally
records the object the thread is blocked on, waiting on, or sleeping
on.
Add a test.
Bug: 70538431
Test: m test-art-host
Test: art/test/testrunner/testrunner.py -b --host -t 168
Change-Id: I0d92e3d8182c4a592549a6445854816f71afd29e
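
For context, a caller on an ART runtime could reach the new entry point the
same way the included test does, via reflection on dalvik.system.VMStack. The
sketch below is illustrative only and is not part of this change; it assumes
the AnnotatedStackTraceElement accessors (getStackTraceElement, getHeldLocks,
getBlockedOn) that the test exercises, and it only works on a device build
where dalvik.system.VMStack is present.

    import java.lang.reflect.Method;
    import java.util.Arrays;

    public class AnnotatedStackDemo {
        public static void main(String[] args) throws Exception {
            // Resolve the non-public VMStack entry point via reflection, as
            // test/168-vmstack-annotated does.
            Class<?> vmStack = Class.forName("dalvik.system.VMStack");
            Method m = vmStack.getDeclaredMethod("getAnnotatedThreadStackTrace", Thread.class);

            // Each element is a dalvik.system.AnnotatedStackTraceElement.
            Object[] frames = (Object[]) m.invoke(null, Thread.currentThread());
            for (Object frame : frames) {
                Class<?> c = frame.getClass();
                StackTraceElement ste = (StackTraceElement)
                        c.getDeclaredMethod("getStackTraceElement").invoke(frame);
                Object[] heldLocks =
                        (Object[]) c.getDeclaredMethod("getHeldLocks").invoke(frame);
                Object blockedOn = c.getDeclaredMethod("getBlockedOn").invoke(frame);

                System.out.println(ste + " heldLocks=" + Arrays.toString(heldLocks)
                        + " blockedOn=" + blockedOn);
            }
        }
    }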
-rw-r--r--  runtime/native/dalvik_system_VMStack.cc   |  10
-rw-r--r--  runtime/thread.cc                          | 193
-rw-r--r--  runtime/thread.h                           |   3
-rw-r--r--  test/168-vmstack-annotated/expected.txt    |   0
-rw-r--r--  test/168-vmstack-annotated/info.txt        |   1
-rw-r--r--  test/168-vmstack-annotated/run             |  18
-rw-r--r--  test/168-vmstack-annotated/src/Main.java   | 225
7 files changed, 450 insertions(+), 0 deletions(-)
diff --git a/runtime/native/dalvik_system_VMStack.cc b/runtime/native/dalvik_system_VMStack.cc
index 3e8040bfa5..ed0eb97da1 100644
--- a/runtime/native/dalvik_system_VMStack.cc
+++ b/runtime/native/dalvik_system_VMStack.cc
@@ -160,12 +160,22 @@ static jobjectArray VMStack_getThreadStackTrace(JNIEnv* env, jclass, jobject jav
   return Thread::InternalStackTraceToStackTraceElementArray(soa, trace);
 }
 
+static jobjectArray VMStack_getAnnotatedThreadStackTrace(JNIEnv* env, jclass, jobject javaThread) {
+  ScopedFastNativeObjectAccess soa(env);
+  auto fn = [](Thread* thread, const ScopedFastNativeObjectAccess& soaa)
+      REQUIRES_SHARED(Locks::mutator_lock_) -> jobjectArray {
+    return thread->CreateAnnotatedStackTrace(soaa);
+  };
+  return GetThreadStack(soa, javaThread, fn);
+}
+
 static JNINativeMethod gMethods[] = {
   FAST_NATIVE_METHOD(VMStack, fillStackTraceElements, "(Ljava/lang/Thread;[Ljava/lang/StackTraceElement;)I"),
   FAST_NATIVE_METHOD(VMStack, getCallingClassLoader, "()Ljava/lang/ClassLoader;"),
   FAST_NATIVE_METHOD(VMStack, getClosestUserClassLoader, "()Ljava/lang/ClassLoader;"),
   FAST_NATIVE_METHOD(VMStack, getStackClass2, "()Ljava/lang/Class;"),
   FAST_NATIVE_METHOD(VMStack, getThreadStackTrace, "(Ljava/lang/Thread;)[Ljava/lang/StackTraceElement;"),
+  FAST_NATIVE_METHOD(VMStack, getAnnotatedThreadStackTrace, "(Ljava/lang/Thread;)[Ldalvik/system/AnnotatedStackTraceElement;"),
 };
 
 void register_dalvik_system_VMStack(JNIEnv* env) {
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 9f4e5441a5..46cb751b93 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -2743,6 +2743,199 @@ jobjectArray Thread::InternalStackTraceToStackTraceElementArray(
   return result;
 }
 
+jobjectArray Thread::CreateAnnotatedStackTrace(const ScopedObjectAccessAlreadyRunnable& soa) const {
+  // This code allocates. Do not allow it to operate with a pending exception.
+  if (IsExceptionPending()) {
+    return nullptr;
+  }
+
+  // If flip_function is not null, it means we have run a checkpoint
+  // before the thread wakes up to execute the flip function and the
+  // thread roots haven't been forwarded. So the following access to
+  // the roots (locks or methods in the frames) would be bad. Run it
+  // here. TODO: clean up.
+  // Note: copied from DumpJavaStack.
+  {
+    Thread* this_thread = const_cast<Thread*>(this);
+    Closure* flip_func = this_thread->GetFlipFunction();
+    if (flip_func != nullptr) {
+      flip_func->Run(this_thread);
+    }
+  }
+
+  class CollectFramesAndLocksStackVisitor : public MonitorObjectsStackVisitor {
+   public:
+    CollectFramesAndLocksStackVisitor(const ScopedObjectAccessAlreadyRunnable& soaa_in,
+                                      Thread* self,
+                                      Context* context)
+        : MonitorObjectsStackVisitor(self, context),
+          wait_jobject_(soaa_in.Env(), nullptr),
+          block_jobject_(soaa_in.Env(), nullptr),
+          soaa_(soaa_in) {}
+
+   protected:
+    VisitMethodResult StartMethod(ArtMethod* m, size_t frame_nr ATTRIBUTE_UNUSED)
+        OVERRIDE
+        REQUIRES_SHARED(Locks::mutator_lock_) {
+      ObjPtr<mirror::StackTraceElement> obj = CreateStackTraceElement(
+          soaa_, m, GetDexPc(/* abort on error */ false));
+      if (obj == nullptr) {
+        return VisitMethodResult::kEndStackWalk;
+      }
+      stack_trace_elements_.emplace_back(soaa_.Env(), soaa_.AddLocalReference<jobject>(obj.Ptr()));
+      return VisitMethodResult::kContinueMethod;
+    }
+
+    VisitMethodResult EndMethod(ArtMethod* m ATTRIBUTE_UNUSED) OVERRIDE {
+      lock_objects_.push_back({});
+      lock_objects_[lock_objects_.size() - 1].swap(frame_lock_objects_);
+
+      DCHECK_EQ(lock_objects_.size(), stack_trace_elements_.size());
+
+      return VisitMethodResult::kContinueMethod;
+    }
+
+    void VisitWaitingObject(mirror::Object* obj, ThreadState state ATTRIBUTE_UNUSED)
+        OVERRIDE
+        REQUIRES_SHARED(Locks::mutator_lock_) {
+      wait_jobject_.reset(soaa_.AddLocalReference<jobject>(obj));
+    }
+    void VisitSleepingObject(mirror::Object* obj)
+        OVERRIDE
+        REQUIRES_SHARED(Locks::mutator_lock_) {
+      wait_jobject_.reset(soaa_.AddLocalReference<jobject>(obj));
+    }
+    void VisitBlockedOnObject(mirror::Object* obj,
+                              ThreadState state ATTRIBUTE_UNUSED,
+                              uint32_t owner_tid ATTRIBUTE_UNUSED)
+        OVERRIDE
+        REQUIRES_SHARED(Locks::mutator_lock_) {
+      block_jobject_.reset(soaa_.AddLocalReference<jobject>(obj));
+    }
+    void VisitLockedObject(mirror::Object* obj)
+        OVERRIDE
+        REQUIRES_SHARED(Locks::mutator_lock_) {
+      frame_lock_objects_.emplace_back(soaa_.Env(), soaa_.AddLocalReference<jobject>(obj));
+    }
+
+   public:
+    std::vector<ScopedLocalRef<jobject>> stack_trace_elements_;
+    ScopedLocalRef<jobject> wait_jobject_;
+    ScopedLocalRef<jobject> block_jobject_;
+    std::vector<std::vector<ScopedLocalRef<jobject>>> lock_objects_;
+
+   private:
+    const ScopedObjectAccessAlreadyRunnable& soaa_;
+
+    std::vector<ScopedLocalRef<jobject>> frame_lock_objects_;
+  };
+
+  std::unique_ptr<Context> context(Context::Create());
+  CollectFramesAndLocksStackVisitor dumper(soa, const_cast<Thread*>(this), context.get());
+  dumper.WalkStack();
+
+  // There should not be a pending exception. Otherwise, return with it pending.
+  if (IsExceptionPending()) {
+    return nullptr;
+  }
+
+  // Now go and create Java arrays.
+
+  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
+
+  StackHandleScope<6> hs(soa.Self());
+  mirror::Class* aste_array_class = class_linker->FindClass(
+      soa.Self(),
+      "[Ldalvik/system/AnnotatedStackTraceElement;",
+      ScopedNullHandle<mirror::ClassLoader>());
+  if (aste_array_class == nullptr) {
+    return nullptr;
+  }
+  Handle<mirror::Class> h_aste_array_class(hs.NewHandle<mirror::Class>(aste_array_class));
+
+  mirror::Class* o_array_class = class_linker->FindClass(soa.Self(),
+                                                         "[Ljava/lang/Object;",
+                                                         ScopedNullHandle<mirror::ClassLoader>());
+  if (o_array_class == nullptr) {
+    // This should not fail in a healthy runtime.
+    soa.Self()->AssertPendingException();
+    return nullptr;
+  }
+  Handle<mirror::Class> h_o_array_class(hs.NewHandle<mirror::Class>(o_array_class));
+
+  Handle<mirror::Class> h_aste_class(hs.NewHandle<mirror::Class>(
+      h_aste_array_class->GetComponentType()));
+  ArtField* stack_trace_element_field = h_aste_class->FindField(
+      soa.Self(), h_aste_class.Get(), "stackTraceElement", "Ljava/lang/StackTraceElement;");
+  DCHECK(stack_trace_element_field != nullptr);
+  ArtField* held_locks_field = h_aste_class->FindField(
+      soa.Self(), h_aste_class.Get(), "heldLocks", "[Ljava/lang/Object;");
+  DCHECK(held_locks_field != nullptr);
+  ArtField* blocked_on_field = h_aste_class->FindField(
+      soa.Self(), h_aste_class.Get(), "blockedOn", "Ljava/lang/Object;");
+  DCHECK(blocked_on_field != nullptr);
+
+  size_t length = dumper.stack_trace_elements_.size();
+  ObjPtr<mirror::ObjectArray<mirror::Object>> array =
+      mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), aste_array_class, length);
+  if (array == nullptr) {
+    soa.Self()->AssertPendingOOMException();
+    return nullptr;
+  }
+
+  ScopedLocalRef<jobjectArray> result(soa.Env(), soa.Env()->AddLocalReference<jobjectArray>(array));
+
+  MutableHandle<mirror::Object> handle(hs.NewHandle<mirror::Object>(nullptr));
+  MutableHandle<mirror::ObjectArray<mirror::Object>> handle2(
+      hs.NewHandle<mirror::ObjectArray<mirror::Object>>(nullptr));
+  for (size_t i = 0; i != length; ++i) {
+    handle.Assign(h_aste_class->AllocObject(soa.Self()));
+    if (handle == nullptr) {
+      soa.Self()->AssertPendingOOMException();
+      return nullptr;
+    }
+
+    // Set stack trace element.
+    stack_trace_element_field->SetObject<false>(
+        handle.Get(), soa.Decode<mirror::Object>(dumper.stack_trace_elements_[i].get()));
+
+    // Create locked-on array.
+    if (!dumper.lock_objects_[i].empty()) {
+      handle2.Assign(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(),
+                                                                h_o_array_class.Get(),
+                                                                dumper.lock_objects_[i].size()));
+      if (handle2 == nullptr) {
+        soa.Self()->AssertPendingOOMException();
+        return nullptr;
+      }
+      int32_t j = 0;
+      for (auto& scoped_local : dumper.lock_objects_[i]) {
+        if (scoped_local == nullptr) {
+          continue;
+        }
+        handle2->Set(j, soa.Decode<mirror::Object>(scoped_local.get()));
+        DCHECK(!soa.Self()->IsExceptionPending());
+        j++;
+      }
+      held_locks_field->SetObject<false>(handle.Get(), handle2.Get());
+    }
+
+    // Set blocked-on object.
+    if (i == 0) {
+      if (dumper.block_jobject_ != nullptr) {
+        blocked_on_field->SetObject<false>(
+            handle.Get(), soa.Decode<mirror::Object>(dumper.block_jobject_.get()));
+      }
+    }
+
+    ScopedLocalRef<jobject> elem(soa.Env(), soa.AddLocalReference<jobject>(handle.Get()));
+    soa.Env()->SetObjectArrayElement(result.get(), i, elem.get());
+    DCHECK(!soa.Self()->IsExceptionPending());
+  }
+
+  return result.release();
+}
+
 void Thread::ThrowNewExceptionF(const char* exception_class_descriptor, const char* fmt, ...) {
   va_list args;
   va_start(args, fmt);
diff --git a/runtime/thread.h b/runtime/thread.h
index 1e89887c3e..426d27d1b4 100644
--- a/runtime/thread.h
+++ b/runtime/thread.h
@@ -599,6 +599,9 @@ class Thread {
       jobjectArray output_array = nullptr,
       int* stack_depth = nullptr)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
+  jobjectArray CreateAnnotatedStackTrace(const ScopedObjectAccessAlreadyRunnable& soa) const
+      REQUIRES_SHARED(Locks::mutator_lock_);
+
   bool HasDebuggerShadowFrames() const {
     return tlsPtr_.frame_id_to_shadow_frame != nullptr;
   }
diff --git a/test/168-vmstack-annotated/expected.txt b/test/168-vmstack-annotated/expected.txt
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/168-vmstack-annotated/expected.txt
diff --git a/test/168-vmstack-annotated/info.txt b/test/168-vmstack-annotated/info.txt
new file mode 100644
index 0000000000..d849bc31ed
--- /dev/null
+++ b/test/168-vmstack-annotated/info.txt
@@ -0,0 +1 @@
+Regression test for b/68703210
diff --git a/test/168-vmstack-annotated/run b/test/168-vmstack-annotated/run
new file mode 100644
index 0000000000..93654113e6
--- /dev/null
+++ b/test/168-vmstack-annotated/run
@@ -0,0 +1,18 @@
+#!/bin/bash
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Use a smaller heap so it's easier to potentially fill up.
+exec ${RUN} $@ --runtime-option -Xmx2m
diff --git a/test/168-vmstack-annotated/src/Main.java b/test/168-vmstack-annotated/src/Main.java
new file mode 100644
index 0000000000..8234f945c0
--- /dev/null
+++ b/test/168-vmstack-annotated/src/Main.java
@@ -0,0 +1,225 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.lang.Thread.State;
+import java.lang.reflect.Method;
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.BrokenBarrierException;
+import java.util.concurrent.CyclicBarrier;
+
+public class Main {
+
+    static class Runner implements Runnable {
+        List<Object> locks;
+        List<CyclicBarrier> barriers;
+
+        public Runner(List<Object> locks, List<CyclicBarrier> barriers) {
+            this.locks = locks;
+            this.barriers = barriers;
+        }
+
+        @Override
+        public void run() {
+            step(locks, barriers);
+        }
+
+        private void step(List<Object> l, List<CyclicBarrier> b) {
+            if (l.isEmpty()) {
+                // Nothing to do, sleep indefinitely.
+                try {
+                    Thread.sleep(100000000);
+                } catch (InterruptedException e) {
+                    throw new RuntimeException(e);
+                }
+            } else {
+                Object lockObject = l.remove(0);
+                CyclicBarrier barrierObject = b.remove(0);
+
+                if (lockObject == null) {
+                    // No lock object: only take barrier, recurse.
+                    try {
+                        barrierObject.await();
+                    } catch (InterruptedException | BrokenBarrierException e) {
+                        throw new RuntimeException(e);
+                    }
+                    step(l, b);
+                } else if (barrierObject != null) {
+                    // Have barrier: sync, wait and recurse.
+                    synchronized (lockObject) {
+                        try {
+                            barrierObject.await();
+                        } catch (InterruptedException | BrokenBarrierException e) {
+                            throw new RuntimeException(e);
+                        }
+                        step(l, b);
+                    }
+                } else {
+                    // Sync, and get next step (which is assumed to have object and barrier).
+                    synchronized (lockObject) {
+                        Object lockObject2 = l.remove(0);
+                        CyclicBarrier barrierObject2 = b.remove(0);
+                        synchronized (lockObject2) {
+                            try {
+                                barrierObject2.await();
+                            } catch (InterruptedException | BrokenBarrierException e) {
+                                throw new RuntimeException(e);
+                            }
+                            step(l, b);
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    public static void main(String[] args) throws Exception {
+        try {
+            testCluster1();
+        } catch (Exception e) {
+            Map<Thread,StackTraceElement[]> stacks = Thread.getAllStackTraces();
+            for (Map.Entry<Thread,StackTraceElement[]> entry : stacks.entrySet()) {
+                System.out.println(entry.getKey());
+                System.out.println(Arrays.toString(entry.getValue()));
+            }
+            throw e;
+        }
+    }
+
+    private static void testCluster1() throws Exception {
+        // Test setup (at deadlock):
+        //
+        // Thread 1:
+        //   #0 step: synchronized(o3) { synchronized(o2) }
+        //   #1 step: synchronized(o1)
+        //
+        // Thread 2:
+        //   #0 step: synchronized(o1)
+        //   #1 step: synchronized(o4) { synchronized(o2) }
+        //
+        LinkedList<Object> l1 = new LinkedList<>();
+        LinkedList<CyclicBarrier> b1 = new LinkedList<>();
+        LinkedList<Object> l2 = new LinkedList<>();
+        LinkedList<CyclicBarrier> b2 = new LinkedList<>();
+
+        Object o1 = new Object();
+        Object o2 = new Object();
+        Object o3 = new Object();
+        Object o4 = new Object();
+
+        l1.add(o1);
+        l1.add(o3);
+        l1.add(o2);
+        l2.add(o4);
+        l2.add(o2);
+        l2.add(o1);
+
+        CyclicBarrier c1 = new CyclicBarrier(3);
+        CyclicBarrier c2 = new CyclicBarrier(2);
+        b1.add(c1);
+        b1.add(null);
+        b1.add(c2);
+        b2.add(null);
+        b2.add(c1);
+        b2.add(c2);
+
+        Thread t1 = new Thread(new Runner(l1, b1));
+        t1.setDaemon(true);
+        t1.start();
+        Thread t2 = new Thread(new Runner(l2, b2));
+        t2.setDaemon(true);
+        t2.start();
+
+        c1.await();
+
+        waitNotRunnable(t1);
+        waitNotRunnable(t2);
+        Thread.sleep(250);  // Unfortunately this seems necessary. :-(
+
+        // Thread 1.
+        {
+            Object[] stack1 = getAnnotatedStack(t1);
+            assertBlockedOn(stack1[0], o2);          // Blocked on o2.
+            assertLocks(stack1[0], o3);              // Locked o3.
+            assertStackTraceElementStep(stack1[0]);
+
+            assertBlockedOn(stack1[1], null);        // Frame can't be blocked.
+            assertLocks(stack1[1], o1);              // Locked o1.
+            assertStackTraceElementStep(stack1[1]);
+        }
+
+        // Thread 2.
+        {
+            Object[] stack2 = getAnnotatedStack(t2);
+            assertBlockedOn(stack2[0], o1);          // Blocked on o1.
+            assertLocks(stack2[0]);                  // Nothing locked.
+            assertStackTraceElementStep(stack2[0]);
+
+            assertBlockedOn(stack2[1], null);        // Frame can't be blocked.
+            assertLocks(stack2[1], o4, o2);          // Locked o4, o2.
+            assertStackTraceElementStep(stack2[1]);
+        }
+    }
+
+    private static void waitNotRunnable(Thread t) throws InterruptedException {
+        while (t.getState() == State.RUNNABLE) {
+            Thread.sleep(100);
+        }
+    }
+
+    private static Object[] getAnnotatedStack(Thread t) throws Exception {
+        Class<?> vmStack = Class.forName("dalvik.system.VMStack");
+        Method m = vmStack.getDeclaredMethod("getAnnotatedThreadStackTrace", Thread.class);
+        return (Object[]) m.invoke(null, t);
+    }
+
+    private static void assertEquals(Object o1, Object o2) {
+        if (o1 != o2) {
+            throw new RuntimeException("Expected " + o1 + " == " + o2);
+        }
+    }
+    private static void assertLocks(Object fromTrace, Object... locks) throws Exception {
+        Object fieldValue = fromTrace.getClass().getDeclaredMethod("getHeldLocks").
+                invoke(fromTrace);
+        assertEquals((Object[]) fieldValue,
+                (locks == null) ? null : (locks.length == 0 ? null : locks));
+    }
+    private static void assertBlockedOn(Object fromTrace, Object block) throws Exception {
+        Object fieldValue = fromTrace.getClass().getDeclaredMethod("getBlockedOn").
+                invoke(fromTrace);
+        assertEquals(fieldValue, block);
+    }
+    private static void assertEquals(Object[] o1, Object[] o2) {
+        if (!Arrays.equals(o1, o2)) {
+            throw new RuntimeException(
+                    "Expected " + Arrays.toString(o1) + " == " + Arrays.toString(o2));
+        }
+    }
+    private static void assertStackTraceElementStep(Object o) throws Exception {
+        Object fieldValue = o.getClass().getDeclaredMethod("getStackTraceElement").invoke(o);
+        if (fieldValue instanceof StackTraceElement) {
+            StackTraceElement elem = (StackTraceElement) fieldValue;
+            if (!elem.getMethodName().equals("step")) {
+                throw new RuntimeException("Expected step method");
+            }
+            return;
+        }
+        throw new RuntimeException("Expected StackTraceElement " + fieldValue + " / " + o);
+    }
+}