Diffstat (limited to 'runtime/thread.cc')
-rw-r--r--  runtime/thread.cc | 99
1 file changed, 65 insertions(+), 34 deletions(-)
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 3c7a71aba9..d843de5e7f 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -1047,9 +1047,10 @@ void Thread::ShortDump(std::ostream& os) const {
<< "]";
}
-void Thread::Dump(std::ostream& os, bool dump_native_stack, BacktraceMap* backtrace_map) const {
+void Thread::Dump(std::ostream& os, bool dump_native_stack, BacktraceMap* backtrace_map,
+ bool force_dump_stack) const {
DumpState(os);
- DumpStack(os, dump_native_stack, backtrace_map);
+ DumpStack(os, dump_native_stack, backtrace_map, force_dump_stack);
}
mirror::String* Thread::GetThreadName() const {
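The new flag is threaded straight from Dump() into DumpStack(). A hedged sketch of a call site, assuming the thread.h declaration defaults force_dump_stack to false so existing callers are unaffected; DumpUnconditionally is illustrative, not part of this commit:

    // Illustrative helper (not in this commit): dump a thread even when we
    // cannot prove it is suspended, e.g. from a last-resort watchdog path.
    void DumpUnconditionally(const Thread& thread, std::ostream& os) {
      thread.Dump(os,
                  /* dump_native_stack */ true,
                  /* backtrace_map */ nullptr,
                  /* force_dump_stack */ true);  // bypass the suspend check
    }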
@@ -1750,7 +1751,8 @@ void Thread::DumpJavaStack(std::ostream& os) const {
void Thread::DumpStack(std::ostream& os,
bool dump_native_stack,
- BacktraceMap* backtrace_map) const {
+ BacktraceMap* backtrace_map,
+ bool force_dump_stack) const {
// TODO: we call this code when dying but may not have suspended the thread ourself. The
// IsSuspended check is therefore racy with the use for dumping (normally we inhibit
// the race with the thread_suspend_count_lock_).
@@ -1761,11 +1763,11 @@ void Thread::DumpStack(std::ostream& os,
// thread's stack in debug builds where we'll hit the not suspended check in the stack walk.
safe_to_dump = (safe_to_dump || dump_for_abort);
}
- if (safe_to_dump) {
+ if (safe_to_dump || force_dump_stack) {
// If we're currently in native code, dump that stack before dumping the managed stack.
- if (dump_native_stack && (dump_for_abort || ShouldShowNativeStack(this))) {
+ if (dump_native_stack && (dump_for_abort || force_dump_stack || ShouldShowNativeStack(this))) {
DumpKernelStack(os, GetTid(), " kernel: ", false);
- ArtMethod* method = GetCurrentMethod(nullptr, !dump_for_abort);
+ ArtMethod* method = GetCurrentMethod(nullptr, !(dump_for_abort || force_dump_stack));
DumpNativeStack(os, GetTid(), backtrace_map, " native: ", method);
}
DumpJavaStack(os);
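Taken together, force_dump_stack now overrides the safe_to_dump guard and, like dump_for_abort, flips GetCurrentMethod's second argument (presumably abort_on_error) to false, so a forced dump tolerates frames it cannot fully resolve. The gating, restated as standalone predicates with stand-in names:

    // Plain restatement of the conditions above; not ART code.
    bool ShouldDumpAnyStack(bool safe_to_dump, bool force_dump_stack) {
      return safe_to_dump || force_dump_stack;  // forcing wins over the race check
    }

    bool ShouldDumpNativeStack(bool dump_native_stack, bool dump_for_abort,
                               bool force_dump_stack, bool native_is_interesting) {
      return dump_native_stack &&
             (dump_for_abort || force_dump_stack || native_is_interesting);
    }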
@@ -2188,12 +2190,18 @@ void Thread::SetClassLoaderOverride(jobject class_loader_override) {
tlsPtr_.class_loader_override = GetJniEnv()->NewGlobalRef(class_loader_override);
}
-class CountStackDepthVisitor : public StackVisitor {
+using ArtMethodDexPcPair = std::pair<ArtMethod*, uint32_t>;
+
+// Counts the stack trace depth and also fetches the first max_saved_frames frames.
+class FetchStackTraceVisitor : public StackVisitor {
public:
- explicit CountStackDepthVisitor(Thread* thread)
+ explicit FetchStackTraceVisitor(Thread* thread,
+ ArtMethodDexPcPair* saved_frames = nullptr,
+ size_t max_saved_frames = 0)
REQUIRES_SHARED(Locks::mutator_lock_)
: StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
- depth_(0), skip_depth_(0), skipping_(true) {}
+ saved_frames_(saved_frames),
+ max_saved_frames_(max_saved_frames) {}
bool VisitFrame() REQUIRES_SHARED(Locks::mutator_lock_) {
// We want to skip frames up to and including the exception's constructor.
@@ -2206,6 +2214,10 @@ class CountStackDepthVisitor : public StackVisitor {
}
if (!skipping_) {
if (!m->IsRuntimeMethod()) { // Ignore runtime frames (in particular callee save).
+ if (depth_ < max_saved_frames_) {
+ saved_frames_[depth_].first = m;
+ saved_frames_[depth_].second = m->IsProxyMethod() ? DexFile::kDexNoIndex : GetDexPc();
+ }
++depth_;
}
} else {
@@ -2214,20 +2226,22 @@ class CountStackDepthVisitor : public StackVisitor {
return true;
}
- int GetDepth() const {
+ uint32_t GetDepth() const {
return depth_;
}
- int GetSkipDepth() const {
+ uint32_t GetSkipDepth() const {
return skip_depth_;
}
private:
- uint32_t depth_;
- uint32_t skip_depth_;
- bool skipping_;
+ uint32_t depth_ = 0;
+ uint32_t skip_depth_ = 0;
+ bool skipping_ = true;
+ ArtMethodDexPcPair* saved_frames_;
+ const size_t max_saved_frames_;
- DISALLOW_COPY_AND_ASSIGN(CountStackDepthVisitor);
+ DISALLOW_COPY_AND_ASSIGN(FetchStackTraceVisitor);
};
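FetchStackTraceVisitor folds the old CountStackDepthVisitor into a single pass that counts every frame and, when a buffer is supplied, prefetches the first max_saved_frames_ (ArtMethod*, dex pc) pairs. The defaulted constructor arguments preserve the old count-only behaviour: with max_saved_frames_ == 0, the depth_ < max_saved_frames_ test never touches the null buffer. A minimal standalone model of the count-and-prefetch pattern (stand-in types, not ART code):

    #include <cstddef>
    #include <cstdint>

    struct FakeFrame { const void* method; uint32_t dex_pc; };

    // One walk does double duty: count all frames, and capture the first
    // max_saved of them so shallow stacks never need a second walk.
    class CountAndPrefetch {
     public:
      CountAndPrefetch(FakeFrame* saved, size_t max_saved)
          : saved_(saved), max_saved_(max_saved) {}

      void VisitFrame(const void* method, uint32_t dex_pc) {
        if (depth_ < max_saved_) {
          saved_[depth_] = {method, dex_pc};  // prefetch while counting
        }
        ++depth_;  // keep counting past the buffer's end
      }

      uint32_t depth() const { return depth_; }

     private:
      FakeFrame* const saved_;   // may be null when max_saved_ == 0
      const size_t max_saved_;
      uint32_t depth_ = 0;
    };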
template<bool kTransactionActive>
@@ -2237,8 +2251,6 @@ class BuildInternalStackTraceVisitor : public StackVisitor {
: StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
self_(self),
skip_depth_(skip_depth),
- count_(0),
- trace_(nullptr),
pointer_size_(Runtime::Current()->GetClassLinker()->GetImagePointerSize()) {}
bool Init(int depth) REQUIRES_SHARED(Locks::mutator_lock_) ACQUIRE(Roles::uninterruptible_) {
@@ -2290,17 +2302,21 @@ class BuildInternalStackTraceVisitor : public StackVisitor {
if (m->IsRuntimeMethod()) {
return true; // Ignore runtime frames (in particular callee save).
}
+ AddFrame(m, m->IsProxyMethod() ? DexFile::kDexNoIndex : GetDexPc());
+ return true;
+ }
+
+ void AddFrame(ArtMethod* method, uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_) {
ObjPtr<mirror::PointerArray> trace_methods_and_pcs = GetTraceMethodsAndPCs();
- trace_methods_and_pcs->SetElementPtrSize<kTransactionActive>(count_, m, pointer_size_);
+ trace_methods_and_pcs->SetElementPtrSize<kTransactionActive>(count_, method, pointer_size_);
trace_methods_and_pcs->SetElementPtrSize<kTransactionActive>(
trace_methods_and_pcs->GetLength() / 2 + count_,
- m->IsProxyMethod() ? DexFile::kDexNoIndex : GetDexPc(),
+ dex_pc,
pointer_size_);
// Save the declaring class of the method to ensure that the declaring classes of the methods
// do not get unloaded while the stack trace is live.
- trace_->Set(count_ + 1, m->GetDeclaringClass());
+ trace_->Set(count_ + 1, method->GetDeclaringClass());
++count_;
- return true;
}
ObjPtr<mirror::PointerArray> GetTraceMethodsAndPCs() const REQUIRES_SHARED(Locks::mutator_lock_) {
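Hoisting the body of VisitFrame() into AddFrame() is what enables the fast path below: CreateInternalStackTrace() can now push prefetched (method, dex pc) pairs directly, without a second walk. Note the packed layout AddFrame() writes: a single PointerArray holds the methods in its first half and the dex pcs in its second half. A standalone model of that layout (a plain vector standing in for mirror::PointerArray):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Methods live in [0, n), dex pcs in [n, 2n) of the same array.
    struct PackedTrace {
      explicit PackedTrace(size_t n) : slots(2 * n) {}

      void AddFrame(size_t i, uintptr_t method, uintptr_t dex_pc) {
        slots[i] = method;                     // first half: method pointers
        slots[slots.size() / 2 + i] = dex_pc;  // second half: dex pcs
      }

      std::vector<uintptr_t> slots;
    };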
@@ -2316,12 +2332,12 @@ class BuildInternalStackTraceVisitor : public StackVisitor {
// How many more frames to skip.
int32_t skip_depth_;
// Current position down stack trace.
- uint32_t count_;
+ uint32_t count_ = 0;
// An object array where the first element is a pointer array that contains the ArtMethod
// pointers on the stack and dex PCs. The rest of the elements are the declaring
// class of the ArtMethod pointers. trace_[i+1] contains the declaring class of the ArtMethod of
// the i'th frame.
- mirror::ObjectArray<mirror::Object>* trace_;
+ mirror::ObjectArray<mirror::Object>* trace_ = nullptr;
// For cross compilation.
const PointerSize pointer_size_;
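Switching count_ and trace_ to in-class default initializers shrinks the constructor's init-list without changing behaviour; every constructor now starts from the same defaults. In miniature:

    #include <cstdint>

    // NSDMI: the defaults live next to the declarations, so no constructor
    // can forget to establish count_ == 0 and trace_ == nullptr.
    class Sketch {
      uint32_t count_ = 0;
      void* trace_ = nullptr;
    };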
@@ -2330,11 +2346,15 @@ class BuildInternalStackTraceVisitor : public StackVisitor {
template<bool kTransactionActive>
jobject Thread::CreateInternalStackTrace(const ScopedObjectAccessAlreadyRunnable& soa) const {
- // Compute depth of stack
- CountStackDepthVisitor count_visitor(const_cast<Thread*>(this));
+ // Compute depth of stack, save frames if possible to avoid needing to recompute many.
+ constexpr size_t kMaxSavedFrames = 256;
+ std::unique_ptr<ArtMethodDexPcPair[]> saved_frames(new ArtMethodDexPcPair[kMaxSavedFrames]);
+ FetchStackTraceVisitor count_visitor(const_cast<Thread*>(this),
+ &saved_frames[0],
+ kMaxSavedFrames);
count_visitor.WalkStack();
- int32_t depth = count_visitor.GetDepth();
- int32_t skip_depth = count_visitor.GetSkipDepth();
+ const uint32_t depth = count_visitor.GetDepth();
+ const uint32_t skip_depth = count_visitor.GetSkipDepth();
// Build internal stack trace.
BuildInternalStackTraceVisitor<kTransactionActive> build_trace_visitor(soa.Self(),
@@ -2343,7 +2363,16 @@ jobject Thread::CreateInternalStackTrace(const ScopedObjectAccessAlreadyRunnable
if (!build_trace_visitor.Init(depth)) {
return nullptr; // Allocation failed.
}
- build_trace_visitor.WalkStack();
+ // If we saved all of the frames we don't even need to do the actual stack walk. This is faster
+ // than doing the stack walk twice.
+ if (depth < kMaxSavedFrames) {
+ for (size_t i = 0; i < depth; ++i) {
+ build_trace_visitor.AddFrame(saved_frames[i].first, saved_frames[i].second);
+ }
+ } else {
+ build_trace_visitor.WalkStack();
+ }
+
mirror::ObjectArray<mirror::Object>* trace = build_trace_visitor.GetInternalStackTrace();
if (kIsDebugBuild) {
ObjPtr<mirror::PointerArray> trace_methods = build_trace_visitor.GetTraceMethodsAndPCs();
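This is the payoff: when the counting walk already captured every frame (depth < kMaxSavedFrames), the trace is built straight from the buffer and the second stack walk disappears. Only deep stacks fall back to the old two-walk behaviour (the strict < means a stack of exactly 256 frames re-walks even though its buffer is full, which is merely conservative). The buffer itself is heap-allocated, roughly 4 KiB of pairs on 64-bit, rather than consuming thread stack. A standalone sketch of the fast/slow split (template stand-ins, not ART code):

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kMaxSaved = 256;  // mirrors kMaxSavedFrames above

    // Reuse prefetched frames when they cover the whole stack; otherwise
    // fall back to a full second walk.
    template <typename Builder, typename Walker, typename Pair>
    void BuildTrace(Builder& builder, Walker& walker,
                    const Pair* saved, uint32_t depth) {
      if (depth < kMaxSaved) {
        for (uint32_t i = 0; i < depth; ++i) {
          builder.AddFrame(saved[i].first, saved[i].second);  // no re-walk
        }
      } else {
        walker.WalkStack();  // deep stack: the prefetch buffer was too small
      }
    }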
@@ -2362,9 +2391,10 @@ template jobject Thread::CreateInternalStackTrace<true>(
const ScopedObjectAccessAlreadyRunnable& soa) const;
bool Thread::IsExceptionThrownByCurrentMethod(ObjPtr<mirror::Throwable> exception) const {
- CountStackDepthVisitor count_visitor(const_cast<Thread*>(this));
+ // Only count the depth since we do not pass a stack frame array as an argument.
+ FetchStackTraceVisitor count_visitor(const_cast<Thread*>(this));
count_visitor.WalkStack();
- return count_visitor.GetDepth() == exception->GetStackDepth();
+ return count_visitor.GetDepth() == static_cast<uint32_t>(exception->GetStackDepth());
}
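GetDepth() is now uint32_t while Throwable::GetStackDepth() returns a signed value, so the comparison gets an explicit cast; the behaviour matches the implicit conversion, but the cast documents the intent and avoids a -Wsign-compare warning. Minimal illustration:

    #include <cstdint>

    bool SameDepth(uint32_t walked_depth, int32_t recorded_depth) {
      // Explicit cast: same result as the implicit signed-to-unsigned
      // conversion, but the intent is visible and the compiler stays quiet.
      return walked_depth == static_cast<uint32_t>(recorded_depth);
    }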
jobjectArray Thread::InternalStackTraceToStackTraceElementArray(
@@ -3038,9 +3068,10 @@ class ReferenceMapVisitor : public StackVisitor {
T vreg_info(m, code_info, encoding, map, visitor_);
// Visit stack entries that hold pointers.
- size_t number_of_bits = code_info.GetNumberOfStackMaskBits(encoding);
+ const size_t number_of_bits = code_info.GetNumberOfStackMaskBits(encoding);
+ BitMemoryRegion stack_mask = code_info.GetStackMaskOf(encoding, map);
for (size_t i = 0; i < number_of_bits; ++i) {
- if (map.GetStackMaskBit(encoding.stack_map_encoding, i)) {
+ if (stack_mask.LoadBit(i)) {
auto* ref_addr = vreg_base + i;
mirror::Object* ref = ref_addr->AsMirrorPtr();
if (ref != nullptr) {
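GetStackMaskOf() materializes the stack mask once as a BitMemoryRegion outside the loop, replacing a per-iteration GetStackMaskBit() decode with a cheap LoadBit(i) probe. The hoist in standalone form (a plain integer standing in for the bit region):

    #include <cstddef>
    #include <cstdint>

    inline bool LoadBit(uint64_t region, size_t i) { return (region >> i) & 1u; }

    void VisitStackSlots(uint64_t stack_mask, size_t number_of_bits) {
      // Decode once, probe cheaply: the mask is fixed for the whole loop.
      for (size_t i = 0; i < number_of_bits; ++i) {
        if (LoadBit(stack_mask, i)) {
          // ... stack slot i holds a managed reference; visit it ...
        }
      }
    }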
@@ -3048,12 +3079,12 @@ class ReferenceMapVisitor : public StackVisitor {
vreg_info.VisitStack(&new_ref, i, this);
if (ref != new_ref) {
ref_addr->Assign(new_ref);
- }
+ }
}
}
}
// Visit callee-save registers that hold pointers.
- uint32_t register_mask = map.GetRegisterMask(encoding.stack_map_encoding);
+ uint32_t register_mask = code_info.GetRegisterMaskOf(encoding, map);
for (size_t i = 0; i < BitSizeOf<uint32_t>(); ++i) {
if (register_mask & (1 << i)) {
mirror::Object** ref_addr = reinterpret_cast<mirror::Object**>(GetGPRAddress(i));
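The register mask gets the same treatment: GetRegisterMaskOf() returns the mask as a plain uint32_t up front, and the loop then tests each of its 32 bits. A standalone equivalent of the bit scan:

    #include <cstdint>

    void VisitCalleeSaveRegisters(uint32_t register_mask) {
      for (uint32_t i = 0; i < 32; ++i) {  // BitSizeOf<uint32_t>() == 32
        if (register_mask & (1u << i)) {   // 1u avoids shifting into the sign bit
          // ... callee-save register i holds a managed pointer; visit it ...
        }
      }
    }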