Revert "Revert^2 "ART: Add StackVisitor accepting a lambda""
This reverts commit 3d477f3a3eea757a49ca621cc579f711f22fccdd.
Bug: 115837065
Reason for revert: Breaks jdwp tests.
Change-Id: I09249331798970751a20c6b41675c3efef72adfb
diff --git a/openjdkjvmti/ti_stack.cc b/openjdkjvmti/ti_stack.cc
index 4a3eac8..5de4a81 100644
--- a/openjdkjvmti/ti_stack.cc
+++ b/openjdkjvmti/ti_stack.cc
@@ -673,24 +673,34 @@
return ERR(NONE);
}
+// Walks up the stack counting Java frames. This is not StackVisitor::ComputeNumFrames, as
+// runtime methods and transitions must not be counted.
+struct GetFrameCountVisitor : public art::StackVisitor {
+ explicit GetFrameCountVisitor(art::Thread* thread)
+ : art::StackVisitor(thread, nullptr, art::StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ count(0) {}
+
+ bool VisitFrame() override REQUIRES_SHARED(art::Locks::mutator_lock_) {
+ art::ArtMethod* m = GetMethod();
+ const bool do_count = !(m == nullptr || m->IsRuntimeMethod());
+ if (do_count) {
+ count++;
+ }
+ return true;
+ }
+
+ size_t count;
+};
+
struct GetFrameCountClosure : public art::Closure {
public:
GetFrameCountClosure() : count(0) {}
void Run(art::Thread* self) override REQUIRES_SHARED(art::Locks::mutator_lock_) {
- // This is not StackVisitor::ComputeNumFrames, as runtime methods and transitions must not be
- // counted.
- art::StackVisitor::WalkStack(
- [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(art::Locks::mutator_lock_) {
- art::ArtMethod* m = stack_visitor->GetMethod();
- if (m != nullptr && !m->IsRuntimeMethod()) {
- count++;
- }
- return true;
- },
- self,
- /* context= */ nullptr,
- art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
+ GetFrameCountVisitor visitor(self);
+ visitor.WalkStack(false);
+
+ count = visitor.count;
}
size_t count;
@@ -733,30 +743,46 @@
return ERR(NONE);
}
+// Walks up the stack 'n' callers, when used with Thread::WalkStack.
+struct GetLocationVisitor : public art::StackVisitor {
+ GetLocationVisitor(art::Thread* thread, size_t n_in)
+ : art::StackVisitor(thread, nullptr, art::StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ n(n_in),
+ count(0),
+ caller(nullptr),
+ caller_dex_pc(0) {}
+
+ bool VisitFrame() override REQUIRES_SHARED(art::Locks::mutator_lock_) {
+ art::ArtMethod* m = GetMethod();
+ const bool do_count = !(m == nullptr || m->IsRuntimeMethod());
+ if (do_count) {
+ DCHECK(caller == nullptr);
+ if (count == n) {
+ caller = m;
+ caller_dex_pc = GetDexPc(false);
+ return false;
+ }
+ count++;
+ }
+ return true;
+ }
+
+ const size_t n;
+ size_t count;
+ art::ArtMethod* caller;
+ uint32_t caller_dex_pc;
+};
+
struct GetLocationClosure : public art::Closure {
public:
explicit GetLocationClosure(size_t n_in) : n(n_in), method(nullptr), dex_pc(0) {}
void Run(art::Thread* self) override REQUIRES_SHARED(art::Locks::mutator_lock_) {
- // Walks up the stack 'n' callers.
- size_t count = 0u;
- art::StackVisitor::WalkStack(
- [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(art::Locks::mutator_lock_) {
- art::ArtMethod* m = stack_visitor->GetMethod();
- if (m != nullptr && !m->IsRuntimeMethod()) {
- DCHECK(method == nullptr);
- if (count == n) {
- method = m;
- dex_pc = stack_visitor->GetDexPc(/*abort_on_failure=*/false);
- return false;
- }
- count++;
- }
- return true;
- },
- self,
- /* context= */ nullptr,
- art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
+ GetLocationVisitor visitor(self, n);
+ visitor.WalkStack(false);
+
+ method = visitor.caller;
+ dex_pc = visitor.caller_dex_pc;
}
const size_t n;
diff --git a/runtime/debugger.cc b/runtime/debugger.cc
index 39980f0..3ad7fc9 100644
--- a/runtime/debugger.cc
+++ b/runtime/debugger.cc
@@ -2362,18 +2362,25 @@
}
static int GetStackDepth(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_) {
- size_t depth = 0u;
- StackVisitor::WalkStack(
- [&depth](const StackVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- if (!visitor->GetMethod()->IsRuntimeMethod()) {
- ++depth;
- }
- return true;
- },
- thread,
- /* context= */ nullptr,
- StackVisitor::StackWalkKind::kIncludeInlinedFrames);
- return depth;
+ struct CountStackDepthVisitor : public StackVisitor {
+ explicit CountStackDepthVisitor(Thread* thread_in)
+ : StackVisitor(thread_in, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ depth(0) {}
+
+ // TODO: Enable annotalysis. We know lock is held in constructor, but abstraction confuses
+ // annotalysis.
+ bool VisitFrame() override NO_THREAD_SAFETY_ANALYSIS {
+ if (!GetMethod()->IsRuntimeMethod()) {
+ ++depth;
+ }
+ return true;
+ }
+ size_t depth;
+ };
+
+ CountStackDepthVisitor visitor(thread);
+ visitor.WalkStack();
+ return visitor.depth;
}
JDWP::JdwpError Dbg::GetThreadFrameCount(JDWP::ObjectId thread_id, size_t* result) {
@@ -2391,10 +2398,47 @@
return JDWP::ERR_NONE;
}
-JDWP::JdwpError Dbg::GetThreadFrames(JDWP::ObjectId thread_id,
- const size_t start_frame,
- const size_t frame_count,
- JDWP::ExpandBuf* buf) {
+JDWP::JdwpError Dbg::GetThreadFrames(JDWP::ObjectId thread_id, size_t start_frame,
+ size_t frame_count, JDWP::ExpandBuf* buf) {
+ class GetFrameVisitor : public StackVisitor {
+ public:
+ GetFrameVisitor(Thread* thread, size_t start_frame_in, size_t frame_count_in,
+ JDWP::ExpandBuf* buf_in)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ depth_(0),
+ start_frame_(start_frame_in),
+ frame_count_(frame_count_in),
+ buf_(buf_in) {
+ expandBufAdd4BE(buf_, frame_count_);
+ }
+
+ bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
+ if (GetMethod()->IsRuntimeMethod()) {
+ return true; // The debugger can't do anything useful with a frame that has no Method*.
+ }
+ if (depth_ >= start_frame_ + frame_count_) {
+ return false;
+ }
+ if (depth_ >= start_frame_) {
+ JDWP::FrameId frame_id(GetFrameId());
+ JDWP::JdwpLocation location;
+ SetJdwpLocation(&location, GetMethod(), GetDexPc());
+ VLOG(jdwp) << StringPrintf(" Frame %3zd: id=%3" PRIu64 " ", depth_, frame_id) << location;
+ expandBufAdd8BE(buf_, frame_id);
+ expandBufAddLocation(buf_, location);
+ }
+ ++depth_;
+ return true;
+ }
+
+ private:
+ size_t depth_;
+ const size_t start_frame_;
+ const size_t frame_count_;
+ JDWP::ExpandBuf* buf_;
+ };
+
ScopedObjectAccessUnchecked soa(Thread::Current());
JDWP::JdwpError error;
Thread* thread = DecodeThread(soa, thread_id, &error);
@@ -2404,32 +2448,8 @@
if (!IsSuspendedForDebugger(soa, thread)) {
return JDWP::ERR_THREAD_NOT_SUSPENDED;
}
-
- size_t depth = 0u;
- StackVisitor::WalkStack(
- [&](StackVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- if (visitor->GetMethod()->IsRuntimeMethod()) {
- return true; // The debugger can't do anything useful with a frame that has no Method*.
- }
- if (depth >= start_frame + frame_count) {
- return false;
- }
- if (depth >= start_frame) {
- JDWP::FrameId frame_id(visitor->GetFrameId());
- JDWP::JdwpLocation location;
- SetJdwpLocation(&location, visitor->GetMethod(), visitor->GetDexPc());
- VLOG(jdwp)
- << StringPrintf(" Frame %3zd: id=%3" PRIu64 " ", depth, frame_id) << location;
- expandBufAdd8BE(buf, frame_id);
- expandBufAddLocation(buf, location);
- }
- ++depth;
- return true;
- },
- thread,
- /* context= */ nullptr,
- StackVisitor::StackWalkKind::kIncludeInlinedFrames);
-
+ GetFrameVisitor visitor(thread, start_frame, frame_count, buf);
+ visitor.WalkStack();
return JDWP::ERR_NONE;
}
@@ -2510,6 +2530,28 @@
Runtime::Current()->GetThreadList()->SuspendSelfForDebugger();
}
+struct GetThisVisitor : public StackVisitor {
+ GetThisVisitor(Thread* thread, Context* context, JDWP::FrameId frame_id_in)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ : StackVisitor(thread, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ this_object(nullptr),
+ frame_id(frame_id_in) {}
+
+ // TODO: Enable annotalysis. We know lock is held in constructor, but abstraction confuses
+ // annotalysis.
+ bool VisitFrame() override NO_THREAD_SAFETY_ANALYSIS {
+ if (frame_id != GetFrameId()) {
+ return true; // continue
+ } else {
+ this_object = GetThisObject();
+ return false;
+ }
+ }
+
+ mirror::Object* this_object;
+ JDWP::FrameId frame_id;
+};
+
JDWP::JdwpError Dbg::GetThisObject(JDWP::ObjectId thread_id, JDWP::FrameId frame_id,
JDWP::ObjectId* result) {
ScopedObjectAccessUnchecked soa(Thread::Current());
@@ -2522,50 +2564,48 @@
return JDWP::ERR_THREAD_NOT_SUSPENDED;
}
std::unique_ptr<Context> context(Context::Create());
- mirror::Object* this_object = nullptr;
- StackVisitor::WalkStack(
- [&](art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- if (frame_id != stack_visitor->GetFrameId()) {
- return true; // continue
- } else {
- this_object = stack_visitor->GetThisObject();
- return false;
- }
- },
- thread,
- context.get(),
- art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
- *result = gRegistry->Add(this_object);
+ GetThisVisitor visitor(thread, context.get(), frame_id);
+ visitor.WalkStack();
+ *result = gRegistry->Add(visitor.this_object);
return JDWP::ERR_NONE;
}
-template <typename FrameHandler>
-static JDWP::JdwpError FindAndHandleNonNativeFrame(Thread* thread,
- JDWP::FrameId frame_id,
- const FrameHandler& handler)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- JDWP::JdwpError result = JDWP::ERR_INVALID_FRAMEID;
- std::unique_ptr<Context> context(Context::Create());
- StackVisitor::WalkStack(
- [&](art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- if (stack_visitor->GetFrameId() != frame_id) {
- return true; // Not our frame, carry on.
- }
- ArtMethod* m = stack_visitor->GetMethod();
- if (m->IsNative()) {
- // We can't read/write local value from/into native method.
- result = JDWP::ERR_OPAQUE_FRAME;
- } else {
- // We found our frame.
- result = handler(stack_visitor);
- }
- return false;
- },
- thread,
- context.get(),
- art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
- return result;
-}
+// Walks the stack until we find the frame with the given FrameId.
+class FindFrameVisitor final : public StackVisitor {
+ public:
+ FindFrameVisitor(Thread* thread, Context* context, JDWP::FrameId frame_id)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ : StackVisitor(thread, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ frame_id_(frame_id),
+ error_(JDWP::ERR_INVALID_FRAMEID) {}
+
+ // TODO: Enable annotalysis. We know lock is held in constructor, but abstraction confuses
+ // annotalysis.
+ bool VisitFrame() override NO_THREAD_SAFETY_ANALYSIS {
+ if (GetFrameId() != frame_id_) {
+ return true; // Not our frame, carry on.
+ }
+ ArtMethod* m = GetMethod();
+ if (m->IsNative()) {
+ // We can't read/write local value from/into native method.
+ error_ = JDWP::ERR_OPAQUE_FRAME;
+ } else {
+ // We found our frame.
+ error_ = JDWP::ERR_NONE;
+ }
+ return false;
+ }
+
+ JDWP::JdwpError GetError() const {
+ return error_;
+ }
+
+ private:
+ const JDWP::FrameId frame_id_;
+ JDWP::JdwpError error_;
+
+ DISALLOW_COPY_AND_ASSIGN(FindFrameVisitor);
+};
JDWP::JdwpError Dbg::GetLocalValues(JDWP::Request* request, JDWP::ExpandBuf* pReply) {
JDWP::ObjectId thread_id = request->ReadThreadId();
@@ -2580,29 +2620,31 @@
if (!IsSuspendedForDebugger(soa, thread)) {
return JDWP::ERR_THREAD_NOT_SUSPENDED;
}
+ // Find the frame with the given frame_id.
+ std::unique_ptr<Context> context(Context::Create());
+ FindFrameVisitor visitor(thread, context.get(), frame_id);
+ visitor.WalkStack();
+ if (visitor.GetError() != JDWP::ERR_NONE) {
+ return visitor.GetError();
+ }
- return FindAndHandleNonNativeFrame(
- thread,
- frame_id,
- [&](art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- // Read the values from visitor's context.
- int32_t slot_count = request->ReadSigned32("slot count");
- expandBufAdd4BE(pReply, slot_count); /* "int values" */
- for (int32_t i = 0; i < slot_count; ++i) {
- uint32_t slot = request->ReadUnsigned32("slot");
- JDWP::JdwpTag reqSigByte = request->ReadTag();
+ // Read the values from visitor's context.
+ int32_t slot_count = request->ReadSigned32("slot count");
+ expandBufAdd4BE(pReply, slot_count); /* "int values" */
+ for (int32_t i = 0; i < slot_count; ++i) {
+ uint32_t slot = request->ReadUnsigned32("slot");
+ JDWP::JdwpTag reqSigByte = request->ReadTag();
- VLOG(jdwp) << " --> slot " << slot << " " << reqSigByte;
+ VLOG(jdwp) << " --> slot " << slot << " " << reqSigByte;
- size_t width = Dbg::GetTagWidth(reqSigByte);
- uint8_t* ptr = expandBufAddSpace(pReply, width + 1);
- error = Dbg::GetLocalValue(*stack_visitor, soa, slot, reqSigByte, ptr, width);
- if (error != JDWP::ERR_NONE) {
- return error;
- }
- }
- return JDWP::ERR_NONE;
- });
+ size_t width = Dbg::GetTagWidth(reqSigByte);
+ uint8_t* ptr = expandBufAddSpace(pReply, width + 1);
+ error = Dbg::GetLocalValue(visitor, soa, slot, reqSigByte, ptr, width);
+ if (error != JDWP::ERR_NONE) {
+ return error;
+ }
+ }
+ return JDWP::ERR_NONE;
}
constexpr JDWP::JdwpError kStackFrameLocalAccessError = JDWP::ERR_ABSENT_INFORMATION;
@@ -2749,27 +2791,29 @@
if (!IsSuspendedForDebugger(soa, thread)) {
return JDWP::ERR_THREAD_NOT_SUSPENDED;
}
+ // Find the frame with the given frame_id.
+ std::unique_ptr<Context> context(Context::Create());
+ FindFrameVisitor visitor(thread, context.get(), frame_id);
+ visitor.WalkStack();
+ if (visitor.GetError() != JDWP::ERR_NONE) {
+ return visitor.GetError();
+ }
- return FindAndHandleNonNativeFrame(
- thread,
- frame_id,
- [&](art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- // Writes the values into visitor's context.
- int32_t slot_count = request->ReadSigned32("slot count");
- for (int32_t i = 0; i < slot_count; ++i) {
- uint32_t slot = request->ReadUnsigned32("slot");
- JDWP::JdwpTag sigByte = request->ReadTag();
- size_t width = Dbg::GetTagWidth(sigByte);
- uint64_t value = request->ReadValue(width);
+ // Writes the values into visitor's context.
+ int32_t slot_count = request->ReadSigned32("slot count");
+ for (int32_t i = 0; i < slot_count; ++i) {
+ uint32_t slot = request->ReadUnsigned32("slot");
+ JDWP::JdwpTag sigByte = request->ReadTag();
+ size_t width = Dbg::GetTagWidth(sigByte);
+ uint64_t value = request->ReadValue(width);
- VLOG(jdwp) << " --> slot " << slot << " " << sigByte << " " << value;
- error = Dbg::SetLocalValue(thread, *stack_visitor, slot, sigByte, value, width);
- if (error != JDWP::ERR_NONE) {
- return error;
- }
- }
- return JDWP::ERR_NONE;
- });
+ VLOG(jdwp) << " --> slot " << slot << " " << sigByte << " " << value;
+ error = Dbg::SetLocalValue(thread, visitor, slot, sigByte, value, width);
+ if (error != JDWP::ERR_NONE) {
+ return error;
+ }
+ }
+ return JDWP::ERR_NONE;
}
template<typename T>
@@ -2941,71 +2985,107 @@
gJdwpState->PostFieldEvent(&location, f, this_object, field_value, true);
}
+/**
+ * Finds the location where this exception will be caught. We search until we reach the top
+ * frame, in which case this exception is considered uncaught.
+ */
+class CatchLocationFinder : public StackVisitor {
+ public:
+ CatchLocationFinder(Thread* self, const Handle<mirror::Throwable>& exception, Context* context)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ exception_(exception),
+ handle_scope_(self),
+ this_at_throw_(handle_scope_.NewHandle<mirror::Object>(nullptr)),
+ catch_method_(nullptr),
+ throw_method_(nullptr),
+ catch_dex_pc_(dex::kDexNoIndex),
+ throw_dex_pc_(dex::kDexNoIndex) {
+ }
+
+ bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
+ ArtMethod* method = GetMethod();
+ DCHECK(method != nullptr);
+ if (method->IsRuntimeMethod()) {
+ // Ignore callee save method.
+ DCHECK(method->IsCalleeSaveMethod());
+ return true;
+ }
+
+ uint32_t dex_pc = GetDexPc();
+ if (throw_method_ == nullptr) {
+ // First Java method found. It is either the method that threw the exception,
+ // or the Java native method that is reporting an exception thrown by
+ // native code.
+ this_at_throw_.Assign(GetThisObject());
+ throw_method_ = method;
+ throw_dex_pc_ = dex_pc;
+ }
+
+ if (dex_pc != dex::kDexNoIndex) {
+ StackHandleScope<1> hs(GetThread());
+ uint32_t found_dex_pc;
+ Handle<mirror::Class> exception_class(hs.NewHandle(exception_->GetClass()));
+ bool unused_clear_exception;
+ found_dex_pc = method->FindCatchBlock(exception_class, dex_pc, &unused_clear_exception);
+ if (found_dex_pc != dex::kDexNoIndex) {
+ catch_method_ = method;
+ catch_dex_pc_ = found_dex_pc;
+ return false; // End stack walk.
+ }
+ }
+ return true; // Continue stack walk.
+ }
+
+ ArtMethod* GetCatchMethod() REQUIRES_SHARED(Locks::mutator_lock_) {
+ return catch_method_;
+ }
+
+ ArtMethod* GetThrowMethod() REQUIRES_SHARED(Locks::mutator_lock_) {
+ return throw_method_;
+ }
+
+ mirror::Object* GetThisAtThrow() REQUIRES_SHARED(Locks::mutator_lock_) {
+ return this_at_throw_.Get();
+ }
+
+ uint32_t GetCatchDexPc() const {
+ return catch_dex_pc_;
+ }
+
+ uint32_t GetThrowDexPc() const {
+ return throw_dex_pc_;
+ }
+
+ private:
+ const Handle<mirror::Throwable>& exception_;
+ StackHandleScope<1> handle_scope_;
+ MutableHandle<mirror::Object> this_at_throw_;
+ ArtMethod* catch_method_;
+ ArtMethod* throw_method_;
+ uint32_t catch_dex_pc_;
+ uint32_t throw_dex_pc_;
+
+ DISALLOW_COPY_AND_ASSIGN(CatchLocationFinder);
+};
+
void Dbg::PostException(mirror::Throwable* exception_object) {
if (!IsDebuggerActive()) {
return;
}
Thread* const self = Thread::Current();
- StackHandleScope<2> handle_scope(self);
+ StackHandleScope<1> handle_scope(self);
Handle<mirror::Throwable> h_exception(handle_scope.NewHandle(exception_object));
- MutableHandle<mirror::Object> this_at_throw = handle_scope.NewHandle<mirror::Object>(nullptr);
std::unique_ptr<Context> context(Context::Create());
-
- ArtMethod* catch_method = nullptr;
- ArtMethod* throw_method = nullptr;
- uint32_t catch_dex_pc = dex::kDexNoIndex;
- uint32_t throw_dex_pc = dex::kDexNoIndex;
- StackVisitor::WalkStack(
- /**
- * Finds the location where this exception will be caught. We search until we reach the top
- * frame, in which case this exception is considered uncaught.
- */
- [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- ArtMethod* method = stack_visitor->GetMethod();
- DCHECK(method != nullptr);
- if (method->IsRuntimeMethod()) {
- // Ignore callee save method.
- DCHECK(method->IsCalleeSaveMethod());
- return true;
- }
-
- uint32_t dex_pc = stack_visitor->GetDexPc();
- if (throw_method == nullptr) {
- // First Java method found. It is either the method that threw the exception,
- // or the Java native method that is reporting an exception thrown by
- // native code.
- this_at_throw.Assign(stack_visitor->GetThisObject());
- throw_method = method;
- throw_dex_pc = dex_pc;
- }
-
- if (dex_pc != dex::kDexNoIndex) {
- StackHandleScope<1> hs(stack_visitor->GetThread());
- uint32_t found_dex_pc;
- Handle<mirror::Class> exception_class(hs.NewHandle(h_exception->GetClass()));
- bool unused_clear_exception;
- found_dex_pc = method->FindCatchBlock(exception_class, dex_pc, &unused_clear_exception);
- if (found_dex_pc != dex::kDexNoIndex) {
- catch_method = method;
- catch_dex_pc = found_dex_pc;
- return false; // End stack walk.
- }
- }
- return true; // Continue stack walk.
- },
- self,
- context.get(),
- art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
-
+ CatchLocationFinder clf(self, h_exception, context.get());
+ clf.WalkStack(/* include_transitions= */ false);
JDWP::EventLocation exception_throw_location;
- SetEventLocation(&exception_throw_location, throw_method, throw_dex_pc);
+ SetEventLocation(&exception_throw_location, clf.GetThrowMethod(), clf.GetThrowDexPc());
JDWP::EventLocation exception_catch_location;
- SetEventLocation(&exception_catch_location, catch_method, catch_dex_pc);
+ SetEventLocation(&exception_catch_location, clf.GetCatchMethod(), clf.GetCatchDexPc());
- gJdwpState->PostException(&exception_throw_location,
- h_exception.Get(),
- &exception_catch_location,
- this_at_throw.Get());
+ gJdwpState->PostException(&exception_throw_location, h_exception.Get(), &exception_catch_location,
+ clf.GetThisAtThrow());
}
void Dbg::PostClassPrepare(mirror::Class* c) {
@@ -3569,6 +3649,56 @@
return instrumentation->IsDeoptimized(m);
}
+class NeedsDeoptimizationVisitor : public StackVisitor {
+ public:
+ explicit NeedsDeoptimizationVisitor(Thread* self)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ needs_deoptimization_(false) {}
+
+ bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
+ // The visitor is meant to be used when handling exception from compiled code only.
+ CHECK(!IsShadowFrame()) << "We only expect to visit compiled frame: "
+ << ArtMethod::PrettyMethod(GetMethod());
+ ArtMethod* method = GetMethod();
+ if (method == nullptr) {
+ // We reach an upcall and don't need to deoptimize this part of the stack (ManagedFragment)
+ // so we can stop the visit.
+ DCHECK(!needs_deoptimization_);
+ return false;
+ }
+ if (Runtime::Current()->GetInstrumentation()->InterpretOnly()) {
+ // We found a compiled frame in the stack but instrumentation is set to interpret
+ // everything: we need to deoptimize.
+ needs_deoptimization_ = true;
+ return false;
+ }
+ if (Runtime::Current()->GetInstrumentation()->IsDeoptimized(method)) {
+ // We found a deoptimized method in the stack.
+ needs_deoptimization_ = true;
+ return false;
+ }
+ ShadowFrame* frame = GetThread()->FindDebuggerShadowFrame(GetFrameId());
+ if (frame != nullptr) {
+ // The debugger allocated a ShadowFrame to update a variable in the stack: we need to
+ // deoptimize the stack to execute (and deallocate) this frame.
+ needs_deoptimization_ = true;
+ return false;
+ }
+ return true;
+ }
+
+ bool NeedsDeoptimization() const {
+ return needs_deoptimization_;
+ }
+
+ private:
+ // Do we need to deoptimize the stack?
+ bool needs_deoptimization_;
+
+ DISALLOW_COPY_AND_ASSIGN(NeedsDeoptimizationVisitor);
+};
+
// Do we need to deoptimize the stack to handle an exception?
bool Dbg::IsForcedInterpreterNeededForExceptionImpl(Thread* thread) {
const SingleStepControl* const ssc = thread->GetSingleStepControl();
@@ -3578,45 +3708,9 @@
}
// Deoptimization is required if at least one method in the stack needs it. However we
// skip frames that will be unwound (thus not executed).
- bool needs_deoptimization = false;
- StackVisitor::WalkStack(
- [&](art::StackVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- // The visitor is meant to be used when handling exception from compiled code only.
- CHECK(!visitor->IsShadowFrame()) << "We only expect to visit compiled frame: "
- << ArtMethod::PrettyMethod(visitor->GetMethod());
- ArtMethod* method = visitor->GetMethod();
- if (method == nullptr) {
- // We reach an upcall and don't need to deoptimize this part of the stack (ManagedFragment)
- // so we can stop the visit.
- DCHECK(!needs_deoptimization);
- return false;
- }
- if (Runtime::Current()->GetInstrumentation()->InterpretOnly()) {
- // We found a compiled frame in the stack but instrumentation is set to interpret
- // everything: we need to deoptimize.
- needs_deoptimization = true;
- return false;
- }
- if (Runtime::Current()->GetInstrumentation()->IsDeoptimized(method)) {
- // We found a deoptimized method in the stack.
- needs_deoptimization = true;
- return false;
- }
- ShadowFrame* frame = visitor->GetThread()->FindDebuggerShadowFrame(visitor->GetFrameId());
- if (frame != nullptr) {
- // The debugger allocated a ShadowFrame to update a variable in the stack: we need to
- // deoptimize the stack to execute (and deallocate) this frame.
- needs_deoptimization = true;
- return false;
- }
- return true;
- },
- thread,
- /* context= */ nullptr,
- art::StackVisitor::StackWalkKind::kIncludeInlinedFrames,
- /* check_suspended */ true,
- /* include_transitions */ true);
- return needs_deoptimization;
+ NeedsDeoptimizationVisitor visitor(thread);
+ visitor.WalkStack(true); // includes upcall.
+ return visitor.NeedsDeoptimization();
}
// Scoped utility class to suspend a thread so that we may do tasks such as walk its stack. Doesn't
diff --git a/runtime/gc/allocation_record.cc b/runtime/gc/allocation_record.cc
index 80e3394..e11fa5c 100644
--- a/runtime/gc/allocation_record.cc
+++ b/runtime/gc/allocation_record.cc
@@ -184,6 +184,34 @@
new_record_condition_.Broadcast(Thread::Current());
}
+class AllocRecordStackVisitor : public StackVisitor {
+ public:
+ AllocRecordStackVisitor(Thread* thread, size_t max_depth, AllocRecordStackTrace* trace_out)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ max_depth_(max_depth),
+ trace_(trace_out) {}
+
+ // TODO: Enable annotalysis. We know lock is held in constructor, but abstraction confuses
+ // annotalysis.
+ bool VisitFrame() override NO_THREAD_SAFETY_ANALYSIS {
+ if (trace_->GetDepth() >= max_depth_) {
+ return false;
+ }
+ ArtMethod* m = GetMethod();
+ // m may be null if we have inlined methods of unresolved classes. b/27858645
+ if (m != nullptr && !m->IsRuntimeMethod()) {
+ m = m->GetInterfaceMethodIfProxy(kRuntimePointerSize);
+ trace_->AddStackElement(AllocRecordStackTraceElement(m, GetDexPc()));
+ }
+ return true;
+ }
+
+ private:
+ const size_t max_depth_;
+ AllocRecordStackTrace* const trace_;
+};
+
void AllocRecordObjectMap::SetAllocTrackingEnabled(bool enable) {
Thread* self = Thread::Current();
Heap* heap = Runtime::Current()->GetHeap();
@@ -240,26 +268,11 @@
// Get stack trace outside of lock in case there are allocations during the stack walk.
// b/27858645.
AllocRecordStackTrace trace;
+ AllocRecordStackVisitor visitor(self, max_stack_depth_, /*out*/ &trace);
{
StackHandleScope<1> hs(self);
auto obj_wrapper = hs.NewHandleWrapper(obj);
-
- StackVisitor::WalkStack(
- [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- if (trace.GetDepth() >= max_stack_depth_) {
- return false;
- }
- ArtMethod* m = stack_visitor->GetMethod();
- // m may be null if we have inlined methods of unresolved classes. b/27858645
- if (m != nullptr && !m->IsRuntimeMethod()) {
- m = m->GetInterfaceMethodIfProxy(kRuntimePointerSize);
- trace.AddStackElement(AllocRecordStackTraceElement(m, stack_visitor->GetDexPc()));
- }
- return true;
- },
- self,
- /* context= */ nullptr,
- art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
+ visitor.WalkStack();
}
MutexLock mu(self, *Locks::alloc_tracker_lock_);
diff --git a/runtime/instrumentation.cc b/runtime/instrumentation.cc
index 12f1522..cbcaaef 100644
--- a/runtime/instrumentation.cc
+++ b/runtime/instrumentation.cc
@@ -18,8 +18,6 @@
#include <sstream>
-#include <android-base/logging.h>
-
#include "arch/context.h"
#include "art_field-inl.h"
#include "art_method-inl.h"
@@ -1357,66 +1355,65 @@
}
// Try to get the shorty of a runtime method if it's an invocation stub.
-static char GetRuntimeMethodShorty(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_) {
- char shorty = 'V';
- StackVisitor::WalkStack(
- [&shorty](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- ArtMethod* m = stack_visitor->GetMethod();
- if (m == nullptr || m->IsRuntimeMethod()) {
- return true;
- }
- // The first Java method.
- if (m->IsNative()) {
- // Use JNI method's shorty for the jni stub.
- shorty = m->GetShorty()[0];
- } else if (m->IsProxyMethod()) {
- // Proxy method just invokes its proxied method via
- // art_quick_proxy_invoke_handler.
- shorty = m->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty()[0];
+struct RuntimeMethodShortyVisitor : public StackVisitor {
+ explicit RuntimeMethodShortyVisitor(Thread* thread)
+ : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ shorty('V') {}
+
+ static uint16_t GetMethodIndexOfInvoke(ArtMethod* caller,
+ const Instruction& inst,
+ uint32_t dex_pc)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ switch (inst.Opcode()) {
+ case Instruction::INVOKE_VIRTUAL_RANGE_QUICK:
+ case Instruction::INVOKE_VIRTUAL_QUICK: {
+ uint16_t method_idx = caller->GetIndexFromQuickening(dex_pc);
+ CHECK_NE(method_idx, DexFile::kDexNoIndex16);
+ return method_idx;
+ }
+ default: {
+ return inst.VRegB();
+ }
+ }
+ }
+
+ bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
+ ArtMethod* m = GetMethod();
+ if (m == nullptr || m->IsRuntimeMethod()) {
+ return true;
+ }
+ // The first Java method.
+ if (m->IsNative()) {
+ // Use JNI method's shorty for the jni stub.
+ shorty = m->GetShorty()[0];
+ } else if (m->IsProxyMethod()) {
+ // Proxy method just invokes its proxied method via
+ // art_quick_proxy_invoke_handler.
+ shorty = m->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty()[0];
+ } else {
+ const Instruction& instr = m->DexInstructions().InstructionAt(GetDexPc());
+ if (instr.IsInvoke()) {
+ uint16_t method_index = GetMethodIndexOfInvoke(m, instr, GetDexPc());
+ const DexFile* dex_file = m->GetDexFile();
+ if (interpreter::IsStringInit(dex_file, method_index)) {
+ // Invoking string init constructor is turned into invoking
+ // StringFactory.newStringFromChars() which returns a string.
+ shorty = 'L';
} else {
- const Instruction& instr = m->DexInstructions().InstructionAt(stack_visitor->GetDexPc());
- if (instr.IsInvoke()) {
- auto get_method_index_fn = [](ArtMethod* caller,
- const Instruction& inst,
- uint32_t dex_pc)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- switch (inst.Opcode()) {
- case Instruction::INVOKE_VIRTUAL_RANGE_QUICK:
- case Instruction::INVOKE_VIRTUAL_QUICK: {
- uint16_t method_idx = caller->GetIndexFromQuickening(dex_pc);
- CHECK_NE(method_idx, DexFile::kDexNoIndex16);
- return method_idx;
- }
- default: {
- return static_cast<uint16_t>(inst.VRegB());
- }
- }
- };
-
- uint16_t method_index = get_method_index_fn(m, instr, stack_visitor->GetDexPc());
- const DexFile* dex_file = m->GetDexFile();
- if (interpreter::IsStringInit(dex_file, method_index)) {
- // Invoking string init constructor is turned into invoking
- // StringFactory.newStringFromChars() which returns a string.
- shorty = 'L';
- } else {
- shorty = dex_file->GetMethodShorty(method_index)[0];
- }
-
- } else {
- // It could be that a non-invoke opcode invokes a stub, which in turn
- // invokes Java code. In such cases, we should never expect a return
- // value from the stub.
- }
+ shorty = dex_file->GetMethodShorty(method_index)[0];
}
- // Stop stack walking since we've seen a Java frame.
- return false;
- },
- thread,
- /* context= */ nullptr,
- art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
- return shorty;
-}
+ } else {
+ // It could be that a non-invoke opcode invokes a stub, which in turn
+ // invokes Java code. In such cases, we should never expect a return
+ // value from the stub.
+ }
+ }
+ // Stop stack walking since we've seen a Java frame.
+ return false;
+ }
+
+ char shorty;
+};
TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self,
uintptr_t* return_pc,
@@ -1450,7 +1447,9 @@
// for clinit, we need to pass return results to the caller.
// We need the correct shorty to decide whether we need to pass the return
// result for deoptimization below.
- return_shorty = GetRuntimeMethodShorty(self);
+ RuntimeMethodShortyVisitor visitor(self);
+ visitor.WalkStack();
+ return_shorty = visitor.shorty;
} else {
// Some runtime methods such as allocations, unresolved field getters, etc.
// have return value. We don't need to set return_value since MethodExitEvent()
diff --git a/runtime/jit/jit_code_cache.cc b/runtime/jit/jit_code_cache.cc
index 7729838..1701ca8 100644
--- a/runtime/jit/jit_code_cache.cc
+++ b/runtime/jit/jit_code_cache.cc
@@ -18,8 +18,7 @@
#include <sstream>
-#include <android-base/logging.h>
-#include <android-base/unique_fd.h>
+#include "android-base/unique_fd.h"
#include "arch/context.h"
#include "art_method-inl.h"
@@ -1273,6 +1272,31 @@
}
}
+class MarkCodeVisitor final : public StackVisitor {
+ public:
+ MarkCodeVisitor(Thread* thread_in, JitCodeCache* code_cache_in, CodeCacheBitmap* bitmap)
+ : StackVisitor(thread_in, nullptr, StackVisitor::StackWalkKind::kSkipInlinedFrames),
+ code_cache_(code_cache_in),
+ bitmap_(bitmap) {}
+
+ bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
+ const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
+ if (method_header == nullptr) {
+ return true;
+ }
+ const void* code = method_header->GetCode();
+ if (code_cache_->ContainsPc(code)) {
+ // Use the atomic set version, as multiple threads are executing this code.
+ bitmap_->AtomicTestAndSet(FromCodeToAllocation(code));
+ }
+ return true;
+ }
+
+ private:
+ JitCodeCache* const code_cache_;
+ CodeCacheBitmap* const bitmap_;
+};
+
class MarkCodeClosure final : public Closure {
public:
MarkCodeClosure(JitCodeCache* code_cache, CodeCacheBitmap* bitmap, Barrier* barrier)
@@ -1281,24 +1305,8 @@
void Run(Thread* thread) override REQUIRES_SHARED(Locks::mutator_lock_) {
ScopedTrace trace(__PRETTY_FUNCTION__);
DCHECK(thread == Thread::Current() || thread->IsSuspended());
- StackVisitor::WalkStack(
- [&](const art::StackVisitor* stack_visitor) {
- const OatQuickMethodHeader* method_header =
- stack_visitor->GetCurrentOatQuickMethodHeader();
- if (method_header == nullptr) {
- return true;
- }
- const void* code = method_header->GetCode();
- if (code_cache_->ContainsPc(code)) {
- // Use the atomic set version, as multiple threads are executing this code.
- bitmap_->AtomicTestAndSet(FromCodeToAllocation(code));
- }
- return true;
- },
- thread,
- /* context= */ nullptr,
- art::StackVisitor::StackWalkKind::kSkipInlinedFrames);
-
+ MarkCodeVisitor visitor(thread, code_cache_, bitmap_);
+ visitor.WalkStack();
if (kIsDebugBuild) {
// The stack walking code queries the side instrumentation stack if it
// sees an instrumentation exit pc, so the JIT code of methods in that stack
diff --git a/runtime/monitor.cc b/runtime/monitor.cc
index 6479283..df2a8e2 100644
--- a/runtime/monitor.cc
+++ b/runtime/monitor.cc
@@ -277,6 +277,43 @@
obj_ = GcRoot<mirror::Object>(object);
}
+// Note: Adapted from CurrentMethodVisitor in thread.cc. We must not resolve here.
+
+struct NthCallerWithDexPcVisitor final : public StackVisitor {
+ explicit NthCallerWithDexPcVisitor(Thread* thread, size_t frame)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ method_(nullptr),
+ dex_pc_(0),
+ current_frame_number_(0),
+ wanted_frame_number_(frame) {}
+ bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
+ ArtMethod* m = GetMethod();
+ if (m == nullptr || m->IsRuntimeMethod()) {
+ // Runtime method, upcall, or resolution issue. Skip.
+ return true;
+ }
+
+ // Is this the requested frame?
+ if (current_frame_number_ == wanted_frame_number_) {
+ method_ = m;
+ dex_pc_ = GetDexPc(/* abort_on_failure=*/ false);
+ return false;
+ }
+
+ // Look for more.
+ current_frame_number_++;
+ return true;
+ }
+
+ ArtMethod* method_;
+ uint32_t dex_pc_;
+
+ private:
+ size_t current_frame_number_;
+ const size_t wanted_frame_number_;
+};
+
// This function is inlined and just helps to not have the VLOG and ATRACE check at all the
// potential tracing points.
void Monitor::AtraceMonitorLock(Thread* self, mirror::Object* obj, bool is_wait) {
@@ -289,41 +326,13 @@
// Wait() requires a deeper call stack to be useful. Otherwise you'll see "Waiting at
// Object.java". Assume that we'll wait a nontrivial amount, so it's OK to do a longer
// stack walk than if !is_wait.
- const size_t wanted_frame_number = is_wait ? 1U : 0U;
-
- ArtMethod* method = nullptr;
- uint32_t dex_pc = 0u;
-
- size_t current_frame_number = 0u;
- StackVisitor::WalkStack(
- // Note: Adapted from CurrentMethodVisitor in thread.cc. We must not resolve here.
- [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- ArtMethod* m = stack_visitor->GetMethod();
- if (m == nullptr || m->IsRuntimeMethod()) {
- // Runtime method, upcall, or resolution issue. Skip.
- return true;
- }
-
- // Is this the requested frame?
- if (current_frame_number == wanted_frame_number) {
- method = m;
- dex_pc = stack_visitor->GetDexPc(false /* abort_on_error*/);
- return false;
- }
-
- // Look for more.
- current_frame_number++;
- return true;
- },
- self,
- /* context= */ nullptr,
- art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
-
+ NthCallerWithDexPcVisitor visitor(self, is_wait ? 1U : 0U);
+ visitor.WalkStack(false);
const char* prefix = is_wait ? "Waiting on " : "Locking ";
const char* filename;
int32_t line_number;
- TranslateLocation(method, dex_pc, &filename, &line_number);
+ TranslateLocation(visitor.method_, visitor.dex_pc_, &filename, &line_number);
// It would be nice to have a stable "ID" for the object here. However, the only stable thing
// would be the identity hashcode. But we cannot use IdentityHashcode here: For one, there are
diff --git a/runtime/quick_exception_handler.cc b/runtime/quick_exception_handler.cc
index d4e3d54..afdfefa 100644
--- a/runtime/quick_exception_handler.cc
+++ b/runtime/quick_exception_handler.cc
@@ -154,36 +154,46 @@
DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
};
-static size_t GetInstrumentationFramesToPop(Thread* self, size_t frame_depth)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- CHECK_NE(frame_depth, kInvalidFrameDepth);
- size_t instrumentation_frames_to_pop = 0;
- StackVisitor::WalkStack(
- [&](art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- size_t current_frame_depth = stack_visitor->GetFrameDepth();
- if (current_frame_depth < frame_depth) {
- CHECK(stack_visitor->GetMethod() != nullptr);
- if (UNLIKELY(reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) ==
- stack_visitor->GetReturnPc())) {
- if (!stack_visitor->IsInInlinedFrame()) {
- // We do not count inlined frames, because we do not instrument them. The reason we
- // include them in the stack walking is the check against `frame_depth_`, which is
- // given to us by a visitor that visits inlined frames.
- ++instrumentation_frames_to_pop;
- }
- }
- return true;
+// Counts instrumentation stack frames prior to catch handler or upcall.
+class InstrumentationStackVisitor : public StackVisitor {
+ public:
+ InstrumentationStackVisitor(Thread* self, size_t frame_depth)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ frame_depth_(frame_depth),
+ instrumentation_frames_to_pop_(0) {
+ CHECK_NE(frame_depth_, kInvalidFrameDepth);
+ }
+
+ bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
+ size_t current_frame_depth = GetFrameDepth();
+ if (current_frame_depth < frame_depth_) {
+ CHECK(GetMethod() != nullptr);
+ if (UNLIKELY(reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) == GetReturnPc())) {
+ if (!IsInInlinedFrame()) {
+ // We do not count inlined frames, because we do not instrument them. The reason we
+ // include them in the stack walking is the check against `frame_depth_`, which is
+ // given to us by a visitor that visits inlined frames.
+ ++instrumentation_frames_to_pop_;
}
- // We reached the frame of the catch handler or the upcall.
- return false;
- },
- self,
- /* context= */ nullptr,
- art::StackVisitor::StackWalkKind::kIncludeInlinedFrames,
- /* check_suspended */ true,
- /* include_transitions */ true);
- return instrumentation_frames_to_pop;
-}
+ }
+ return true;
+ } else {
+ // We reached the frame of the catch handler or the upcall.
+ return false;
+ }
+ }
+
+ size_t GetInstrumentationFramesToPop() const {
+ return instrumentation_frames_to_pop_;
+ }
+
+ private:
+ const size_t frame_depth_;
+ size_t instrumentation_frames_to_pop_;
+
+ DISALLOW_COPY_AND_ASSIGN(InstrumentationStackVisitor);
+};
// Finds the appropriate exception catch after calling all method exit instrumentation functions.
// Note that this might change the exception being thrown.
@@ -219,8 +229,9 @@
// Figure out how many of those frames have instrumentation we need to remove (Should be the
// exact same as number of new_pop_count if there aren't inlined frames).
- size_t instrumentation_frames_to_pop =
- GetInstrumentationFramesToPop(self_, handler_frame_depth_);
+ InstrumentationStackVisitor instrumentation_visitor(self_, handler_frame_depth_);
+ instrumentation_visitor.WalkStack(true);
+ size_t instrumentation_frames_to_pop = instrumentation_visitor.GetInstrumentationFramesToPop();
if (kDebugExceptionDelivery) {
if (*handler_quick_frame_ == nullptr) {
@@ -636,8 +647,10 @@
DCHECK(is_deoptimization_) << "Non-deoptimization handlers should use FindCatch";
uintptr_t return_pc = 0;
if (method_tracing_active_) {
- size_t instrumentation_frames_to_pop =
- GetInstrumentationFramesToPop(self_, handler_frame_depth_);
+ InstrumentationStackVisitor visitor(self_, handler_frame_depth_);
+ visitor.WalkStack(true);
+
+ size_t instrumentation_frames_to_pop = visitor.GetInstrumentationFramesToPop();
instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
return_pc = instrumentation->PopFramesForDeoptimization(self_, instrumentation_frames_to_pop);
}
@@ -658,41 +671,53 @@
UNREACHABLE();
}
+// Prints out methods with their type of frame.
+class DumpFramesWithTypeStackVisitor final : public StackVisitor {
+ public:
+ explicit DumpFramesWithTypeStackVisitor(Thread* self, bool show_details = false)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ show_details_(show_details) {}
+
+ bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
+ ArtMethod* method = GetMethod();
+ if (show_details_) {
+ LOG(INFO) << "|> pc = " << std::hex << GetCurrentQuickFramePc();
+ LOG(INFO) << "|> addr = " << std::hex << reinterpret_cast<uintptr_t>(GetCurrentQuickFrame());
+ if (GetCurrentQuickFrame() != nullptr && method != nullptr) {
+ LOG(INFO) << "|> ret = " << std::hex << GetReturnPc();
+ }
+ }
+ if (method == nullptr) {
+ // Transition, do go on, we want to unwind over bridges, all the way.
+ if (show_details_) {
+ LOG(INFO) << "N <transition>";
+ }
+ return true;
+ } else if (method->IsRuntimeMethod()) {
+ if (show_details_) {
+ LOG(INFO) << "R " << method->PrettyMethod(true);
+ }
+ return true;
+ } else {
+ bool is_shadow = GetCurrentShadowFrame() != nullptr;
+ LOG(INFO) << (is_shadow ? "S" : "Q")
+ << ((!is_shadow && IsInInlinedFrame()) ? "i" : " ")
+ << " "
+ << method->PrettyMethod(true);
+ return true; // Go on.
+ }
+ }
+
+ private:
+ bool show_details_;
+
+ DISALLOW_COPY_AND_ASSIGN(DumpFramesWithTypeStackVisitor);
+};
+
void QuickExceptionHandler::DumpFramesWithType(Thread* self, bool details) {
- StackVisitor::WalkStack(
- [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- ArtMethod* method = stack_visitor->GetMethod();
- if (details) {
- LOG(INFO) << "|> pc = " << std::hex << stack_visitor->GetCurrentQuickFramePc();
- LOG(INFO) << "|> addr = " << std::hex
- << reinterpret_cast<uintptr_t>(stack_visitor->GetCurrentQuickFrame());
- if (stack_visitor->GetCurrentQuickFrame() != nullptr && method != nullptr) {
- LOG(INFO) << "|> ret = " << std::hex << stack_visitor->GetReturnPc();
- }
- }
- if (method == nullptr) {
- // Transition, do go on, we want to unwind over bridges, all the way.
- if (details) {
- LOG(INFO) << "N <transition>";
- }
- return true;
- } else if (method->IsRuntimeMethod()) {
- if (details) {
- LOG(INFO) << "R " << method->PrettyMethod(true);
- }
- return true;
- } else {
- bool is_shadow = stack_visitor->GetCurrentShadowFrame() != nullptr;
- LOG(INFO) << (is_shadow ? "S" : "Q")
- << ((!is_shadow && stack_visitor->IsInInlinedFrame()) ? "i" : " ")
- << " "
- << method->PrettyMethod(true);
- return true; // Go on.
- }
- },
- self,
- /* context= */ nullptr,
- art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
+ DumpFramesWithTypeStackVisitor visitor(self, details);
+ visitor.WalkStack(true);
}
} // namespace art
diff --git a/runtime/stack.h b/runtime/stack.h
index 9d30115..02578d2 100644
--- a/runtime/stack.h
+++ b/runtime/stack.h
@@ -143,36 +143,6 @@
template <CountTransitions kCount = CountTransitions::kYes>
void WalkStack(bool include_transitions = false) REQUIRES_SHARED(Locks::mutator_lock_);
- // Convenience helper function to walk the stack with a lambda as a visitor.
- template <CountTransitions kCountTransitions = CountTransitions::kYes,
- typename T>
- ALWAYS_INLINE static void WalkStack(const T& fn,
- Thread* thread,
- Context* context,
- StackWalkKind walk_kind,
- bool check_suspended = true,
- bool include_transitions = false)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- class LambdaStackVisitor : public StackVisitor {
- public:
- LambdaStackVisitor(const T& fn,
- Thread* thread,
- Context* context,
- StackWalkKind walk_kind,
- bool check_suspended = true)
- : StackVisitor(thread, context, walk_kind, check_suspended), fn_(fn) {}
-
- bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
- return fn_(this);
- }
-
- private:
- T fn_;
- };
- LambdaStackVisitor visitor(fn, thread, context, walk_kind, check_suspended);
- visitor.template WalkStack<kCountTransitions>(include_transitions);
- }
-
Thread* GetThread() const {
return thread_;
}
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 33cd9bb..e9fed76 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -3607,34 +3607,42 @@
return result;
}
-ArtMethod* Thread::GetCurrentMethod(uint32_t* dex_pc_out,
+ArtMethod* Thread::GetCurrentMethod(uint32_t* dex_pc,
bool check_suspended,
bool abort_on_error) const {
// Note: this visitor may return with a method set, but dex_pc_ being DexFile:kDexNoIndex. This is
// so we don't abort in a special situation (thinlocked monitor) when dumping the Java
// stack.
- ArtMethod* method = nullptr;
- uint32_t dex_pc = dex::kDexNoIndex;
- StackVisitor::WalkStack(
- [&](const StackVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- ArtMethod* m = visitor->GetMethod();
- if (m->IsRuntimeMethod()) {
- // Continue if this is a runtime method.
- return true;
- }
- method = m;
- dex_pc = visitor->GetDexPc(abort_on_error);
- return false;
- },
- const_cast<Thread*>(this),
- /* context= */ nullptr,
- StackVisitor::StackWalkKind::kIncludeInlinedFrames,
- check_suspended);
-
- if (dex_pc_out != nullptr) {
- *dex_pc_out = dex_pc;
+ struct CurrentMethodVisitor final : public StackVisitor {
+ CurrentMethodVisitor(Thread* thread, bool check_suspended, bool abort_on_error)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ : StackVisitor(thread,
+ /* context= */nullptr,
+ StackVisitor::StackWalkKind::kIncludeInlinedFrames,
+ check_suspended),
+ method_(nullptr),
+ dex_pc_(0),
+ abort_on_error_(abort_on_error) {}
+ bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
+ ArtMethod* m = GetMethod();
+ if (m->IsRuntimeMethod()) {
+ // Continue if this is a runtime method.
+ return true;
+ }
+ method_ = m;
+ dex_pc_ = GetDexPc(abort_on_error_);
+ return false;
+ }
+ ArtMethod* method_;
+ uint32_t dex_pc_;
+ const bool abort_on_error_;
+ };
+ CurrentMethodVisitor visitor(const_cast<Thread*>(this), check_suspended, abort_on_error);
+ visitor.WalkStack(false);
+ if (dex_pc != nullptr) {
+ *dex_pc = visitor.dex_pc_;
}
- return method;
+ return visitor.method_;
}
bool Thread::HoldsLock(ObjPtr<mirror::Object> object) const {
diff --git a/runtime/trace.cc b/runtime/trace.cc
index f6c36cf..ad58c2e 100644
--- a/runtime/trace.cc
+++ b/runtime/trace.cc
@@ -58,6 +58,32 @@
static constexpr uint8_t kOpNewThread = 2U;
static constexpr uint8_t kOpTraceSummary = 3U;
+class BuildStackTraceVisitor : public StackVisitor {
+ public:
+ explicit BuildStackTraceVisitor(Thread* thread)
+ : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ method_trace_(Trace::AllocStackTrace()) {}
+
+ bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
+ ArtMethod* m = GetMethod();
+ // Ignore runtime frames (in particular callee save).
+ if (!m->IsRuntimeMethod()) {
+ method_trace_->push_back(m);
+ }
+ return true;
+ }
+
+ // Returns a stack trace where the topmost frame corresponds with the first element of the vector.
+ std::vector<ArtMethod*>* GetStackTrace() const {
+ return method_trace_;
+ }
+
+ private:
+ std::vector<ArtMethod*>* const method_trace_;
+
+ DISALLOW_COPY_AND_ASSIGN(BuildStackTraceVisitor);
+};
+
static const char kTraceTokenChar = '*';
static const uint16_t kTraceHeaderLength = 32;
static const uint32_t kTraceMagicValue = 0x574f4c53;
@@ -202,19 +228,9 @@
}
static void GetSample(Thread* thread, void* arg) REQUIRES_SHARED(Locks::mutator_lock_) {
- std::vector<ArtMethod*>* const stack_trace = Trace::AllocStackTrace();
- StackVisitor::WalkStack(
- [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- ArtMethod* m = stack_visitor->GetMethod();
- // Ignore runtime frames (in particular callee save).
- if (!m->IsRuntimeMethod()) {
- stack_trace->push_back(m);
- }
- return true;
- },
- thread,
- /* context= */ nullptr,
- art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
+ BuildStackTraceVisitor build_trace_visitor(thread);
+ build_trace_visitor.WalkStack();
+ std::vector<ArtMethod*>* stack_trace = build_trace_visitor.GetStackTrace();
Trace* the_trace = reinterpret_cast<Trace*>(arg);
the_trace->CompareAndUpdateStackTrace(thread, stack_trace);
}
diff --git a/test/461-get-reference-vreg/get_reference_vreg_jni.cc b/test/461-get-reference-vreg/get_reference_vreg_jni.cc
index 817a647..ddc86df 100644
--- a/test/461-get-reference-vreg/get_reference_vreg_jni.cc
+++ b/test/461-get-reference-vreg/get_reference_vreg_jni.cc
@@ -25,50 +25,62 @@
namespace {
-jint FindMethodIndex(jobject this_value_jobj) {
- ScopedObjectAccess soa(Thread::Current());
- std::unique_ptr<Context> context(Context::Create());
- ObjPtr<mirror::Object> this_value = soa.Decode<mirror::Object>(this_value_jobj);
- jint found_method_index = 0;
- StackVisitor::WalkStack(
- [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- ArtMethod* m = stack_visitor->GetMethod();
- std::string m_name(m->GetName());
+class TestVisitor : public StackVisitor {
+ public:
+ TestVisitor(Thread* thread, Context* context, mirror::Object* this_value)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ : StackVisitor(thread, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ this_value_(this_value),
+ found_method_index_(0) {}
- if (m_name.compare("$noinline$testThisWithInstanceCall") == 0) {
- found_method_index = 1;
- uint32_t value = 0;
- CHECK(stack_visitor->GetVReg(m, 1, kReferenceVReg, &value));
- CHECK_EQ(reinterpret_cast<mirror::Object*>(value), this_value);
- CHECK_EQ(stack_visitor->GetThisObject(), this_value);
- } else if (m_name.compare("$noinline$testThisWithStaticCall") == 0) {
- found_method_index = 2;
- uint32_t value = 0;
- CHECK(stack_visitor->GetVReg(m, 1, kReferenceVReg, &value));
- } else if (m_name.compare("$noinline$testParameter") == 0) {
- found_method_index = 3;
- uint32_t value = 0;
- CHECK(stack_visitor->GetVReg(m, 1, kReferenceVReg, &value));
- } else if (m_name.compare("$noinline$testObjectInScope") == 0) {
- found_method_index = 4;
- uint32_t value = 0;
- CHECK(stack_visitor->GetVReg(m, 0, kReferenceVReg, &value));
- }
+ bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
+ ArtMethod* m = GetMethod();
+ std::string m_name(m->GetName());
- return true;
- },
- soa.Self(),
- context.get(),
- art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
- return found_method_index;
-}
+ if (m_name.compare("$noinline$testThisWithInstanceCall") == 0) {
+ found_method_index_ = 1;
+ uint32_t value = 0;
+ CHECK(GetVReg(m, 1, kReferenceVReg, &value));
+ CHECK_EQ(reinterpret_cast<mirror::Object*>(value), this_value_);
+ CHECK_EQ(GetThisObject(), this_value_);
+ } else if (m_name.compare("$noinline$testThisWithStaticCall") == 0) {
+ found_method_index_ = 2;
+ uint32_t value = 0;
+ CHECK(GetVReg(m, 1, kReferenceVReg, &value));
+ } else if (m_name.compare("$noinline$testParameter") == 0) {
+ found_method_index_ = 3;
+ uint32_t value = 0;
+ CHECK(GetVReg(m, 1, kReferenceVReg, &value));
+ } else if (m_name.compare("$noinline$testObjectInScope") == 0) {
+ found_method_index_ = 4;
+ uint32_t value = 0;
+ CHECK(GetVReg(m, 0, kReferenceVReg, &value));
+ }
+
+ return true;
+ }
+
+ mirror::Object* this_value_;
+
+  // Value returned to Java to ensure the $noinline$test* methods above
+  // have been found and tested.
+ jint found_method_index_;
+};
extern "C" JNIEXPORT jint JNICALL Java_Main_doNativeCallRef(JNIEnv*, jobject value) {
- return FindMethodIndex(value);
+ ScopedObjectAccess soa(Thread::Current());
+ std::unique_ptr<Context> context(Context::Create());
+ TestVisitor visitor(soa.Self(), context.get(), soa.Decode<mirror::Object>(value).Ptr());
+ visitor.WalkStack();
+ return visitor.found_method_index_;
}
extern "C" JNIEXPORT jint JNICALL Java_Main_doStaticNativeCallRef(JNIEnv*, jclass) {
- return FindMethodIndex(nullptr);
+ ScopedObjectAccess soa(Thread::Current());
+ std::unique_ptr<Context> context(Context::Create());
+ TestVisitor visitor(soa.Self(), context.get(), nullptr);
+ visitor.WalkStack();
+ return visitor.found_method_index_;
}
} // namespace
diff --git a/test/543-env-long-ref/env_long_ref.cc b/test/543-env-long-ref/env_long_ref.cc
index 1885f8d..165f5bf 100644
--- a/test/543-env-long-ref/env_long_ref.cc
+++ b/test/543-env-long-ref/env_long_ref.cc
@@ -23,28 +23,44 @@
namespace art {
+namespace {
+
+class TestVisitor : public StackVisitor {
+ public:
+ TestVisitor(const ScopedObjectAccess& soa, Context* context, jobject expected_value)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ : StackVisitor(soa.Self(), context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ expected_value_(expected_value),
+ found_(false),
+ soa_(soa) {}
+
+ bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
+ ArtMethod* m = GetMethod();
+ std::string m_name(m->GetName());
+
+ if (m_name == "testCase") {
+ found_ = true;
+ uint32_t value = 0;
+ CHECK(GetVReg(m, 1, kReferenceVReg, &value));
+ CHECK_EQ(reinterpret_cast<mirror::Object*>(value),
+ soa_.Decode<mirror::Object>(expected_value_).Ptr());
+ }
+ return true;
+ }
+
+ jobject expected_value_;
+ bool found_;
+ const ScopedObjectAccess& soa_;
+};
+
+} // namespace
+
extern "C" JNIEXPORT void JNICALL Java_Main_lookForMyRegisters(JNIEnv*, jclass, jobject value) {
ScopedObjectAccess soa(Thread::Current());
std::unique_ptr<Context> context(Context::Create());
- bool found = false;
- StackVisitor::WalkStack(
- [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- ArtMethod* m = stack_visitor->GetMethod();
- std::string m_name(m->GetName());
-
- if (m_name == "testCase") {
- found = true;
- uint32_t stack_value = 0;
- CHECK(stack_visitor->GetVReg(m, 1, kReferenceVReg, &stack_value));
- CHECK_EQ(reinterpret_cast<mirror::Object*>(stack_value),
- soa.Decode<mirror::Object>(value).Ptr());
- }
- return true;
- },
- soa.Self(),
- context.get(),
- art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
- CHECK(found);
+ TestVisitor visitor(soa, context.get(), value);
+ visitor.WalkStack();
+ CHECK(visitor.found_);
}
} // namespace art
diff --git a/test/570-checker-osr/osr.cc b/test/570-checker-osr/osr.cc
index b2b3634..7b88842 100644
--- a/test/570-checker-osr/osr.cc
+++ b/test/570-checker-osr/osr.cc
@@ -23,33 +23,39 @@
#include "scoped_thread_state_change-inl.h"
#include "stack.h"
#include "stack_map.h"
-#include "thread-current-inl.h"
namespace art {
-namespace {
+class OsrVisitor : public StackVisitor {
+ public:
+ explicit OsrVisitor(Thread* thread, const char* method_name)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ method_name_(method_name),
+ in_osr_method_(false),
+ in_interpreter_(false) {}
-template <typename Handler>
-void ProcessMethodWithName(JNIEnv* env, jstring method_name, const Handler& handler) {
- ScopedUtfChars chars(env, method_name);
- CHECK(chars.c_str() != nullptr);
- ScopedObjectAccess soa(Thread::Current());
- StackVisitor::WalkStack(
- [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- std::string m_name(stack_visitor->GetMethod()->GetName());
+ bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
+ ArtMethod* m = GetMethod();
+ std::string m_name(m->GetName());
- if (m_name.compare(chars.c_str()) == 0) {
- handler(stack_visitor);
- return false;
- }
- return true;
- },
- soa.Self(),
- /* context= */ nullptr,
- art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
-}
+ if (m_name.compare(method_name_) == 0) {
+ const OatQuickMethodHeader* header =
+ Runtime::Current()->GetJit()->GetCodeCache()->LookupOsrMethodHeader(m);
+ if (header != nullptr && header == GetCurrentOatQuickMethodHeader()) {
+ in_osr_method_ = true;
+ } else if (IsShadowFrame()) {
+ in_interpreter_ = true;
+ }
+ return false;
+ }
+ return true;
+ }
-} // namespace
+ const char* const method_name_;
+ bool in_osr_method_;
+ bool in_interpreter_;
+};
extern "C" JNIEXPORT jboolean JNICALL Java_Main_isInOsrCode(JNIEnv* env,
jclass,
@@ -59,19 +65,12 @@
// Just return true for non-jit configurations to stop the infinite loop.
return JNI_TRUE;
}
- bool in_osr_code = false;
- ProcessMethodWithName(
- env,
- method_name,
- [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- ArtMethod* m = stack_visitor->GetMethod();
- const OatQuickMethodHeader* header =
- Runtime::Current()->GetJit()->GetCodeCache()->LookupOsrMethodHeader(m);
- if (header != nullptr && header == stack_visitor->GetCurrentOatQuickMethodHeader()) {
- in_osr_code = true;
- }
- });
- return in_osr_code;
+ ScopedUtfChars chars(env, method_name);
+ CHECK(chars.c_str() != nullptr);
+ ScopedObjectAccess soa(Thread::Current());
+ OsrVisitor visitor(soa.Self(), chars.c_str());
+ visitor.WalkStack();
+ return visitor.in_osr_method_;
}
extern "C" JNIEXPORT jboolean JNICALL Java_Main_isInInterpreter(JNIEnv* env,
@@ -81,56 +80,86 @@
// The return value is irrelevant if we're not using JIT.
return false;
}
- bool in_interpreter = false;
- ProcessMethodWithName(
- env,
- method_name,
- [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- ArtMethod* m = stack_visitor->GetMethod();
- const OatQuickMethodHeader* header =
- Runtime::Current()->GetJit()->GetCodeCache()->LookupOsrMethodHeader(m);
- if ((header == nullptr || header != stack_visitor->GetCurrentOatQuickMethodHeader()) &&
- stack_visitor->IsShadowFrame()) {
- in_interpreter = true;
- }
- });
- return in_interpreter;
+ ScopedUtfChars chars(env, method_name);
+ CHECK(chars.c_str() != nullptr);
+ ScopedObjectAccess soa(Thread::Current());
+ OsrVisitor visitor(soa.Self(), chars.c_str());
+ visitor.WalkStack();
+ return visitor.in_interpreter_;
}
+class ProfilingInfoVisitor : public StackVisitor {
+ public:
+ explicit ProfilingInfoVisitor(Thread* thread, const char* method_name)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ method_name_(method_name) {}
+
+ bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
+ ArtMethod* m = GetMethod();
+ std::string m_name(m->GetName());
+
+ if (m_name.compare(method_name_) == 0) {
+ ProfilingInfo::Create(Thread::Current(), m, /* retry_allocation */ true);
+ return false;
+ }
+ return true;
+ }
+
+ const char* const method_name_;
+};
+
extern "C" JNIEXPORT void JNICALL Java_Main_ensureHasProfilingInfo(JNIEnv* env,
jclass,
jstring method_name) {
if (!Runtime::Current()->UseJitCompilation()) {
return;
}
- ProcessMethodWithName(
- env,
- method_name,
- [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- ArtMethod* m = stack_visitor->GetMethod();
- ProfilingInfo::Create(Thread::Current(), m, /* retry_allocation */ true);
- });
+ ScopedUtfChars chars(env, method_name);
+ CHECK(chars.c_str() != nullptr);
+ ScopedObjectAccess soa(Thread::Current());
+ ProfilingInfoVisitor visitor(soa.Self(), chars.c_str());
+ visitor.WalkStack();
}
+class OsrCheckVisitor : public StackVisitor {
+ public:
+ OsrCheckVisitor(Thread* thread, const char* method_name)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ method_name_(method_name) {}
+
+ bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
+ ArtMethod* m = GetMethod();
+ std::string m_name(m->GetName());
+
+ jit::Jit* jit = Runtime::Current()->GetJit();
+ if (m_name.compare(method_name_) == 0) {
+ while (jit->GetCodeCache()->LookupOsrMethodHeader(m) == nullptr) {
+ // Sleep to yield to the compiler thread.
+ usleep(1000);
+ // Will either ensure it's compiled or do the compilation itself.
+ jit->CompileMethod(m, Thread::Current(), /* osr */ true);
+ }
+ return false;
+ }
+ return true;
+ }
+
+ const char* const method_name_;
+};
+
extern "C" JNIEXPORT void JNICALL Java_Main_ensureHasOsrCode(JNIEnv* env,
jclass,
jstring method_name) {
if (!Runtime::Current()->UseJitCompilation()) {
return;
}
- ProcessMethodWithName(
- env,
- method_name,
- [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- ArtMethod* m = stack_visitor->GetMethod();
- jit::Jit* jit = Runtime::Current()->GetJit();
- while (jit->GetCodeCache()->LookupOsrMethodHeader(m) == nullptr) {
- // Sleep to yield to the compiler thread.
- usleep(1000);
- // Will either ensure it's compiled or do the compilation itself.
- jit->CompileMethod(m, Thread::Current(), /* osr */ true);
- }
- });
+ ScopedUtfChars chars(env, method_name);
+ CHECK(chars.c_str() != nullptr);
+ ScopedObjectAccess soa(Thread::Current());
+ OsrCheckVisitor visitor(soa.Self(), chars.c_str());
+ visitor.WalkStack();
}
} // namespace art
diff --git a/test/common/stack_inspect.cc b/test/common/stack_inspect.cc
index 393e773..581aa74 100644
--- a/test/common/stack_inspect.cc
+++ b/test/common/stack_inspect.cc
@@ -66,30 +66,42 @@
// public static native boolean isInterpretedFunction(String smali);
-static bool IsMethodInterpreted(Thread* self,
- const ArtMethod* goal,
- const bool require_deoptable,
- /* out */ bool* method_is_interpreted)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- *method_is_interpreted = true;
- bool method_found = false;
- bool prev_was_runtime = true;
- StackVisitor::WalkStack(
- [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- if (goal == stack_visitor->GetMethod()) {
- *method_is_interpreted =
- (require_deoptable && prev_was_runtime) || stack_visitor->IsShadowFrame();
- method_found = true;
- return false;
- }
- prev_was_runtime = stack_visitor->GetMethod()->IsRuntimeMethod();
- return true;
- },
- self,
- /* context= */ nullptr,
- art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
- return method_found;
-}
+// TODO Remove 'require_deoptable' option once we have deoptimization through runtime frames.
+struct MethodIsInterpretedVisitor : public StackVisitor {
+ public:
+ MethodIsInterpretedVisitor(Thread* thread, ArtMethod* goal, bool require_deoptable)
+ : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+ goal_(goal),
+ method_is_interpreted_(true),
+ method_found_(false),
+ prev_was_runtime_(true),
+ require_deoptable_(require_deoptable) {}
+
+ bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
+ if (goal_ == GetMethod()) {
+ method_is_interpreted_ = (require_deoptable_ && prev_was_runtime_) || IsShadowFrame();
+ method_found_ = true;
+ return false;
+ }
+ prev_was_runtime_ = GetMethod()->IsRuntimeMethod();
+ return true;
+ }
+
+ bool IsInterpreted() {
+ return method_is_interpreted_;
+ }
+
+ bool IsFound() {
+ return method_found_;
+ }
+
+ private:
+ const ArtMethod* goal_;
+ bool method_is_interpreted_;
+ bool method_found_;
+ bool prev_was_runtime_;
+ bool require_deoptable_;
+};
// TODO Remove 'require_deoptimizable' option once we have deoptimization through runtime frames.
extern "C" JNIEXPORT jboolean JNICALL Java_Main_isInterpretedFunction(
@@ -107,18 +119,23 @@
env->ThrowNew(env->FindClass("java/lang/Error"), "Unable to interpret method argument!");
return JNI_FALSE;
}
+ bool result;
+ bool found;
{
ScopedObjectAccess soa(env);
ArtMethod* goal = jni::DecodeArtMethod(id);
- bool is_interpreted;
- if (!IsMethodInterpreted(soa.Self(), goal, require_deoptimizable, &is_interpreted)) {
- env->ThrowNew(env->FindClass("java/lang/Error"), "Unable to find given method in stack!");
- return JNI_FALSE;
- }
+ MethodIsInterpretedVisitor v(soa.Self(), goal, require_deoptimizable);
+ v.WalkStack();
bool enters_interpreter = Runtime::Current()->GetClassLinker()->IsQuickToInterpreterBridge(
goal->GetEntryPointFromQuickCompiledCode());
- return (is_interpreted || enters_interpreter);
+ result = (v.IsInterpreted() || enters_interpreter);
+ found = v.IsFound();
}
+ if (!found) {
+ env->ThrowNew(env->FindClass("java/lang/Error"), "Unable to find given method in stack!");
+ return JNI_FALSE;
+ }
+ return result;
}
// public static native void assertIsInterpreted();
@@ -179,24 +196,24 @@
}
}
+struct GetCallingFrameVisitor : public StackVisitor {
+ GetCallingFrameVisitor(Thread* thread, Context* context)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ : StackVisitor(thread, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames) {}
+
+ bool VisitFrame() override NO_THREAD_SAFETY_ANALYSIS {
+ // Discard stubs and Main.getThisOfCaller.
+ return GetMethod() == nullptr || GetMethod()->IsNative();
+ }
+};
+
extern "C" JNIEXPORT jobject JNICALL Java_Main_getThisOfCaller(
JNIEnv* env, jclass cls ATTRIBUTE_UNUSED) {
ScopedObjectAccess soa(env);
std::unique_ptr<art::Context> context(art::Context::Create());
- jobject result = nullptr;
- StackVisitor::WalkStack(
- [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
- // Discard stubs and Main.getThisOfCaller.
- if (stack_visitor->GetMethod() == nullptr || stack_visitor->GetMethod()->IsNative()) {
- return true;
- }
- result = soa.AddLocalReference<jobject>(stack_visitor->GetThisObject());
- return false;
- },
- soa.Self(),
- context.get(),
- art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
- return result;
+ GetCallingFrameVisitor visitor(soa.Self(), context.get());
+ visitor.WalkStack();
+ return soa.AddLocalReference<jobject>(visitor.GetThisObject());
}
} // namespace art