-rw-r--r--  openjdkjvmti/deopt_manager.cc                                         |   2
-rw-r--r--  runtime/entrypoints/quick/quick_trampoline_entrypoints.cc             |  20
-rw-r--r--  runtime/instrumentation.cc                                            | 234
-rw-r--r--  runtime/instrumentation.h                                             |  22
-rw-r--r--  runtime/jit/jit_code_cache.cc                                         |   5
-rw-r--r--  runtime/quick_exception_handler.cc                                    |  46
-rw-r--r--  runtime/stack.cc                                                      |  78
-rw-r--r--  runtime/stack.h                                                       |   1
-rw-r--r--  runtime/thread.cc                                                     |   9
-rw-r--r--  runtime/thread.h                                                      |  22
-rw-r--r--  test/2011-stack-walk-concurrent-instrument/expected.txt               |   2
-rw-r--r--  test/2011-stack-walk-concurrent-instrument/info.txt                   |   3
-rw-r--r--  test/2011-stack-walk-concurrent-instrument/src/Main.java              |  66
-rw-r--r--  test/2011-stack-walk-concurrent-instrument/stack_walk_concurrent.cc   |  97
-rw-r--r--  test/Android.bp                                                       |   1
15 files changed, 365 insertions, 243 deletions
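
The central change in this commit is replacing the per-thread std::deque of InstrumentationStackFrame (looked up by walk depth / frame id) with a std::map keyed by the address of the stack slot that holds the instrumented return PC. The following stand-alone sketch only illustrates that bookkeeping idea; the Frame struct and the addresses are stand-ins, not ART's real types or layout.

#include <cstdint>
#include <iostream>
#include <map>
#include <string>

// Stand-in for art::instrumentation::InstrumentationStackFrame.
struct Frame {
  std::string method;
  uintptr_t return_pc;  // Original return PC that the exit stub replaced.
};

// Keyed by the address of the return-PC slot in the quick frame. Because the
// stack grows downwards, a smaller key means a frame closer to the leaf.
using InstrumentationStack = std::map<uintptr_t, Frame>;

// On method entry the trampoline records the frame under its return-PC slot.
void Push(InstrumentationStack& stack, uintptr_t return_pc_addr, Frame frame) {
  stack.insert({return_pc_addr, std::move(frame)});
}

// On method exit the trampoline knows the slot address and can look the frame
// up directly instead of trusting a depth counter.
Frame Pop(InstrumentationStack& stack, uintptr_t return_pc_addr) {
  auto it = stack.find(return_pc_addr);  // Assumed present in this sketch.
  Frame frame = it->second;
  stack.erase(it);
  return frame;
}

// Exception delivery: drop every frame at or below the handler's frame
// address, relying on the ordered iteration of std::map.
void PopUntil(InstrumentationStack& stack, uintptr_t handler_sp) {
  for (auto it = stack.begin(); it != stack.end() && it->first <= handler_sp;) {
    it = stack.erase(it);
  }
}

int main() {
  InstrumentationStack stack;
  Push(stack, /*return_pc_addr=*/0x7fff1000, {"h", 0xaaaa});
  Push(stack, /*return_pc_addr=*/0x7fff2000, {"g", 0xbbbb});
  Push(stack, /*return_pc_addr=*/0x7fff3000, {"f", 0xcccc});
  PopUntil(stack, /*handler_sp=*/0x7fff2800);  // Unwinds h and g, keeps f.
  std::cout << stack.size() << " frame(s) left, top is "
            << stack.begin()->second.method << "\n";  // 1 frame(s) left, top is f
  return 0;
}

The per-file diffs below show how this shape threads through the runtime.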
diff --git a/openjdkjvmti/deopt_manager.cc b/openjdkjvmti/deopt_manager.cc
index 3b04ed8be8..3e3691a16a 100644
--- a/openjdkjvmti/deopt_manager.cc
+++ b/openjdkjvmti/deopt_manager.cc
@@ -487,9 +487,11 @@ void DeoptManager::AddDeoptimizationRequester() {
 void DeoptManager::DeoptimizeThread(art::Thread* target) {
   // We might or might not be running on the target thread (self) so get Thread::Current
   // directly.
+  art::ScopedThreadSuspension sts(art::Thread::Current(), art::kSuspended);
   art::gc::ScopedGCCriticalSection sgccs(art::Thread::Current(),
                                          art::gc::GcCause::kGcCauseDebugger,
                                          art::gc::CollectorType::kCollectorTypeDebugger);
+  art::ScopedSuspendAll ssa("Instrument thread stack");
   art::Runtime::Current()->GetInstrumentation()->InstrumentThreadStack(target);
 }
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index 1049c5d89c..1304c0d676 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -358,12 +358,16 @@ class QuickArgumentVisitor {
     }
   }

-  // For the given quick ref and args quick frame, return the caller's PC.
-  static uintptr_t GetCallingPc(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
+  static uint8_t* GetCallingPcAddr(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
     DCHECK((*sp)->IsCalleeSaveMethod());
     uint8_t* return_adress_spill =
         reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_ReturnPcOffset;
-    return *reinterpret_cast<uintptr_t*>(return_adress_spill);
+    return return_adress_spill;
+  }
+
+  // For the given quick ref and args quick frame, return the caller's PC.
+  static uintptr_t GetCallingPc(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
+    return *reinterpret_cast<uintptr_t*>(GetCallingPcAddr(sp));
   }

   QuickArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty,
@@ -1156,6 +1160,8 @@ extern "C" const void* artInstrumentationMethodEntryFromCode(ArtMethod* method,
     instrumentation->PushInstrumentationStackFrame(self,
                                                    is_static ? nullptr : this_object,
                                                    method,
+                                                   reinterpret_cast<uintptr_t>(
+                                                       QuickArgumentVisitor::GetCallingPcAddr(sp)),
                                                    QuickArgumentVisitor::GetCallingPc(sp),
                                                    interpreter_entry);

@@ -1181,9 +1187,9 @@ extern "C" TwoWordReturn artInstrumentationMethodExitFromCode(Thread* self,
   // Compute address of return PC and sanity check that it currently holds 0.
   constexpr size_t return_pc_offset =
       RuntimeCalleeSaveFrame::GetReturnPcOffset(CalleeSaveType::kSaveEverything);
-  uintptr_t* return_pc = reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(sp) +
-                                                      return_pc_offset);
-  CHECK_EQ(*return_pc, 0U);
+  uintptr_t* return_pc_addr = reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(sp) +
+                                                           return_pc_offset);
+  CHECK_EQ(*return_pc_addr, 0U);

   // Pop the frame filling in the return pc. The low half of the return value is 0 when
   // deoptimization shouldn't be performed with the high-half having the return address. When
   // deoptimization should be performed the low half is zero and the high-half the address of the
   // deoptimization entry point.
   instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
   TwoWordReturn return_or_deoptimize_pc = instrumentation->PopInstrumentationStackFrame(
-      self, return_pc, gpr_result, fpr_result);
+      self, return_pc_addr, gpr_result, fpr_result);
   if (self->IsExceptionPending() || self->ObserveAsyncException()) {
     return GetTwoWordFailureValue();
   }
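
The trampoline change above boils down to handing the instrumentation code the address of the return-PC slot rather than only the PC value read from it; the slot address is stable for the lifetime of the frame and is what keys the new map. A stand-alone way to picture the two helpers (the frame layout and offset below are invented for illustration, not ART's real callee-save layout):

#include <cstdint>
#include <iostream>

// Pretend callee-save frame: a few spilled registers followed by the return PC.
constexpr size_t kFakeReturnPcOffset = 3 * sizeof(uintptr_t);

// Analogue of QuickArgumentVisitor::GetCallingPcAddr: where the PC lives.
uint8_t* GetCallingPcAddr(uint8_t* sp) {
  return sp + kFakeReturnPcOffset;
}

// Analogue of QuickArgumentVisitor::GetCallingPc: what the PC currently is.
uintptr_t GetCallingPc(uint8_t* sp) {
  return *reinterpret_cast<uintptr_t*>(GetCallingPcAddr(sp));
}

int main() {
  uintptr_t fake_frame[4] = {0x11, 0x22, 0x33, /*return pc*/ 0xdeadbeef};
  uint8_t* sp = reinterpret_cast<uint8_t*>(fake_frame);
  // The slot address can key a map; the value stored in it is what the exit
  // stub later overwrites with the instrumentation exit PC.
  std::cout << std::hex << "slot=" << reinterpret_cast<uintptr_t>(GetCallingPcAddr(sp))
            << " pc=" << GetCallingPc(sp) << "\n";
  return 0;
}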
diff --git a/runtime/instrumentation.cc b/runtime/instrumentation.cc
index 011d947ea2..60e7c9c80c 100644
--- a/runtime/instrumentation.cc
+++ b/runtime/instrumentation.cc
@@ -107,23 +107,23 @@ class InstallStubsClassVisitor : public ClassVisitor {
 InstrumentationStackPopper::InstrumentationStackPopper(Thread* self)
       : self_(self),
         instrumentation_(Runtime::Current()->GetInstrumentation()),
-        frames_to_remove_(0) {}
+        pop_until_(0u) {}

 InstrumentationStackPopper::~InstrumentationStackPopper() {
-  std::deque<instrumentation::InstrumentationStackFrame>* stack = self_->GetInstrumentationStack();
-  for (size_t i = 0; i < frames_to_remove_; i++) {
-    stack->pop_front();
+  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
+      self_->GetInstrumentationStack();
+  for (auto i = stack->begin(); i != stack->end() && i->first <= pop_until_;) {
+    i = stack->erase(i);
   }
 }

-bool InstrumentationStackPopper::PopFramesTo(uint32_t desired_pops,
+bool InstrumentationStackPopper::PopFramesTo(uintptr_t stack_pointer,
                                              MutableHandle<mirror::Throwable>& exception) {
-  std::deque<instrumentation::InstrumentationStackFrame>* stack = self_->GetInstrumentationStack();
-  DCHECK_LE(frames_to_remove_, desired_pops);
-  DCHECK_GE(stack->size(), desired_pops);
+  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
+      self_->GetInstrumentationStack();
   DCHECK(!self_->IsExceptionPending());
   if (!instrumentation_->HasMethodUnwindListeners()) {
-    frames_to_remove_ = desired_pops;
+    pop_until_ = stack_pointer;
     return true;
   }
   if (kVerboseInstrumentation) {
@@ -132,8 +132,14 @@ bool InstrumentationStackPopper::PopFramesTo(uint32_t desired_pops,
   // The instrumentation events expect the exception to be set.
   self_->SetException(exception.Get());
   bool new_exception_thrown = false;
-  for (; frames_to_remove_ < desired_pops && !new_exception_thrown; frames_to_remove_++) {
-    InstrumentationStackFrame frame = stack->at(frames_to_remove_);
+  auto i = stack->upper_bound(pop_until_);
+
+  // Now pop all frames until reaching stack_pointer, or a new exception is
+  // thrown. Note that `stack_pointer` doesn't need to be a return PC address
+  // (in fact the exception handling code passes the start of the frame where
+  // the catch handler is).
+  for (; i != stack->end() && i->first <= stack_pointer; i++) {
+    const InstrumentationStackFrame& frame = i->second;
     ArtMethod* method = frame.method_;
     // Notify listeners of method unwind.
     // TODO: improve the dex_pc information here.
@@ -144,13 +150,19 @@ bool InstrumentationStackPopper::PopFramesTo(uint32_t desired_pops,
     if (!method->IsRuntimeMethod() && !frame.interpreter_entry_) {
       instrumentation_->MethodUnwindEvent(self_, frame.this_object_, method, dex_pc);
       new_exception_thrown = self_->GetException() != exception.Get();
+      if (new_exception_thrown) {
+        pop_until_ = i->first;
+        break;
+      }
     }
   }
+  if (!new_exception_thrown) {
+    pop_until_ = stack_pointer;
+  }
   exception.Assign(self_->GetException());
   self_->ClearException();
   if (kVerboseInstrumentation && new_exception_thrown) {
-    LOG(INFO) << "Failed to pop " << (desired_pops - frames_to_remove_)
-              << " frames due to new exception";
+    LOG(INFO) << "Did partial pop of frames due to new exception";
   }
   return !new_exception_thrown;
 }
@@ -284,7 +296,8 @@ void Instrumentation::InstallStubsForMethod(ArtMethod* method) {
 // Since we may already have done this previously, we need to push new instrumentation frame before
 // existing instrumentation frames.
 void InstrumentationInstallStack(Thread* thread, void* arg)
-    REQUIRES_SHARED(Locks::mutator_lock_) {
+    REQUIRES(Locks::mutator_lock_) {
+  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
   struct InstallStackVisitor final : public StackVisitor {
     InstallStackVisitor(Thread* thread_in,
                         Context* context,
@@ -294,7 +307,6 @@ void InstrumentationInstallStack(Thread* thread, void* arg)
           instrumentation_stack_(thread_in->GetInstrumentationStack()),
           instrumentation_exit_pc_(instrumentation_exit_pc),
           reached_existing_instrumentation_frames_(false),
-          instrumentation_stack_depth_(0),
           last_return_pc_(0),
           force_deopt_id_(force_deopt_id) {}

@@ -326,11 +338,10 @@ void InstrumentationInstallStack(Thread* thread, void* arg)
         LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
       }
       if (return_pc == instrumentation_exit_pc_) {
-        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
-
+        auto it = instrumentation_stack_->find(GetReturnPcAddr());
+        CHECK(it != instrumentation_stack_->end());
+        const InstrumentationStackFrame& frame = it->second;
         if (m->IsRuntimeMethod()) {
-          const InstrumentationStackFrame& frame =
-              (*instrumentation_stack_)[instrumentation_stack_depth_];
           if (frame.interpreter_entry_) {
             // This instrumentation frame is for an interpreter bridge and is
             // pushed when executing the instrumented interpreter bridge. So method
@@ -339,7 +350,6 @@ void InstrumentationInstallStack(Thread* thread, void* arg)
             uint32_t dex_pc = dex::kDexNoIndex;
             dex_pcs_.push_back(dex_pc);
             last_return_pc_ = frame.return_pc_;
-            ++instrumentation_stack_depth_;
             return true;
           }
         }
@@ -348,8 +358,6 @@ void InstrumentationInstallStack(Thread* thread, void* arg)
         // We should have already installed instrumentation or be interpreter on previous frames.
         reached_existing_instrumentation_frames_ = true;

-        const InstrumentationStackFrame& frame =
-            (*instrumentation_stack_)[instrumentation_stack_depth_];
         CHECK_EQ(m->GetNonObsoleteMethod(), frame.method_->GetNonObsoleteMethod())
             << "Expected " << ArtMethod::PrettyMethod(m)
             << ", Found " << ArtMethod::PrettyMethod(frame.method_);
@@ -387,16 +395,7 @@ void InstrumentationInstallStack(Thread* thread, void* arg)
         LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
       }

-      // Insert frame at the right position so we do not corrupt the instrumentation stack.
-      // Instrumentation stack frames are in descending frame id order.
-      auto it = instrumentation_stack_->begin();
-      for (auto end = instrumentation_stack_->end(); it != end; ++it) {
-        const InstrumentationStackFrame& current = *it;
-        if (instrumentation_frame.frame_id_ >= current.frame_id_) {
-          break;
-        }
-      }
-      instrumentation_stack_->insert(it, instrumentation_frame);
+      instrumentation_stack_->insert({GetReturnPcAddr(), instrumentation_frame});
       SetReturnPc(instrumentation_exit_pc_);
     }
     uint32_t dex_pc = dex::kDexNoIndex;
@@ -405,15 +404,13 @@ void InstrumentationInstallStack(Thread* thread, void* arg)
     }
     dex_pcs_.push_back(dex_pc);
     last_return_pc_ = return_pc;
-    ++instrumentation_stack_depth_;
     return true;  // Continue.
   }
-  std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
+  std::map<uintptr_t, InstrumentationStackFrame>* const instrumentation_stack_;
   std::vector<InstrumentationStackFrame> shadow_stack_;
   std::vector<uint32_t> dex_pcs_;
   const uintptr_t instrumentation_exit_pc_;
   bool reached_existing_instrumentation_frames_;
-  size_t instrumentation_stack_depth_;
   uintptr_t last_return_pc_;
   uint64_t force_deopt_id_;
 };
@@ -434,17 +431,20 @@ void InstrumentationInstallStack(Thread* thread, void* arg)
   if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
     // Create method enter events for all methods currently on the thread's stack. We only do this
     // if no debugger is attached to prevent from posting events twice.
+    // TODO: This is the only place we make use of frame_id_. We should create a
+    // std::vector instead and populate it as we walk the stack.
     auto ssi = visitor.shadow_stack_.rbegin();
     for (auto isi = thread->GetInstrumentationStack()->rbegin(),
         end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
-      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
+      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < isi->second.frame_id_) {
         instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
         ++ssi;
       }
       uint32_t dex_pc = visitor.dex_pcs_.back();
       visitor.dex_pcs_.pop_back();
-      if (!isi->interpreter_entry_ && !isi->method_->IsRuntimeMethod()) {
-        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
+      if (!isi->second.interpreter_entry_ && !isi->second.method_->IsRuntimeMethod()) {
+        instrumentation->MethodEnterEvent(
+            thread, isi->second.this_object_, isi->second.method_, dex_pc);
       }
     }
   }
@@ -489,36 +489,31 @@ static void InstrumentationRestoreStack(Thread* thread, void* arg)
         }
         return true;  // Ignore upcalls.
       }
-      bool removed_stub = false;
-      // TODO: make this search more efficient?
-      const size_t frameId = GetFrameId();
-      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
-        if (instrumentation_frame.frame_id_ == frameId) {
-          if (kVerboseInstrumentation) {
-            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
-          }
-          if (instrumentation_frame.interpreter_entry_) {
-            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));
-          } else {
-            CHECK_EQ(m->GetNonObsoleteMethod(),
-                     instrumentation_frame.method_->GetNonObsoleteMethod())
-                << ArtMethod::PrettyMethod(m);
-          }
-          SetReturnPc(instrumentation_frame.return_pc_);
-          if (instrumentation_->ShouldNotifyMethodEnterExitEvents() &&
-              !m->IsRuntimeMethod()) {
-            // Create the method exit events. As the methods didn't really exit the result is 0.
-            // We only do this if no debugger is attached to prevent from posting events twice.
-            JValue val;
-            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
-                                              GetDexPc(), OptionalFrame{}, val);
-          }
-          frames_removed_++;
-          removed_stub = true;
-          break;
+      auto it = instrumentation_stack_->find(GetReturnPcAddr());
+      if (it != instrumentation_stack_->end()) {
+        const InstrumentationStackFrame& instrumentation_frame = it->second;
+        if (kVerboseInstrumentation) {
+          LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
         }
-      }
-      if (!removed_stub) {
+        if (instrumentation_frame.interpreter_entry_) {
+          CHECK(m == Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));
+        } else {
+          CHECK_EQ(m->GetNonObsoleteMethod(),
+                   instrumentation_frame.method_->GetNonObsoleteMethod())
+              << ArtMethod::PrettyMethod(m)
+              << " and " << instrumentation_frame.method_->GetNonObsoleteMethod()->PrettyMethod();
+        }
+        SetReturnPc(instrumentation_frame.return_pc_);
+        if (instrumentation_->ShouldNotifyMethodEnterExitEvents() &&
+            !m->IsRuntimeMethod()) {
+          // Create the method exit events. As the methods didn't really exit the result is 0.
+          // We only do this if no debugger is attached to prevent from posting events twice.
+          JValue val;
+          instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
+                                            GetDexPc(), OptionalFrame{}, val);
+        }
+        frames_removed_++;
+      } else {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  No exit stub in " << DescribeLocation();
        }
@@ -528,7 +523,7 @@ static void InstrumentationRestoreStack(Thread* thread, void* arg)
     Thread* const thread_;
     const uintptr_t instrumentation_exit_pc_;
     Instrumentation* const instrumentation_;
-    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
+    std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
     size_t frames_removed_;
   };
   if (kVerboseInstrumentation) {
@@ -536,7 +531,8 @@ static void InstrumentationRestoreStack(Thread* thread, void* arg)
     thread->GetThreadName(thread_name);
     LOG(INFO) << "Removing exit stubs in " << thread_name;
   }
-  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
+  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
+      thread->GetInstrumentationStack();
   if (stack->size() > 0) {
     Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
     uintptr_t instrumentation_exit_pc =
@@ -544,9 +540,7 @@ static void InstrumentationRestoreStack(Thread* thread, void* arg)
     RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
     visitor.WalkStack(true);
     CHECK_EQ(visitor.frames_removed_, stack->size());
-    while (stack->size() > 0) {
-      stack->pop_front();
-    }
+    stack->clear();
   }
 }
@@ -825,13 +819,17 @@ void Instrumentation::UpdateStubs() {
   bool no_remaining_deopts = true;
   // Check that there are no other forced deoptimizations. Do it here so we only need to lock
   // thread_list_lock once.
-  runtime->GetThreadList()->ForEach([&](Thread* t) {
+  // The compiler gets confused on the thread annotations, so use
+  // NO_THREAD_SAFETY_ANALYSIS. Note that we hold the mutator lock
+  // exclusively at this point.
+  Locks::mutator_lock_->AssertExclusiveHeld(self);
+  runtime->GetThreadList()->ForEach([&](Thread* t) NO_THREAD_SAFETY_ANALYSIS {
     no_remaining_deopts =
         no_remaining_deopts &&
        !t->IsForceInterpreter() &&
        std::all_of(t->GetInstrumentationStack()->cbegin(),
                    t->GetInstrumentationStack()->cend(),
                    [&](const auto& frame) REQUIRES_SHARED(Locks::mutator_lock_) {
-                     return frame.force_deopt_id_ == current_force_deopt_id_;
+                     return frame.second.force_deopt_id_ == current_force_deopt_id_;
                    });
   });
   if (no_remaining_deopts) {
@@ -1385,34 +1383,15 @@ void Instrumentation::ExceptionHandledEvent(Thread* thread,
   }
 }

-// Computes a frame ID by ignoring inlined frames.
-size_t Instrumentation::ComputeFrameId(Thread* self,
-                                       size_t frame_depth,
-                                       size_t inlined_frames_before_frame) {
-  CHECK_GE(frame_depth, inlined_frames_before_frame);
-  size_t no_inline_depth = frame_depth - inlined_frames_before_frame;
-  return StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk) - no_inline_depth;
-}
-
-static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
-                            int delta)
-    REQUIRES_SHARED(Locks::mutator_lock_) {
-  size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk) + delta;
-  if (frame_id != instrumentation_frame.frame_id_) {
-    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
-               << instrumentation_frame.frame_id_;
-    StackVisitor::DescribeStack(self);
-    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
-  }
-}
-
 void Instrumentation::PushInstrumentationStackFrame(Thread* self,
                                                     ObjPtr<mirror::Object> this_object,
                                                     ArtMethod* method,
+                                                    uintptr_t stack_ptr,
                                                     uintptr_t lr,
                                                     bool interpreter_entry) {
   DCHECK(!self->IsExceptionPending());
-  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
+  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
+      self->GetInstrumentationStack();
   if (kVerboseInstrumentation) {
     LOG(INFO) << "Entering " << ArtMethod::PrettyMethod(method) << " from PC "
               << reinterpret_cast<void*>(lr);
@@ -1436,7 +1415,7 @@ void Instrumentation::PushInstrumentationStackFrame(Thread* self,
   instrumentation::InstrumentationStackFrame instrumentation_frame(
       h_this.Get(), method, lr, frame_id, interpreter_entry, current_force_deopt_id_);
-  stack->push_front(instrumentation_frame);
+  stack->insert({stack_ptr, instrumentation_frame});
 }

 DeoptimizationMethodType Instrumentation::GetDeoptimizationMethodType(ArtMethod* method) {
@@ -1518,20 +1497,24 @@ static char GetRuntimeMethodShorty(Thread* thread) REQUIRES_SHARED(Locks::mutato
 }

 TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self,
-                                                            uintptr_t* return_pc,
+                                                            uintptr_t* return_pc_addr,
                                                             uint64_t* gpr_result,
                                                             uint64_t* fpr_result) {
   DCHECK(gpr_result != nullptr);
   DCHECK(fpr_result != nullptr);
   // Do the pop.
-  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
+  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
+      self->GetInstrumentationStack();
   CHECK_GT(stack->size(), 0U);
-  InstrumentationStackFrame instrumentation_frame = stack->front();
-  stack->pop_front();
+  auto it = stack->find(reinterpret_cast<uintptr_t>(return_pc_addr));
+  CHECK(it != stack->end());
+  InstrumentationStackFrame instrumentation_frame = it->second;
+  stack->erase(it);

   // Set return PC and check the sanity of the stack.
-  *return_pc = instrumentation_frame.return_pc_;
-  CheckStackDepth(self, instrumentation_frame, 0);
+  // We don't cache the return pc value in a local as it may change after
+  // sending a method exit event.
+  *return_pc_addr = instrumentation_frame.return_pc_;
   self->VerifyStack();

   ArtMethod* method = instrumentation_frame.method_;
@@ -1585,6 +1568,7 @@ TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self,
   uint32_t dex_pc = dex::kDexNoIndex;
   if (!method->IsRuntimeMethod() && !instrumentation_frame.interpreter_entry_) {
     ObjPtr<mirror::Object> this_object = instrumentation_frame.this_object_;
+    // Note that sending the event may change the contents of *return_pc_addr.
     MethodExitEvent(
         self, this_object, instrumentation_frame.method_, dex_pc, OptionalFrame{}, return_value);
   }
@@ -1608,7 +1592,7 @@ TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self,
     // Restore the return value if it's a reference since it might have moved.
     *reinterpret_cast<mirror::Object**>(gpr_result) = res.Get();
   }
-  if (deoptimize && Runtime::Current()->IsAsyncDeoptimizeable(*return_pc)) {
+  if (deoptimize && Runtime::Current()->IsAsyncDeoptimizeable(*return_pc_addr)) {
     if (kVerboseInstrumentation) {
       LOG(INFO) << "Deoptimizing "
                 << visitor.caller->PrettyMethod()
@@ -1625,43 +1609,35 @@ TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self,
                                     /* exception= */ nullptr ,
                                     /* from_code= */ false,
                                     deopt_method_type);
-    return GetTwoWordSuccessValue(*return_pc,
+    return GetTwoWordSuccessValue(*return_pc_addr,
                                   reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
   } else {
-    if (deoptimize && !Runtime::Current()->IsAsyncDeoptimizeable(*return_pc)) {
+    if (deoptimize && !Runtime::Current()->IsAsyncDeoptimizeable(*return_pc_addr)) {
       VLOG(deopt) << "Got a deoptimization request on un-deoptimizable " << method->PrettyMethod()
-                  << " at PC " << reinterpret_cast<void*>(*return_pc);
+                  << " at PC " << reinterpret_cast<void*>(*return_pc_addr);
     }
     if (kVerboseInstrumentation) {
       LOG(INFO) << "Returning from " << method->PrettyMethod()
-                << " to PC " << reinterpret_cast<void*>(*return_pc);
+                << " to PC " << reinterpret_cast<void*>(*return_pc_addr);
     }
-    return GetTwoWordSuccessValue(0, *return_pc);
+    return GetTwoWordSuccessValue(0, *return_pc_addr);
   }
 }

-uintptr_t Instrumentation::PopFramesForDeoptimization(Thread* self, size_t nframes) const {
-  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
-  CHECK_GE(stack->size(), nframes);
-  if (nframes == 0) {
-    return 0u;
-  }
-  // Only need to send instrumentation events if it's not for deopt (do give the log messages if we
-  // have verbose-instrumentation anyway though).
-  if (kVerboseInstrumentation) {
-    for (size_t i = 0; i < nframes; i++) {
-      LOG(INFO) << "Popping for deoptimization " << stack->at(i).method_->PrettyMethod();
+uintptr_t Instrumentation::PopFramesForDeoptimization(Thread* self, uintptr_t pop_until) const {
+  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
+      self->GetInstrumentationStack();
+  // Pop all instrumentation frames below `pop_until`.
+  uintptr_t return_pc = 0u;
+  for (auto i = stack->begin(); i != stack->end() && i->first <= pop_until;) {
+    auto e = i;
+    ++i;
+    if (kVerboseInstrumentation) {
+      LOG(INFO) << "Popping for deoptimization " << e->second.method_->PrettyMethod();
     }
+    return_pc = e->second.return_pc_;
+    stack->erase(e);
   }
-  // Now that we've sent all the instrumentation events we can actually modify the
-  // instrumentation-stack. We cannot do this earlier since MethodUnwindEvent can re-enter java and
-  // do other things that require the instrumentation stack to be in a consistent state with the
-  // actual stack.
-  for (size_t i = 0; i < nframes - 1; i++) {
-    stack->pop_front();
-  }
-  uintptr_t return_pc = stack->front().return_pc_;
-  stack->pop_front();
   return return_pc;
 }
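
The new PopFramesTo above walks the ordered map from the last popped position (upper_bound(pop_until_)) up to the handler's stack pointer, and stops early if an unwind listener raises a new exception, recording how far it got. A condensed stand-alone model of that control flow (the listener callback and the addresses are invented for illustration; it only models the cursor, not the actual frame erasure done later by the popper's destructor):

#include <cstdint>
#include <functional>
#include <iostream>
#include <map>
#include <string>

struct Frame { std::string method; };
using Stack = std::map<uintptr_t, Frame>;

// Returns true if every frame up to `stack_pointer` had its unwind listener
// run; on an early stop, `pop_until` records the last frame that was handled.
bool PopFramesTo(const Stack& stack,
                 uintptr_t& pop_until,
                 uintptr_t stack_pointer,
                 const std::function<bool(const Frame&)>& unwind_listener_threw) {
  for (auto it = stack.upper_bound(pop_until);
       it != stack.end() && it->first <= stack_pointer;
       ++it) {
    if (unwind_listener_threw(it->second)) {
      pop_until = it->first;  // Partial pop: remember how far we got.
      return false;
    }
  }
  pop_until = stack_pointer;
  return true;
}

int main() {
  Stack stack = {{0x1000, {"h"}}, {0x2000, {"g"}}, {0x3000, {"f"}}};
  uintptr_t pop_until = 0u;
  // Pretend the listener for "g" throws a new exception mid-unwind.
  bool ok = PopFramesTo(stack, pop_until, /*stack_pointer=*/0x2800,
                        [](const Frame& f) { return f.method == "g"; });
  std::cout << std::boolalpha << ok << " stopped at 0x" << std::hex << pop_until << "\n";
  // Prints: false stopped at 0x2000 — the frame "f" above the boundary is untouched.
  return 0;
}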
diff --git a/runtime/instrumentation.h b/runtime/instrumentation.h
index 82e1a13a28..e30fc9a84c 100644
--- a/runtime/instrumentation.h
+++ b/runtime/instrumentation.h
@@ -169,16 +169,17 @@ class InstrumentationStackPopper {
   explicit InstrumentationStackPopper(Thread* self);
   ~InstrumentationStackPopper() REQUIRES_SHARED(Locks::mutator_lock_);

-  // Increase the number of frames being popped to 'desired_pops' return true if the frames were
-  // popped without any exceptions, false otherwise. The exception that caused the pop is
-  // 'exception'.
-  bool PopFramesTo(uint32_t desired_pops, /*in-out*/MutableHandle<mirror::Throwable>& exception)
+  // Increase the number of frames being popped up to `stack_pointer`. Return true if the
+  // frames were popped without any exceptions, false otherwise. The exception that caused
+  // the pop is 'exception'.
+  bool PopFramesTo(uintptr_t stack_pointer, /*in-out*/MutableHandle<mirror::Throwable>& exception)
       REQUIRES_SHARED(Locks::mutator_lock_);

  private:
   Thread* self_;
   Instrumentation* instrumentation_;
-  uint32_t frames_to_remove_;
+  // The stack pointer limit for frames to pop.
+  uintptr_t pop_until_;
 };
@@ -494,6 +495,7 @@ class Instrumentation {
   void PushInstrumentationStackFrame(Thread* self,
                                      ObjPtr<mirror::Object> this_object,
                                      ArtMethod* method,
+                                     uintptr_t stack_pointer,
                                      uintptr_t lr,
                                      bool interpreter_entry)
       REQUIRES_SHARED(Locks::mutator_lock_);
@@ -507,13 +509,15 @@ class Instrumentation {
   // result values of the function are stored. Both pointers must always be valid but the values
   // held there will only be meaningful if interpreted as the appropriate type given the function
   // being returned from.
-  TwoWordReturn PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
-                                             uint64_t* gpr_result, uint64_t* fpr_result)
+  TwoWordReturn PopInstrumentationStackFrame(Thread* self,
+                                             uintptr_t* return_pc_addr,
+                                             uint64_t* gpr_result,
+                                             uint64_t* fpr_result)
       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());

   // Pops nframes instrumentation frames from the current thread. Returns the return pc for the last
   // instrumentation frame that's popped.
-  uintptr_t PopFramesForDeoptimization(Thread* self, size_t nframes) const
+  uintptr_t PopFramesForDeoptimization(Thread* self, uintptr_t stack_pointer) const
       REQUIRES_SHARED(Locks::mutator_lock_);

   // Call back for configure stubs.
@@ -534,7 +538,7 @@ class Instrumentation {
   // This is used by the debugger to cause a deoptimization of the thread's stack after updating
   // local variable(s).
   void InstrumentThreadStack(Thread* thread)
-      REQUIRES_SHARED(Locks::mutator_lock_);
+      REQUIRES(Locks::mutator_lock_);

   // Force all currently running frames to be deoptimized back to interpreter. This should only be
   // used in cases where basically all compiled code has been invalidated.
diff --git a/runtime/jit/jit_code_cache.cc b/runtime/jit/jit_code_cache.cc
index 299a3d3ff8..717b2a3072 100644
--- a/runtime/jit/jit_code_cache.cc
+++ b/runtime/jit/jit_code_cache.cc
@@ -1002,13 +1002,12 @@ class MarkCodeClosure final : public Closure {
     // The stack walking code queries the side instrumentation stack if it
     // sees an instrumentation exit pc, so the JIT code of methods in that stack
     // must have been seen. We sanity check this below.
-    for (const instrumentation::InstrumentationStackFrame& frame
-        : *thread->GetInstrumentationStack()) {
+    for (const auto& it : *thread->GetInstrumentationStack()) {
       // The 'method_' in InstrumentationStackFrame is the one that has return_pc_ in
       // its stack frame, it is not the method owning return_pc_. We just pass null to
       // LookupMethodHeader: the method is only checked against in debug builds.
       OatQuickMethodHeader* method_header =
-          code_cache_->LookupMethodHeader(frame.return_pc_, /* method= */ nullptr);
+          code_cache_->LookupMethodHeader(it.second.return_pc_, /* method= */ nullptr);
       if (method_header != nullptr) {
         const void* code = method_header->GetCode();
         CHECK(bitmap_->Test(FromCodeToAllocation(code)));
diff --git a/runtime/quick_exception_handler.cc b/runtime/quick_exception_handler.cc
index 910b389cf3..bd69aa49fb 100644
--- a/runtime/quick_exception_handler.cc
+++ b/runtime/quick_exception_handler.cc
@@ -156,37 +156,6 @@ class CatchBlockStackVisitor final : public StackVisitor {
   DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
 };

-static size_t GetInstrumentationFramesToPop(Thread* self, size_t frame_depth)
-    REQUIRES_SHARED(Locks::mutator_lock_) {
-  CHECK_NE(frame_depth, kInvalidFrameDepth);
-  size_t instrumentation_frames_to_pop = 0;
-  StackVisitor::WalkStack(
-      [&](art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
-        size_t current_frame_depth = stack_visitor->GetFrameDepth();
-        if (current_frame_depth < frame_depth) {
-          CHECK(stack_visitor->GetMethod() != nullptr);
-          if (UNLIKELY(reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) ==
-                           stack_visitor->GetReturnPc())) {
-            if (!stack_visitor->IsInInlinedFrame()) {
-              // We do not count inlined frames, because we do not instrument them. The reason we
-              // include them in the stack walking is the check against `frame_depth_`, which is
-              // given to us by a visitor that visits inlined frames.
-              ++instrumentation_frames_to_pop;
-            }
-          }
-          return true;
-        }
-        // We reached the frame of the catch handler or the upcall.
-        return false;
-      },
-      self,
-      /* context= */ nullptr,
-      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames,
-      /* check_suspended */ true,
-      /* include_transitions */ true);
-  return instrumentation_frames_to_pop;
-}
-
 // Finds the appropriate exception catch after calling all method exit instrumentation functions.
 // Note that this might change the exception being thrown.
 void QuickExceptionHandler::FindCatch(ObjPtr<mirror::Throwable> exception) {
@@ -219,11 +188,6 @@ void QuickExceptionHandler::FindCatch(ObjPtr<mirror::Throwable> exception) {
     DCHECK_GE(new_pop_count, already_popped);
     already_popped = new_pop_count;

-    // Figure out how many of those frames have instrumentation we need to remove (Should be the
-    // exact same as number of new_pop_count if there aren't inlined frames).
-    size_t instrumentation_frames_to_pop =
-        GetInstrumentationFramesToPop(self_, handler_frame_depth_);
-
     if (kDebugExceptionDelivery) {
       if (*handler_quick_frame_ == nullptr) {
         LOG(INFO) << "Handler is upcall";
@@ -234,8 +198,6 @@ void QuickExceptionHandler::FindCatch(ObjPtr<mirror::Throwable> exception) {
         LOG(INFO) << "Handler: " << handler_method_->PrettyMethod() << " (line: "
                   << line_number << ")";
       }
-      LOG(INFO) << "Will attempt to pop " << instrumentation_frames_to_pop
-                << " off of the instrumentation stack";
     }
     // Exception was cleared as part of delivery.
     DCHECK(!self_->IsExceptionPending());
@@ -245,7 +207,8 @@ void QuickExceptionHandler::FindCatch(ObjPtr<mirror::Throwable> exception) {
         handler_method_header_->IsOptimized()) {
       SetCatchEnvironmentForOptimizedHandler(&visitor);
     }
-    popped_to_top = popper.PopFramesTo(instrumentation_frames_to_pop, exception_ref);
+    popped_to_top =
+        popper.PopFramesTo(reinterpret_cast<uintptr_t>(handler_quick_frame_), exception_ref);
   } while (!popped_to_top);

   if (!clear_exception_) {
     // Put exception back in root set with clear throw location.
@@ -679,10 +642,9 @@ uintptr_t QuickExceptionHandler::UpdateInstrumentationStack() {
   DCHECK(is_deoptimization_) << "Non-deoptimization handlers should use FindCatch";
   uintptr_t return_pc = 0;
   if (method_tracing_active_) {
-    size_t instrumentation_frames_to_pop =
-        GetInstrumentationFramesToPop(self_, handler_frame_depth_);
     instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
-    return_pc = instrumentation->PopFramesForDeoptimization(self_, instrumentation_frames_to_pop);
+    return_pc = instrumentation->PopFramesForDeoptimization(
+        self_, reinterpret_cast<uintptr_t>(handler_quick_frame_));
   }
   return return_pc;
 }
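
With the keys being stack addresses, the exception handler no longer needs the deleted GetInstrumentationFramesToPop walk: it just passes the catch handler's frame address as the boundary, and the deoptimization path additionally needs back the return PC of the outermost frame it discarded. A stand-alone model of that bookkeeping (the Frame type and addresses are illustrative only):

#include <cstdint>
#include <iostream>
#include <map>

struct Frame { uintptr_t return_pc; };

// Erase every frame at or below `pop_until` (stacks grow down, so these are
// the frames being unwound) and hand back the last original return PC seen.
uintptr_t PopFramesForDeoptimization(std::map<uintptr_t, Frame>& stack, uintptr_t pop_until) {
  uintptr_t return_pc = 0u;
  for (auto it = stack.begin(); it != stack.end() && it->first <= pop_until;) {
    return_pc = it->second.return_pc;
    it = stack.erase(it);
  }
  return return_pc;
}

int main() {
  std::map<uintptr_t, Frame> stack = {{0x1000, {0xaaaa}}, {0x2000, {0xbbbb}}, {0x3000, {0xcccc}}};
  // The handler frame sits just above the two innermost instrumented frames.
  std::cout << std::hex << PopFramesForDeoptimization(stack, /*pop_until=*/0x2800) << "\n";  // bbbb
  std::cout << stack.size() << " frame(s) left\n";  // 1
  return 0;
}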
diff --git a/runtime/stack.cc b/runtime/stack.cc
index 2c76db5bae..4148dc71bb 100644
--- a/runtime/stack.cc
+++ b/runtime/stack.cc
@@ -556,18 +556,18 @@ uintptr_t StackVisitor::GetFPR(uint32_t reg) const {
   return context_->GetFPR(reg);
 }

+uintptr_t StackVisitor::GetReturnPcAddr() const {
+  uintptr_t sp = reinterpret_cast<uintptr_t>(GetCurrentQuickFrame());
+  DCHECK_NE(sp, 0u);
+  return sp + GetCurrentQuickFrameInfo().GetReturnPcOffset();
+}
+
 uintptr_t StackVisitor::GetReturnPc() const {
-  uint8_t* sp = reinterpret_cast<uint8_t*>(GetCurrentQuickFrame());
-  DCHECK(sp != nullptr);
-  uint8_t* pc_addr = sp + GetCurrentQuickFrameInfo().GetReturnPcOffset();
-  return *reinterpret_cast<uintptr_t*>(pc_addr);
+  return *reinterpret_cast<uintptr_t*>(GetReturnPcAddr());
 }

 void StackVisitor::SetReturnPc(uintptr_t new_ret_pc) {
-  uint8_t* sp = reinterpret_cast<uint8_t*>(GetCurrentQuickFrame());
-  CHECK(sp != nullptr);
-  uint8_t* pc_addr = sp + GetCurrentQuickFrameInfo().GetReturnPcOffset();
-  *reinterpret_cast<uintptr_t*>(pc_addr) = new_ret_pc;
+  *reinterpret_cast<uintptr_t*>(GetReturnPcAddr()) = new_ret_pc;
 }

 size_t StackVisitor::ComputeNumFrames(Thread* thread, StackWalkKind walk_kind) {
@@ -851,8 +851,6 @@ void StackVisitor::WalkStack(bool include_transitions) {
     DCHECK(thread_ == Thread::Current() || thread_->IsSuspended());
   }
   CHECK_EQ(cur_depth_, 0U);
-  bool exit_stubs_installed = Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled();
-  uint32_t instrumentation_stack_depth = 0;
   size_t inlined_frames_count = 0;

   for (const ManagedStack* current_fragment = thread_->GetManagedStack();
@@ -943,47 +941,35 @@ void StackVisitor::WalkStack(bool include_transitions) {
         }
         // Compute PC for next stack frame from return PC.
         size_t frame_size = frame_info.FrameSizeInBytes();
-        size_t return_pc_offset = frame_size - sizeof(void*);
-        uint8_t* return_pc_addr = reinterpret_cast<uint8_t*>(cur_quick_frame_) + return_pc_offset;
+        uintptr_t return_pc_addr = GetReturnPcAddr();
         uintptr_t return_pc = *reinterpret_cast<uintptr_t*>(return_pc_addr);

-        if (UNLIKELY(exit_stubs_installed ||
-                     reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) == return_pc)) {
+        if (UNLIKELY(reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) == return_pc)) {
           // While profiling, the return pc is restored from the side stack, except when walking
           // the stack for an exception where the side stack will be unwound in VisitFrame.
-          if (reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) == return_pc) {
-            CHECK_LT(instrumentation_stack_depth, thread_->GetInstrumentationStack()->size());
-            const instrumentation::InstrumentationStackFrame& instrumentation_frame =
-                (*thread_->GetInstrumentationStack())[instrumentation_stack_depth];
-            instrumentation_stack_depth++;
-            if (GetMethod() ==
-                Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveAllCalleeSaves)) {
-              // Skip runtime save all callee frames which are used to deliver exceptions.
-            } else if (instrumentation_frame.interpreter_entry_) {
-              ArtMethod* callee =
-                  Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs);
-              CHECK_EQ(GetMethod(), callee) << "Expected: " << ArtMethod::PrettyMethod(callee)
-                                            << " Found: " << ArtMethod::PrettyMethod(GetMethod());
-            } else {
-              // Instrumentation generally doesn't distinguish between a method's obsolete and
-              // non-obsolete version.
-              CHECK_EQ(instrumentation_frame.method_->GetNonObsoleteMethod(),
-                       GetMethod()->GetNonObsoleteMethod())
-                  << "Expected: "
-                  << ArtMethod::PrettyMethod(instrumentation_frame.method_->GetNonObsoleteMethod())
-                  << " Found: " << ArtMethod::PrettyMethod(GetMethod()->GetNonObsoleteMethod());
-            }
-            if (num_frames_ != 0) {
-              // Check agreement of frame Ids only if num_frames_ is computed to avoid infinite
-              // recursion.
-              size_t frame_id = instrumentation::Instrumentation::ComputeFrameId(
-                  thread_,
-                  cur_depth_,
-                  inlined_frames_count);
-              CHECK_EQ(instrumentation_frame.frame_id_, frame_id);
-            }
-            return_pc = instrumentation_frame.return_pc_;
+          const std::map<uintptr_t, instrumentation::InstrumentationStackFrame>&
+              instrumentation_stack = *thread_->GetInstrumentationStack();
+          auto it = instrumentation_stack.find(return_pc_addr);
+          CHECK(it != instrumentation_stack.end());
+          const instrumentation::InstrumentationStackFrame& instrumentation_frame = it->second;
+          if (GetMethod() ==
+              Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveAllCalleeSaves)) {
+            // Skip runtime save all callee frames which are used to deliver exceptions.
+          } else if (instrumentation_frame.interpreter_entry_) {
+            ArtMethod* callee =
+                Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs);
+            CHECK_EQ(GetMethod(), callee) << "Expected: " << ArtMethod::PrettyMethod(callee)
+                                          << " Found: " << ArtMethod::PrettyMethod(GetMethod());
+          } else {
+            // Instrumentation generally doesn't distinguish between a method's obsolete and
+            // non-obsolete version.
+            CHECK_EQ(instrumentation_frame.method_->GetNonObsoleteMethod(),
+                     GetMethod()->GetNonObsoleteMethod())
+                << "Expected: "
+                << ArtMethod::PrettyMethod(instrumentation_frame.method_->GetNonObsoleteMethod())
+                << " Found: " << ArtMethod::PrettyMethod(GetMethod()->GetNonObsoleteMethod());
           }
+          return_pc = instrumentation_frame.return_pc_;
         }
         cur_quick_frame_pc_ = return_pc;
diff --git a/runtime/stack.h b/runtime/stack.h
index ad73e75fb5..af33e6ca7d 100644
--- a/runtime/stack.h
+++ b/runtime/stack.h
@@ -258,6 +258,7 @@ class StackVisitor {
   uintptr_t* GetGPRAddress(uint32_t reg) const;

   uintptr_t GetReturnPc() const REQUIRES_SHARED(Locks::mutator_lock_);
+  uintptr_t GetReturnPcAddr() const REQUIRES_SHARED(Locks::mutator_lock_);

   void SetReturnPc(uintptr_t new_ret_pc) REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/thread.cc b/runtime/thread.cc
index a996bccf1c..3add372fd6 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -2298,7 +2298,8 @@ Thread::Thread(bool daemon)
       is_runtime_thread_(false) {
   wait_mutex_ = new Mutex("a thread wait mutex", LockLevel::kThreadWaitLock);
   wait_cond_ = new ConditionVariable("a thread wait condition variable", *wait_mutex_);
-  tlsPtr_.instrumentation_stack = new std::deque<instrumentation::InstrumentationStackFrame>;
+  tlsPtr_.instrumentation_stack =
+      new std::map<uintptr_t, instrumentation::InstrumentationStackFrame>;
   tlsPtr_.name = new std::string(kThreadNameDuringStartup);

   static_assert((sizeof(Thread) % 4) == 0U,
@@ -3621,7 +3622,7 @@ void Thread::QuickDeliverException() {
                                         method_type);
       artDeoptimize(this);
       UNREACHABLE();
-    } else {
+    } else if (visitor.caller != nullptr) {
       LOG(WARNING) << "Got a deoptimization request on un-deoptimizable method "
                    << visitor.caller->PrettyMethod();
     }
@@ -4065,8 +4066,8 @@ void Thread::VisitRoots(RootVisitor* visitor) {
   RootCallbackVisitor visitor_to_callback(visitor, thread_id);
   ReferenceMapVisitor<RootCallbackVisitor, kPrecise> mapper(this, &context, visitor_to_callback);
   mapper.template WalkStack<StackVisitor::CountTransitions::kNo>(false);
-  for (instrumentation::InstrumentationStackFrame& frame : *GetInstrumentationStack()) {
-    visitor->VisitRootIfNonNull(&frame.this_object_, RootInfo(kRootVMInternal, thread_id));
+  for (auto& entry : *GetInstrumentationStack()) {
+    visitor->VisitRootIfNonNull(&entry.second.this_object_, RootInfo(kRootVMInternal, thread_id));
   }
 }
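
In the stack walker above, a frame whose return slot holds the special instrumentation exit PC no longer pairs with "the next entry of a side deque"; the walker recovers the original return PC by looking the entry up under the address of that slot. A simplified model of that lookup (the sentinel value and the frame records are invented):

#include <cstdint>
#include <iostream>
#include <map>

constexpr uintptr_t kInstrumentationExitPc = 0xfeedface;  // Stand-in sentinel.

struct Frame { uintptr_t original_return_pc; };

// What the walker does when decoding the caller of the current frame.
uintptr_t ResolveReturnPc(const std::map<uintptr_t, Frame>& instrumentation_stack,
                          uintptr_t return_pc_addr,
                          uintptr_t return_pc_in_slot) {
  if (return_pc_in_slot != kInstrumentationExitPc) {
    return return_pc_in_slot;  // Not instrumented: the slot already has the real PC.
  }
  // Instrumented: the exit stub owns the slot; recover the real PC from the map.
  auto it = instrumentation_stack.find(return_pc_addr);
  return it->second.original_return_pc;
}

int main() {
  std::map<uintptr_t, Frame> instrumentation_stack = {{0x2000, {0xbbbb}}};
  std::cout << std::hex
            << ResolveReturnPc(instrumentation_stack, 0x2000, kInstrumentationExitPc) << "\n"
            << ResolveReturnPc(instrumentation_stack, 0x3000, 0xcccc) << "\n";
  return 0;
}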
diff --git a/runtime/thread.h b/runtime/thread.h
index 483191eddd..7129526237 100644
--- a/runtime/thread.h
+++ b/runtime/thread.h
@@ -1064,7 +1064,19 @@ class Thread {
   void RemoveDebuggerShadowFrameMapping(size_t frame_id)
       REQUIRES_SHARED(Locks::mutator_lock_);

-  std::deque<instrumentation::InstrumentationStackFrame>* GetInstrumentationStack() {
+  // While getting this map requires shared the mutator lock, manipulating it
+  // should actually follow these rules:
+  // (1) The owner of this map (the thread) can change it with its mutator lock.
+  // (2) Other threads can read this map when the owner is suspended and they
+  //     hold the mutator lock.
+  // (3) Other threads can change this map when owning the mutator lock exclusively.
+  //
+  // The reason why (3) needs the mutator lock exclusively (and not just having
+  // the owner suspended) is that we don't want other threads to concurrently read the map.
+  //
+  // TODO: Add a class abstraction to express these rules.
+  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* GetInstrumentationStack()
+      REQUIRES_SHARED(Locks::mutator_lock_) {
     return tlsPtr_.instrumentation_stack;
   }
@@ -1746,8 +1758,12 @@ class Thread {
     Context* long_jump_context;

     // Additional stack used by method instrumentation to store method and return pc values.
-    // Stored as a pointer since std::deque is not PACKED.
-    std::deque<instrumentation::InstrumentationStackFrame>* instrumentation_stack;
+    // Stored as a pointer since std::map is not PACKED.
+    // !DO NOT CHANGE! to std::unordered_map: the users of this map require an
+    // ordered iteration on the keys (which are stack addresses).
+    // Also see Thread::GetInstrumentationStack for the requirements on
+    // manipulating and reading this map.
+    std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* instrumentation_stack;

     // For gc purpose, a shadow frame record stack that keeps track of:
     // 1) shadow frames under construction.
diff --git a/test/2011-stack-walk-concurrent-instrument/expected.txt b/test/2011-stack-walk-concurrent-instrument/expected.txt
new file mode 100644
index 0000000000..77a1486479
--- /dev/null
+++ b/test/2011-stack-walk-concurrent-instrument/expected.txt
@@ -0,0 +1,2 @@
+JNI_OnLoad called
+Done
diff --git a/test/2011-stack-walk-concurrent-instrument/info.txt b/test/2011-stack-walk-concurrent-instrument/info.txt
new file mode 100644
index 0000000000..91f0106279
--- /dev/null
+++ b/test/2011-stack-walk-concurrent-instrument/info.txt
@@ -0,0 +1,3 @@
+Tests concurrently instrumenting a thread while walking a stack doesn't crash/break.
+
+Bug: 72608560
diff --git a/test/2011-stack-walk-concurrent-instrument/src/Main.java b/test/2011-stack-walk-concurrent-instrument/src/Main.java
new file mode 100644
index 0000000000..8f96f937c9
--- /dev/null
+++ b/test/2011-stack-walk-concurrent-instrument/src/Main.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.util.concurrent.*;
+
+public class Main {
+  public Main() {
+  }
+
+  void $noinline$f(Runnable r) throws Exception {
+    $noinline$g(r);
+  }
+
+  void $noinline$g(Runnable r) {
+    $noinline$h(r);
+  }
+
+  void $noinline$h(Runnable r) {
+    r.run();
+  }
+
+  public native void resetTest();
+  public native void waitAndDeopt(Thread t);
+  public native void doSelfStackWalk();
+
+  void testConcurrent() throws Exception {
+    resetTest();
+    final Thread current = Thread.currentThread();
+    Thread t = new Thread(() -> {
+      try {
+        this.waitAndDeopt(current);
+      } catch (Exception e) {
+        throw new Error("Fail!", e);
+      }
+    });
+    t.start();
+    $noinline$f(() -> {
+      try {
+        this.doSelfStackWalk();
+      } catch (Exception e) {
+        throw new Error("Fail!", e);
+      }
+    });
+    t.join();
+  }
+
+  public static void main(String[] args) throws Exception {
+    System.loadLibrary(args[0]);
+    Main st = new Main();
+    st.testConcurrent();
+    System.out.println("Done");
+  }
+}
diff --git a/test/2011-stack-walk-concurrent-instrument/stack_walk_concurrent.cc b/test/2011-stack-walk-concurrent-instrument/stack_walk_concurrent.cc
new file mode 100644
index 0000000000..a185446ca5
--- /dev/null
+++ b/test/2011-stack-walk-concurrent-instrument/stack_walk_concurrent.cc
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <atomic>
+#include <string_view>
+
+#include "arch/context.h"
+#include "art_method-inl.h"
+#include "jni.h"
+#include "scoped_thread_state_change.h"
+#include "stack.h"
+#include "thread.h"
+
+namespace art {
+namespace StackWalkConcurrentInstrument {
+
+std::atomic<bool> instrument_waiting = false;
+std::atomic<bool> instrumented = false;
+
+// Spin lock.
+static void WaitForInstrument() REQUIRES_SHARED(Locks::mutator_lock_) {
+  ScopedThreadSuspension sts(Thread::Current(), ThreadState::kWaitingForDeoptimization);
+  instrument_waiting = true;
+  while (!instrumented) {
+  }
+}
+
+class SelfStackWalkVisitor : public StackVisitor {
+ public:
+  explicit SelfStackWalkVisitor(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_)
+      : StackVisitor(thread, Context::Create(), StackWalkKind::kIncludeInlinedFrames) {}
+
+  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
+    if (GetMethod()->GetNameView() == "$noinline$f") {
+      CHECK(!found_f_);
+      found_f_ = true;
+    } else if (GetMethod()->GetNameView() == "$noinline$g") {
+      CHECK(!found_g_);
+      found_g_ = true;
+      WaitForInstrument();
+    } else if (GetMethod()->GetNameView() == "$noinline$h") {
+      CHECK(!found_h_);
+      found_h_ = true;
+    }
+    return true;
+  }
+
+  bool found_f_ = false;
+  bool found_g_ = false;
+  bool found_h_ = false;
+};
+
+extern "C" JNIEXPORT void JNICALL Java_Main_resetTest(JNIEnv*, jobject) {
+  instrument_waiting = false;
+  instrumented = false;
+}
+
+extern "C" JNIEXPORT void JNICALL Java_Main_doSelfStackWalk(JNIEnv*, jobject) {
+  ScopedObjectAccess soa(Thread::Current());
+  SelfStackWalkVisitor sswv(Thread::Current());
+  sswv.WalkStack();
+  CHECK(sswv.found_f_);
+  CHECK(sswv.found_g_);
+  CHECK(sswv.found_h_);
+}
+extern "C" JNIEXPORT void JNICALL Java_Main_waitAndDeopt(JNIEnv*, jobject, jobject target) {
+  while (!instrument_waiting) {
+  }
+  bool timed_out = false;
+  Thread* other = Runtime::Current()->GetThreadList()->SuspendThreadByPeer(
+      target, true, SuspendReason::kInternal, &timed_out);
+  CHECK(!timed_out);
+  CHECK(other != nullptr);
+  ScopedSuspendAll ssa(__FUNCTION__);
+  Runtime::Current()->GetInstrumentation()->InstrumentThreadStack(other);
+  MutexLock mu(Thread::Current(), *Locks::thread_suspend_count_lock_);
+  bool updated = other->ModifySuspendCount(Thread::Current(), -1, nullptr, SuspendReason::kInternal);
+  CHECK(updated);
+  instrumented = true;
+  return;
+}
+
+}  // namespace StackWalkConcurrentInstrument
+}  // namespace art
diff --git a/test/Android.bp b/test/Android.bp
index de3f516d70..71825131bf 100644
--- a/test/Android.bp
+++ b/test/Android.bp
@@ -600,6 +600,7 @@ cc_defaults {
         "1947-breakpoint-redefine-deopt/check_deopt.cc",
         "1972-jni-id-swap-indices/jni_id.cc",
         "1985-structural-redefine-stack-scope/stack_scope.cc",
+        "2011-stack-walk-concurrent-instrument/stack_walk_concurrent.cc",
        "common/runtime_state.cc",
        "common/stack_inspect.cc",
    ],
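
The run-test above relies on two atomic flags to line the threads up: the walking thread parks inside the stack walk until the other thread has suspended it, instrumented its stack, and resumed it. The skeleton of that rendezvous, reduced to plain std::thread and std::atomic with the ART-specific suspension and instrumentation calls replaced by a placeholder comment, looks roughly like this:

#include <atomic>
#include <iostream>
#include <thread>

std::atomic<bool> instrument_waiting{false};
std::atomic<bool> instrumented{false};

// Runs on the thread whose stack would be walked and then instrumented.
void Walker() {
  // ... walking frames; on reaching the interesting frame:
  instrument_waiting = true;  // Tell the other thread we're parked.
  while (!instrumented) {}    // Spin until instrumentation has happened.
  // ... finish the walk over the now-instrumented frames.
}

// Runs on the thread that installs the instrumentation.
void Instrumenter() {
  while (!instrument_waiting) {}  // Wait for the walker to park.
  // <suspend the walker, instrument its stack, resume it>
  instrumented = true;            // Let the walker continue.
}

int main() {
  std::thread a(Walker);
  std::thread b(Instrumenter);
  a.join();
  b.join();
  std::cout << "rendezvous done\n";
  return 0;
}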