/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "quick_exception_handler.h"
#include <ios>
#include <queue>

#include "arch/context.h"
#include "art_method-inl.h"
#include "base/enums.h"
#include "base/globals.h"
#include "base/logging.h"  // For VLOG_IS_ON.
#include "base/systrace.h"
#include "dex/dex_file_types.h"
#include "dex/dex_instruction.h"
#include "entrypoints/entrypoint_utils.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "handle_scope-inl.h"
#include "interpreter/shadow_frame-inl.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/throwable.h"
#include "nterp_helpers.h"
#include "oat_quick_method_header.h"
#include "stack.h"
#include "stack_map.h"

namespace art {

static constexpr bool kDebugExceptionDelivery = false;
static constexpr size_t kInvalidFrameDepth = 0xffffffff;

QuickExceptionHandler::QuickExceptionHandler(Thread* self, bool is_deoptimization)
    : self_(self),
      context_(self->GetLongJumpContext()),
      is_deoptimization_(is_deoptimization),
      method_tracing_active_(is_deoptimization ||
                             Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()),
      handler_quick_frame_(nullptr),
      handler_quick_frame_pc_(0),
      handler_method_header_(nullptr),
      handler_quick_arg0_(0),
      handler_dex_pc_(0),
      clear_exception_(false),
      handler_frame_depth_(kInvalidFrameDepth),
      full_fragment_done_(false) {}

// Finds the catch handler for the pending exception.
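// As an illustrative example (not taken from real code), for Java sources like:
//
//   void caller() {
//     try { callee(); } catch (MyException e) { /* handler */ }
//   }
//   void callee() { throw new MyException(); }
//
// the visitor starts at callee()'s frame, finds no matching handler there, records callee() so
// its MethodUnwind callback can run later, and ends the walk at caller()'s frame once
// ArtMethod::FindCatchBlock reports a catch for MyException.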
class CatchBlockStackVisitor final : public StackVisitor {
 public:
  CatchBlockStackVisitor(Thread* self,
                         Context* context,
                         Handle<mirror::Throwable>* exception,
                         QuickExceptionHandler* exception_handler,
                         uint32_t skip_frames,
                         bool skip_top_unwind_callback)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_(exception),
        exception_handler_(exception_handler),
        skip_frames_(skip_frames),
        skip_unwind_callback_(skip_top_unwind_callback) {
    DCHECK_IMPLIES(skip_unwind_callback_, skip_frames_ == 0);
  }

  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod* method = GetMethod();
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    if (method == nullptr) {
      DCHECK_EQ(skip_frames_, 0u)
          << "We tried to skip an upcall! We should have returned to the upcall to finish delivery";
      // This is the upcall; we remember the frame and last pc so that we may long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      return false;  // End stack walk.
    }
    if (skip_frames_ != 0) {
      skip_frames_--;
      return true;
    }
    if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    }
    bool continue_stack_walk = HandleTryItems(method);
    // Collect methods for which the MethodUnwind callback needs to be invoked. MethodUnwind
    // callbacks can potentially throw, so we want to call them after we find the catch block.
    // We stop the stack walk when we find the catch block; if we are ending the stack walk we
    // don't have to unwind this method, so don't record it.
    if (continue_stack_walk && !skip_unwind_callback_) {
      // The skip-unwind-callback flag is only used when a method exit callback has thrown an
      // exception. In that case, we should have the runtime method (artMethodExitHook) on top of
      // the stack, and the second frame should be the method for which method exit was called.
      DCHECK_IMPLIES(skip_unwind_callback_, GetFrameDepth() == 2);
      unwound_methods_.push(method);
    }
    skip_unwind_callback_ = false;
    return continue_stack_walk;
  }

  std::queue<ArtMethod*>& GetUnwoundMethods() {
    return unwound_methods_;
  }

 private:
  bool HandleTryItems(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    uint32_t dex_pc = dex::kDexNoIndex;
    if (!method->IsNative()) {
      dex_pc = GetDexPc();
    }
    if (dex_pc != dex::kDexNoIndex) {
      bool clear_exception = false;
      StackHandleScope<1> hs(GetThread());
      Handle<mirror::Class> to_find(hs.NewHandle((*exception_)->GetClass()));
      uint32_t found_dex_pc = method->FindCatchBlock(to_find, dex_pc, &clear_exception);
      exception_handler_->SetClearException(clear_exception);
      if (found_dex_pc != dex::kDexNoIndex) {
        exception_handler_->SetHandlerDexPc(found_dex_pc);
        exception_handler_->SetHandlerQuickFramePc(
            GetCurrentOatQuickMethodHeader()->ToNativeQuickPc(
                method, found_dex_pc, /* is_for_catch_handler= */ true));
        exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
        exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
        return false;  // End stack walk.
      } else if (UNLIKELY(GetThread()->HasDebuggerShadowFrames())) {
        // We are going to unwind this frame. Did we prepare a shadow frame for debugging?
        size_t frame_id = GetFrameId();
        ShadowFrame* frame = GetThread()->FindDebuggerShadowFrame(frame_id);
        if (frame != nullptr) {
          // We will not execute this shadow frame, so we can safely deallocate it.
          GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
          ShadowFrame::DeleteDeoptimizedFrame(frame);
        }
      }
    }
    return true;  // Continue stack walk.
  }

  // The exception we're looking for the catch block of.
  Handle<mirror::Throwable>* exception_;
  // The quick exception handler we're visiting for.
  QuickExceptionHandler* const exception_handler_;
  // The number of frames to skip searching for catches in.
  uint32_t skip_frames_;
  // The methods we unwind past to reach the catch block. We record these to call their
  // MethodUnwind callbacks.
  std::queue<ArtMethod*> unwound_methods_;
  // Specifies if the unwind callback should be ignored for the method at the top of the stack.
  bool skip_unwind_callback_;

  DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
};

// Finds the appropriate exception catch after calling all method exit instrumentation functions.
// Note that this might change the exception being thrown. If is_method_exit_exception is true,
// skip the method unwind call for the method on top of the stack, as the exception was thrown by
// the method exit callback.
void QuickExceptionHandler::FindCatch(ObjPtr<mirror::Throwable> exception,
                                      bool is_method_exit_exception) {
  DCHECK(!is_deoptimization_);
  instrumentation::Instrumentation* instr = Runtime::Current()->GetInstrumentation();
  // The total number of frames we have popped so far.
  uint32_t already_popped = 0;
  bool popped_to_top = true;
  StackHandleScope<1> hs(self_);
  MutableHandle<mirror::Throwable> exception_ref(hs.NewHandle(exception));
  bool skip_top_unwind_callback = is_method_exit_exception;
  // Sending the instrumentation events (done via ProcessMethodUnwindCallbacks) can cause new
  // exceptions to be thrown, which will override the current exception. Therefore we need to
  // perform the search for a catch in a loop until we have successfully popped all the way to a
  // catch or the top of the stack.
  do {
    if (kDebugExceptionDelivery) {
      ObjPtr<mirror::String> msg = exception_ref->GetDetailMessage();
      std::string str_msg(msg != nullptr ? msg->ToModifiedUtf8() : "");
      self_->DumpStack(LOG_STREAM(INFO) << "Delivering exception: " << exception_ref->PrettyTypeOf()
                                        << ": " << str_msg << "\n");
    }

    // Walk the stack to find the catch handler.
    CatchBlockStackVisitor visitor(self_,
                                   context_,
                                   &exception_ref,
                                   this,
                                   /*skip_frames=*/already_popped,
                                   skip_top_unwind_callback);
    visitor.WalkStack(true);
    skip_top_unwind_callback = false;

    uint32_t new_pop_count = handler_frame_depth_;
    DCHECK_GE(new_pop_count, already_popped);
    already_popped = new_pop_count;

    if (kDebugExceptionDelivery) {
      if (*handler_quick_frame_ == nullptr) {
        LOG(INFO) << "Handler is upcall";
      }
      if (GetHandlerMethod() != nullptr) {
        const DexFile* dex_file = GetHandlerMethod()->GetDexFile();
        int line_number =
            annotations::GetLineNumFromPC(dex_file, GetHandlerMethod(), handler_dex_pc_);
        LOG(INFO) << "Handler: " << GetHandlerMethod()->PrettyMethod() << " (line: "
                  << line_number << ")";
      }
    }
    // Exception was cleared as part of delivery.
    DCHECK(!self_->IsExceptionPending());
    // If the handler is in optimized code, we need to set the catch environment.
    if (*handler_quick_frame_ != nullptr &&
        handler_method_header_ != nullptr &&
        handler_method_header_->IsOptimized()) {
      SetCatchEnvironmentForOptimizedHandler(&visitor);
    }
    popped_to_top = instr->ProcessMethodUnwindCallbacks(self_,
                                                        visitor.GetUnwoundMethods(),
                                                        exception_ref);
  } while (!popped_to_top);

  // Pop off frames on the instrumentation stack to keep it in sync with what is on the stack.
  instr->PopInstrumentationStackUntil(self_, reinterpret_cast<uintptr_t>(handler_quick_frame_));
  if (!clear_exception_) {
    // Put exception back in root set with clear throw location.
    self_->SetException(exception_ref.Get());
  }
}

static VRegKind ToVRegKind(DexRegisterLocation::Kind kind) {
  // Slightly hacky since we cannot map DexRegisterLocationKind and VRegKind
  // one to one. However, StackVisitor::GetVRegFromOptimizedCode only needs to
  // distinguish between core/FPU registers and low/high bits on 64-bit.
  switch (kind) {
    case DexRegisterLocation::Kind::kConstant:
    case DexRegisterLocation::Kind::kInStack:
      // VRegKind is ignored.
      return VRegKind::kUndefined;

    case DexRegisterLocation::Kind::kInRegister:
      // Selects core register. For 64-bit registers, selects low 32 bits.
      return VRegKind::kLongLoVReg;

    case DexRegisterLocation::Kind::kInRegisterHigh:
      // Selects core register. For 64-bit registers, selects high 32 bits.
      return VRegKind::kLongHiVReg;

    case DexRegisterLocation::Kind::kInFpuRegister:
      // Selects FPU register. For 64-bit registers, selects low 32 bits.
      return VRegKind::kDoubleLoVReg;

    case DexRegisterLocation::Kind::kInFpuRegisterHigh:
      // Selects FPU register. For 64-bit registers, selects high 32 bits.
      return VRegKind::kDoubleHiVReg;

    default:
      LOG(FATAL) << "Unexpected vreg location " << kind;
      UNREACHABLE();
  }
}

void QuickExceptionHandler::SetCatchEnvironmentForOptimizedHandler(StackVisitor* stack_visitor) {
  DCHECK(!is_deoptimization_);
  DCHECK(*handler_quick_frame_ != nullptr) << "Method should not be called on upcall exceptions";
  DCHECK(GetHandlerMethod() != nullptr && handler_method_header_->IsOptimized());

  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG_STREAM(INFO) << "Setting catch phis: ");
  }

  CodeItemDataAccessor accessor(GetHandlerMethod()->DexInstructionData());
  const size_t number_of_vregs = accessor.RegistersSize();
  CodeInfo code_info(handler_method_header_);

  // Find the stack map of the catch block.
  StackMap catch_stack_map = code_info.GetCatchStackMapForDexPc(GetHandlerDexPc());
  DCHECK(catch_stack_map.IsValid());
  DexRegisterMap catch_vreg_map = code_info.GetDexRegisterMapOf(catch_stack_map);
  DCHECK_EQ(catch_vreg_map.size(), number_of_vregs);

  if (!catch_vreg_map.HasAnyLiveDexRegisters()) {
    return;
  }

  // Find the stack map of the throwing instruction.
  StackMap throw_stack_map =
      code_info.GetStackMapForNativePcOffset(stack_visitor->GetNativePcOffset());
  DCHECK(throw_stack_map.IsValid());
  DexRegisterMap throw_vreg_map = code_info.GetDexRegisterMapOf(throw_stack_map);
  DCHECK_EQ(throw_vreg_map.size(), number_of_vregs);

  // Copy each vreg's value from its location at the throwing instruction into the catch phi's
  // stack slot.
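  // A minimal sketch of one iteration, assuming vreg v lives in a machine register at the throw
  // point and its catch phi expects it in a stack slot:
  //
  //   value = <v read from the location recorded in throw_vreg_map>
  //   *(frame_top + catch_vreg_map[v].GetStackOffsetInBytes()) = value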
  for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
    DexRegisterLocation::Kind catch_location = catch_vreg_map[vreg].GetKind();
    if (catch_location == DexRegisterLocation::Kind::kNone) {
      continue;
    }
    DCHECK(catch_location == DexRegisterLocation::Kind::kInStack);

    // Get the vreg value from its current location.
    uint32_t vreg_value;
    VRegKind vreg_kind = ToVRegKind(throw_vreg_map[vreg].GetKind());
    bool get_vreg_success =
        stack_visitor->GetVReg(stack_visitor->GetMethod(),
                               vreg,
                               vreg_kind,
                               &vreg_value,
                               throw_vreg_map[vreg]);
    CHECK(get_vreg_success) << "VReg " << vreg << " was optimized out ("
                            << "method=" << ArtMethod::PrettyMethod(stack_visitor->GetMethod())
                            << ", dex_pc=" << stack_visitor->GetDexPc() << ", "
                            << "native_pc_offset=" << stack_visitor->GetNativePcOffset() << ")";

    // Copy the value to the catch phi's stack slot.
    int32_t slot_offset = catch_vreg_map[vreg].GetStackOffsetInBytes();
    ArtMethod** frame_top = stack_visitor->GetCurrentQuickFrame();
    uint8_t* slot_address = reinterpret_cast<uint8_t*>(frame_top) + slot_offset;
    uint32_t* slot_ptr = reinterpret_cast<uint32_t*>(slot_address);
    *slot_ptr = vreg_value;
  }
}

// Prepares deoptimization.
class DeoptimizeStackVisitor final : public StackVisitor {
 public:
  DeoptimizeStackVisitor(Thread* self,
                         Context* context,
                         QuickExceptionHandler* exception_handler,
                         bool single_frame)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_handler_(exception_handler),
        prev_shadow_frame_(nullptr),
        stacked_shadow_frame_pushed_(false),
        single_frame_deopt_(single_frame),
        single_frame_done_(false),
        single_frame_deopt_method_(nullptr),
        single_frame_deopt_quick_method_header_(nullptr),
        callee_method_(nullptr) {
  }

  ArtMethod* GetSingleFrameDeoptMethod() const {
    return single_frame_deopt_method_;
  }

  const OatQuickMethodHeader* GetSingleFrameDeoptQuickMethodHeader() const {
    return single_frame_deopt_quick_method_header_;
  }

  void FinishStackWalk() REQUIRES_SHARED(Locks::mutator_lock_) {
    // This is the upcall, or the next full frame in single-frame deopt, or the
    // code isn't deoptimizeable. We remember the frame and last pc so that we
    // may long jump to them.
    exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
    exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
    exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
    if (!stacked_shadow_frame_pushed_) {
      // In case there is no deoptimized shadow frame for this upcall, we still
      // need to push a nullptr to the stack since there is always a matching pop after
      // the long jump.
      GetThread()->PushStackedShadowFrame(nullptr,
                                          StackedShadowFrameType::kDeoptimizationShadowFrame);
      stacked_shadow_frame_pushed_ = true;
    }
    if (GetMethod() == nullptr) {
      exception_handler_->SetFullFragmentDone(true);
    } else {
      CHECK(callee_method_ != nullptr) << GetMethod()->PrettyMethod(false);
      exception_handler_->SetHandlerQuickArg0(reinterpret_cast<uintptr_t>(callee_method_));
    }
  }

  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    ArtMethod* method = GetMethod();
    VLOG(deopt) << "Deoptimizing stack: depth: " << GetFrameDepth()
                << " at method " << ArtMethod::PrettyMethod(method);
    if (method == nullptr || single_frame_done_) {
      FinishStackWalk();
      return false;  // End stack walk.
    } else if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    } else if (method->IsNative()) {
      // If we return from JNI with a pending exception and want to deoptimize, we need to skip
      // the native method. The top method is a runtime method; the native method comes next.
      // We also deoptimize for method instrumentation reasons from method entry / exit
      // callbacks; in those cases the native method is at the top of the stack.
      CHECK((GetFrameDepth() == 1U) || (GetFrameDepth() == 0U));
      callee_method_ = method;
      return true;
    } else if (!single_frame_deopt_ &&
               !Runtime::Current()->IsAsyncDeoptimizeable(GetOuterMethod(),
                                                          GetCurrentQuickFramePc())) {
      // We hit some code that's not deoptimizeable. However, single-frame deoptimization triggered
      // from compiled code is always allowed since HDeoptimize always saves the full environment.
      LOG(WARNING) << "Got request to deoptimize un-deoptimizable method "
                   << method->PrettyMethod();
      FinishStackWalk();
      return false;  // End stack walk.
    } else {
      // Check if a shadow frame already exists for debugger's set-local-value purpose.
      const size_t frame_id = GetFrameId();
      ShadowFrame* new_frame = GetThread()->FindDebuggerShadowFrame(frame_id);
      const bool* updated_vregs;
      CodeItemDataAccessor accessor(method->DexInstructionData());
      const size_t num_regs = accessor.RegistersSize();
      if (new_frame == nullptr) {
        new_frame = ShadowFrame::CreateDeoptimizedFrame(num_regs, nullptr, method, GetDexPc());
        updated_vregs = nullptr;
      } else {
        updated_vregs = GetThread()->GetUpdatedVRegFlags(frame_id);
        DCHECK(updated_vregs != nullptr);
      }
      if (GetCurrentOatQuickMethodHeader()->IsNterpMethodHeader()) {
        HandleNterpDeoptimization(method, new_frame, updated_vregs);
      } else {
        HandleOptimizingDeoptimization(method, new_frame, updated_vregs);
      }
      if (updated_vregs != nullptr) {
        // Calling Thread::RemoveDebuggerShadowFrameMapping will also delete the updated_vregs
        // array so this must come after we processed the frame.
        GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
        DCHECK(GetThread()->FindDebuggerShadowFrame(frame_id) == nullptr);
      }
      if (prev_shadow_frame_ != nullptr) {
        prev_shadow_frame_->SetLink(new_frame);
      } else {
        // Will be popped after the long jump after DeoptimizeStack(),
        // right before interpreter::EnterInterpreterFromDeoptimize().
        stacked_shadow_frame_pushed_ = true;
        GetThread()->PushStackedShadowFrame(
            new_frame, StackedShadowFrameType::kDeoptimizationShadowFrame);
      }
      prev_shadow_frame_ = new_frame;

      if (single_frame_deopt_ && !IsInInlinedFrame()) {
        // Single-frame deopt ends at the first non-inlined frame and needs to store that method.
        single_frame_done_ = true;
        single_frame_deopt_method_ = method;
        single_frame_deopt_quick_method_header_ = GetCurrentOatQuickMethodHeader();
      }
      callee_method_ = method;
      return true;
    }
  }

 private:
  void HandleNterpDeoptimization(ArtMethod* m,
                                 ShadowFrame* new_frame,
                                 const bool* updated_vregs)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod** cur_quick_frame = GetCurrentQuickFrame();
    StackReference<mirror::Object>* vreg_ref_base =
        reinterpret_cast<StackReference<mirror::Object>*>(NterpGetReferenceArray(cur_quick_frame));
    int32_t* vreg_int_base =
        reinterpret_cast<int32_t*>(NterpGetRegistersArray(cur_quick_frame));
    CodeItemDataAccessor accessor(m->DexInstructionData());
    const uint16_t num_regs = accessor.RegistersSize();
    // An nterp frame has two arrays: a dex register array and a reference array that shadows the
    // dex register array but contains only references (non-reference dex registers have nulls).
    // See nterp_helpers.cc.
    for (size_t reg = 0; reg < num_regs; ++reg) {
      if (updated_vregs != nullptr && updated_vregs[reg]) {
        // Keep the value set by debugger.
        continue;
      }
      StackReference<mirror::Object>* ref_addr = vreg_ref_base + reg;
      mirror::Object* ref = ref_addr->AsMirrorPtr();
      if (ref != nullptr) {
        new_frame->SetVRegReference(reg, ref);
      } else {
        new_frame->SetVReg(reg, vreg_int_base[reg]);
      }
    }
  }

  void HandleOptimizingDeoptimization(ArtMethod* m,
                                      ShadowFrame* new_frame,
                                      const bool* updated_vregs)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
    CodeInfo code_info(method_header);
    uintptr_t native_pc_offset = method_header->NativeQuickPcOffset(GetCurrentQuickFramePc());
    StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
    CodeItemDataAccessor accessor(m->DexInstructionData());
    const size_t number_of_vregs = accessor.RegistersSize();
    uint32_t register_mask = code_info.GetRegisterMaskOf(stack_map);
    BitMemoryRegion stack_mask = code_info.GetStackMaskOf(stack_map);
    DexRegisterMap vreg_map = IsInInlinedFrame()
        ? code_info.GetInlineDexRegisterMapOf(stack_map, GetCurrentInlinedFrame())
        : code_info.GetDexRegisterMapOf(stack_map);

    if (kIsDebugBuild || UNLIKELY(Runtime::Current()->IsJavaDebuggable())) {
      CHECK_EQ(vreg_map.size(), number_of_vregs) << *Thread::Current()
                                                 << "Deopting: " << m->PrettyMethod()
                                                 << " inlined? "
                                                 << std::boolalpha << IsInInlinedFrame();
    }
    if (vreg_map.empty()) {
      return;
    }

    for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
      if (updated_vregs != nullptr && updated_vregs[vreg]) {
        // Keep the value set by debugger.
        continue;
      }

      DexRegisterLocation::Kind location = vreg_map[vreg].GetKind();
      static constexpr uint32_t kDeadValue = 0xEBADDE09;
      uint32_t value = kDeadValue;
      bool is_reference = false;

      switch (location) {
        case DexRegisterLocation::Kind::kInStack: {
          const int32_t offset = vreg_map[vreg].GetStackOffsetInBytes();
          const uint8_t* addr = reinterpret_cast<const uint8_t*>(GetCurrentQuickFrame()) + offset;
          value = *reinterpret_cast<const uint32_t*>(addr);
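          // The stack mask has one bit per 4-byte stack word, hence the byte offset is
          // converted to a word index below.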
          uint32_t bit = (offset >> 2);
          if (bit < stack_mask.size_in_bits() && stack_mask.LoadBit(bit)) {
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kInRegister:
        case DexRegisterLocation::Kind::kInRegisterHigh:
        case DexRegisterLocation::Kind::kInFpuRegister:
        case DexRegisterLocation::Kind::kInFpuRegisterHigh: {
          uint32_t reg = vreg_map[vreg].GetMachineRegister();
          bool result = GetRegisterIfAccessible(reg, location, &value);
          CHECK(result);
          if (location == DexRegisterLocation::Kind::kInRegister) {
            if (((1u << reg) & register_mask) != 0) {
              is_reference = true;
            }
          }
          break;
        }
        case DexRegisterLocation::Kind::kConstant: {
          value = vreg_map[vreg].GetConstant();
          if (value == 0) {
            // Make it a reference for extra safety.
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kNone: {
          break;
        }
        default: {
          LOG(FATAL) << "Unexpected location kind " << vreg_map[vreg].GetKind();
          UNREACHABLE();
        }
      }
      if (is_reference) {
        new_frame->SetVRegReference(vreg, reinterpret_cast<mirror::Object*>(value));
      } else {
        new_frame->SetVReg(vreg, value);
      }
    }
  }

  static VRegKind GetVRegKind(uint16_t reg, const std::vector<int32_t>& kinds) {
    return static_cast<VRegKind>(kinds[reg * 2]);
  }

  QuickExceptionHandler* const exception_handler_;
  ShadowFrame* prev_shadow_frame_;
  bool stacked_shadow_frame_pushed_;
  const bool single_frame_deopt_;
  bool single_frame_done_;
  ArtMethod* single_frame_deopt_method_;
  const OatQuickMethodHeader* single_frame_deopt_quick_method_header_;
  ArtMethod* callee_method_;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizeStackVisitor);
};

void QuickExceptionHandler::PrepareForLongJumpToInvokeStubOrInterpreterBridge() {
  if (full_fragment_done_) {
    // Restore deoptimization exception. When returning from the invoke stub,
    // ArtMethod::Invoke() will see the special exception to know deoptimization
    // is needed.
    self_->SetException(Thread::GetDeoptimizationException());
  } else {
    // PC needs to be of the quick-to-interpreter bridge.
    const int32_t offset =
        GetThreadOffset<kRuntimePointerSize>(kQuickQuickToInterpreterBridge).Int32Value();
    handler_quick_frame_pc_ = *reinterpret_cast<uintptr_t*>(
        reinterpret_cast<uint8_t*>(self_) + offset);
  }
}

void QuickExceptionHandler::DeoptimizeStack() {
  DCHECK(is_deoptimization_);
  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG_STREAM(INFO) << "Deoptimizing: ");
  }

  DeoptimizeStackVisitor visitor(self_, context_, this, false);
  visitor.WalkStack(true);
  PrepareForLongJumpToInvokeStubOrInterpreterBridge();
}

void QuickExceptionHandler::DeoptimizeSingleFrame(DeoptimizationKind kind) {
  DCHECK(is_deoptimization_);

  DeoptimizeStackVisitor visitor(self_, context_, this, true);
  visitor.WalkStack(true);

  // Compiled code made an explicit deoptimization.
  ArtMethod* deopt_method = visitor.GetSingleFrameDeoptMethod();
  SCOPED_TRACE << "Deoptimizing "
               << deopt_method->PrettyMethod()
               << ": " << GetDeoptimizationKindName(kind);

  DCHECK(deopt_method != nullptr);
  if (VLOG_IS_ON(deopt) || kDebugExceptionDelivery) {
    LOG(INFO) << "Single-frame deopting: "
              << deopt_method->PrettyMethod()
              << " due to "
              << GetDeoptimizationKindName(kind);
    DumpFramesWithType(self_, /* details= */ true);
  }
  // When deoptimizing for debug support, the optimized code is still valid and
  // can be reused once debugging support (like breakpoints) is no longer
  // needed for this method.
  if (Runtime::Current()->UseJitCompilation() && (kind != DeoptimizationKind::kDebugging)) {
    Runtime::Current()->GetJit()->GetCodeCache()->InvalidateCompiledCodeFor(
        deopt_method, visitor.GetSingleFrameDeoptQuickMethodHeader());
  } else {
    Runtime::Current()->GetInstrumentation()->InitializeMethodsCode(
        deopt_method, /*aot_code=*/ nullptr);
  }

  PrepareForLongJumpToInvokeStubOrInterpreterBridge();
}

void QuickExceptionHandler::DeoptimizePartialFragmentFixup(uintptr_t return_pc) {
  // At this point, the instrumentation stack has been updated. We need to install
  // the real return pc on the stack, in case the instrumentation stub is stored
  // there, so that the interpreter bridge code can return to the right place. JITed
  // frames in Java debuggable runtimes may not have an instrumentation stub, so
  // update the PC only when required.
  uintptr_t* pc_addr = reinterpret_cast<uintptr_t*>(handler_quick_frame_);
  CHECK(pc_addr != nullptr);
  pc_addr--;
  if (return_pc != 0 &&
      (*reinterpret_cast<uintptr_t*>(pc_addr)) ==
          reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc())) {
    *reinterpret_cast<uintptr_t*>(pc_addr) = return_pc;
  }

  // Architecture-dependent work. This is to get the LR right for x86 and x86-64.
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    // On x86, the return address is on the stack, so just reuse it. Otherwise we would have to
    // change how longjump works.
    handler_quick_frame_ = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(handler_quick_frame_) - sizeof(void*));
  }
}

uintptr_t QuickExceptionHandler::UpdateInstrumentationStack() {
  DCHECK(is_deoptimization_) << "Non-deoptimization handlers should use FindCatch";
  uintptr_t return_pc = 0;
  if (method_tracing_active_) {
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    return_pc = instrumentation->PopInstrumentationStackUntil(
        self_, reinterpret_cast<uintptr_t>(handler_quick_frame_));
  }
  return return_pc;
}

void QuickExceptionHandler::DoLongJump(bool smash_caller_saves) {
  // Place context back on thread so it will be available when we continue.
  self_->ReleaseLongJumpContext(context_);
  context_->SetSP(reinterpret_cast<uintptr_t>(handler_quick_frame_));
  CHECK_NE(handler_quick_frame_pc_, 0u);
  context_->SetPC(handler_quick_frame_pc_);
  context_->SetArg0(handler_quick_arg0_);
  if (smash_caller_saves) {
    context_->SmashCallerSaves();
  }
  if (!is_deoptimization_ &&
      handler_method_header_ != nullptr &&
      handler_method_header_->IsNterpMethodHeader()) {
    context_->SetNterpDexPC(reinterpret_cast<uintptr_t>(
        GetHandlerMethod()->DexInstructions().Insns() + handler_dex_pc_));
  }
  context_->DoLongJump();
  UNREACHABLE();
}

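// Illustrative sample of what this prints with details == false (hypothetical method names):
//   S  void Foo.interpreted()     <- shadow (interpreter) frame
//   Qi int Foo.inlinedCallee()    <- quick frame, inlined
//   Q  void Foo.compiledCaller()  <- quick frame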
void QuickExceptionHandler::DumpFramesWithType(Thread* self, bool details) {
  StackVisitor::WalkStack(
      [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtMethod* method = stack_visitor->GetMethod();
        if (details) {
          LOG(INFO) << "|> pc   = " << std::hex << stack_visitor->GetCurrentQuickFramePc();
          LOG(INFO) << "|> addr = " << std::hex
                    << reinterpret_cast<uintptr_t>(stack_visitor->GetCurrentQuickFrame());
          if (stack_visitor->GetCurrentQuickFrame() != nullptr && method != nullptr) {
            LOG(INFO) << "|> ret  = " << std::hex << stack_visitor->GetReturnPc();
          }
        }
        if (method == nullptr) {
          // Transition, do go on, we want to unwind over bridges, all the way.
          if (details) {
            LOG(INFO) << "N  <transition>";
          }
          return true;
        } else if (method->IsRuntimeMethod()) {
          if (details) {
            LOG(INFO) << "R  " << method->PrettyMethod(true);
          }
          return true;
        } else {
          bool is_shadow = stack_visitor->GetCurrentShadowFrame() != nullptr;
          LOG(INFO) << (is_shadow ? "S" : "Q")
                    << ((!is_shadow && stack_visitor->IsInInlinedFrame()) ? "i" : " ")
                    << " "
                    << method->PrettyMethod(true);
          return true;  // Go on.
        }
      },
      self,
      /* context= */ nullptr,
      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
}

}  // namespace art