summaryrefslogtreecommitdiff
path: root/runtime/stack.h
diff options
context:
space:
mode:
Diffstat (limited to 'runtime/stack.h')
-rw-r--r--runtime/stack.h41
1 file changed, 32 insertions, 9 deletions
diff --git a/runtime/stack.h b/runtime/stack.h
index 1b00b54acb..a4bcf17ca5 100644
--- a/runtime/stack.h
+++ b/runtime/stack.h
@@ -58,11 +58,6 @@ enum VRegKind {
};
std::ostream& operator<<(std::ostream& os, VRegKind rhs);
-// Size in bytes of the should_deoptimize flag on stack.
-// We just need 4 bytes for our purpose regardless of the architecture. Frame size
-// calculation will automatically do alignment for the final frame size.
-static constexpr size_t kShouldDeoptimizeFlagSize = 4;
-
/*
* Our current stack layout.
* The Dalvik registers come first, followed by the
@@ -197,6 +192,12 @@ class StackVisitor {
uint32_t GetDexPc(bool abort_on_failure = true) const REQUIRES_SHARED(Locks::mutator_lock_);
+ // Returns a vector of the inlined dex pcs, in order from outermost to innermost, but with the
+ // innermost one replaced by `handler_dex_pc`. In essence: (outermost dex pc, mid dex pc #1, ...,
+ // mid dex pc #n-1, `handler_dex_pc`).
+ std::vector<uint32_t> ComputeDexPcList(uint32_t handler_dex_pc) const
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
ObjPtr<mirror::Object> GetThisObject() const REQUIRES_SHARED(Locks::mutator_lock_);
size_t GetNativePcOffset() const REQUIRES_SHARED(Locks::mutator_lock_);
@@ -230,9 +231,8 @@ class StackVisitor {
uint16_t vreg,
VRegKind kind,
uint32_t* val,
- std::optional<DexRegisterLocation> location =
- std::optional<DexRegisterLocation>()) const
- REQUIRES_SHARED(Locks::mutator_lock_);
+ std::optional<DexRegisterLocation> location = std::optional<DexRegisterLocation>(),
+ bool need_full_register_list = false) const REQUIRES_SHARED(Locks::mutator_lock_);
bool GetVRegPair(ArtMethod* m, uint16_t vreg, VRegKind kind_lo, VRegKind kind_hi,
uint64_t* val) const
@@ -268,10 +268,16 @@ class StackVisitor {
return !current_inline_frames_.empty();
}
+ size_t InlineDepth() const { return current_inline_frames_.size(); }
+
InlineInfo GetCurrentInlinedFrame() const {
return current_inline_frames_.back();
}
+ const BitTableRange<InlineInfo>& GetCurrentInlinedFrames() const {
+ return current_inline_frames_;
+ }
+
uintptr_t GetCurrentQuickFramePc() const {
return cur_quick_frame_pc_;
}
@@ -302,10 +308,26 @@ class StackVisitor {
*should_deoptimize_addr = *should_deoptimize_addr | static_cast<uint8_t>(value);
};
+ void UnsetShouldDeoptimizeFlag(DeoptimizeFlagValue value) REQUIRES_SHARED(Locks::mutator_lock_) {
+ uint8_t* should_deoptimize_addr = GetShouldDeoptimizeFlagAddr();
+ *should_deoptimize_addr = *should_deoptimize_addr & ~static_cast<uint8_t>(value);
+ };
+
uint8_t GetShouldDeoptimizeFlag() const REQUIRES_SHARED(Locks::mutator_lock_) {
return *GetShouldDeoptimizeFlagAddr();
}
+ bool ShouldForceDeoptForRedefinition() const REQUIRES_SHARED(Locks::mutator_lock_) {
+ uint8_t should_deopt_flag = GetShouldDeoptimizeFlag();
+ return (should_deopt_flag &
+ static_cast<uint8_t>(DeoptimizeFlagValue::kForceDeoptForRedefinition)) != 0;
+ }
+
+ // Return the number of dex registers in the map from the outermost frame down to the number of
+ // inlined frames indicated by `depth`. If `depth` is 0, grab just the registers from the
+ // outermost level. If it is greater than 0, grab as many inline frames as `depth` indicates.
+ size_t GetNumberOfRegisters(CodeInfo* code_info, int depth) const;
+
private:
// Private constructor known in the case that num_frames_ has already been computed.
StackVisitor(Thread* thread,
@@ -334,7 +356,8 @@ class StackVisitor {
bool GetVRegFromOptimizedCode(ArtMethod* m,
uint16_t vreg,
VRegKind kind,
- uint32_t* val) const
+ uint32_t* val,
+ bool need_full_register_list = false) const
REQUIRES_SHARED(Locks::mutator_lock_);
bool GetVRegPairFromDebuggerShadowFrame(uint16_t vreg,