summary | refs | log | tree | commit | diff
path: root/runtime/stack.cc
diff options
context:
space:
mode:
Diffstat (limited to 'runtime/stack.cc')
-rw-r--r--  runtime/stack.cc  42
1 files changed, 32 insertions, 10 deletions
diff --git a/runtime/stack.cc b/runtime/stack.cc
index eb9c661d18..80a563b57f 100644
--- a/runtime/stack.cc
+++ b/runtime/stack.cc
@@ -139,9 +139,9 @@ mirror::Object* StackVisitor::GetThisObject() const {
} else {
uint16_t reg = accessor.RegistersSize() - accessor.InsSize();
uint32_t value = 0;
- bool success = GetVReg(m, reg, kReferenceVReg, &value);
- // We currently always guarantee the `this` object is live throughout the method.
- CHECK(success) << "Failed to read the this object in " << ArtMethod::PrettyMethod(m);
+ if (!GetVReg(m, reg, kReferenceVReg, &value)) {
+ return nullptr;
+ }
return reinterpret_cast<mirror::Object*>(value);
}
}
@@ -223,20 +223,39 @@ bool StackVisitor::GetVRegFromOptimizedCode(ArtMethod* m, uint16_t vreg, VRegKin
switch (location_kind) {
case DexRegisterLocation::Kind::kInStack: {
const int32_t offset = dex_register_map[vreg].GetStackOffsetInBytes();
+ BitMemoryRegion stack_mask = code_info.GetStackMaskOf(stack_map);
+ if (kind == kReferenceVReg && !stack_mask.LoadBit(offset / kFrameSlotSize)) {
+ return false;
+ }
const uint8_t* addr = reinterpret_cast<const uint8_t*>(cur_quick_frame_) + offset;
*val = *reinterpret_cast<const uint32_t*>(addr);
return true;
}
- case DexRegisterLocation::Kind::kInRegister:
+ case DexRegisterLocation::Kind::kInRegister: {
+ uint32_t register_mask = code_info.GetRegisterMaskOf(stack_map);
+ uint32_t reg = dex_register_map[vreg].GetMachineRegister();
+ if (kind == kReferenceVReg && !(register_mask & (1 << reg))) {
+ return false;
+ }
+ return GetRegisterIfAccessible(reg, kind, val);
+ }
case DexRegisterLocation::Kind::kInRegisterHigh:
case DexRegisterLocation::Kind::kInFpuRegister:
case DexRegisterLocation::Kind::kInFpuRegisterHigh: {
+ if (kind == kReferenceVReg) {
+ return false;
+ }
uint32_t reg = dex_register_map[vreg].GetMachineRegister();
return GetRegisterIfAccessible(reg, kind, val);
}
- case DexRegisterLocation::Kind::kConstant:
- *val = dex_register_map[vreg].GetConstant();
+ case DexRegisterLocation::Kind::kConstant: {
+ uint32_t result = dex_register_map[vreg].GetConstant();
+ if (kind == kReferenceVReg && result != 0) {
+ return false;
+ }
+ *val = result;
return true;
+ }
case DexRegisterLocation::Kind::kNone:
return false;
default:
@@ -549,7 +568,9 @@ void StackVisitor::SetMethod(ArtMethod* method) {
cur_shadow_frame_->SetMethod(method);
} else {
DCHECK(cur_quick_frame_ != nullptr);
- CHECK(!IsInInlinedFrame()) << "We do not support setting inlined method's ArtMethod!";
+ CHECK(!IsInInlinedFrame()) << "We do not support setting inlined method's ArtMethod: "
+ << GetMethod()->PrettyMethod() << " is inlined into "
+ << GetOuterMethod()->PrettyMethod();
*cur_quick_frame_ = method;
}
}
@@ -795,13 +816,14 @@ void StackVisitor::WalkStack(bool include_transitions) {
// JNI methods cannot have any inlined frames.
&& !method->IsNative()) {
DCHECK_NE(cur_quick_frame_pc_, 0u);
- CodeInfo code_info(cur_oat_quick_method_header_, CodeInfo::DecodeFlags::InlineInfoOnly);
+ current_code_info_ = CodeInfo(cur_oat_quick_method_header_,
+ CodeInfo::DecodeFlags::InlineInfoOnly);
uint32_t native_pc_offset =
cur_oat_quick_method_header_->NativeQuickPcOffset(cur_quick_frame_pc_);
- StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
+ StackMap stack_map = current_code_info_.GetStackMapForNativePcOffset(native_pc_offset);
if (stack_map.IsValid() && stack_map.HasInlineInfo()) {
DCHECK_EQ(current_inline_frames_.size(), 0u);
- for (current_inline_frames_ = code_info.GetInlineInfosOf(stack_map);
+ for (current_inline_frames_ = current_code_info_.GetInlineInfosOf(stack_map);
!current_inline_frames_.empty();
current_inline_frames_.pop_back()) {
bool should_continue = VisitFrame();