summary refs log tree commit diff
path: root/compiler
diff options
context:
space:
mode:
author Santiago Aboy Solanes <solanes@google.com> 2025-01-17 14:57:07 +0000
committer Santiago Aboy Solanes <solanes@google.com> 2025-01-28 01:46:34 -0800
commit babd7207698261b59894f8520dc464526e411b5c (patch)
tree 33886f51a94f72751976636a69df9ccd00df775a /compiler
parent b3ca9f3c87bc935ae56e7647c91e2158971fb47d (diff)
Remove explicit dex_pc from RecordPcInfo
Special cases considered:
* Frame entry (hardcoded to be 0) or block entry.
* Native debuggable + slow paths, which is the only case where we use the instruction's dex_pc.

Test: m test-art-host-gtest
Test: art/test/testrunner/testrunner.py --host --64 -b --optimizing
Change-Id: Ic5e0a6b5106395b891a9a45ea48da39dfb44a0a5
Diffstat (limited to 'compiler')
-rw-r--r--compiler/optimizing/code_generator.cc84
-rw-r--r--compiler/optimizing/code_generator.h18
-rw-r--r--compiler/optimizing/code_generator_arm64.cc20
-rw-r--r--compiler/optimizing/code_generator_arm_vixl.cc18
-rw-r--r--compiler/optimizing/code_generator_riscv64.cc18
-rw-r--r--compiler/optimizing/code_generator_x86.cc18
-rw-r--r--compiler/optimizing/code_generator_x86_64.cc18
-rw-r--r--compiler/optimizing/intrinsics_arm64.cc2
-rw-r--r--compiler/optimizing/intrinsics_riscv64.cc2
-rw-r--r--compiler/optimizing/intrinsics_x86_64.cc2
10 files changed, 112 insertions, 88 deletions
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index cfccdb8934..fb4e7b647b 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -348,7 +348,7 @@ void CodeGenerator::Compile() {
// This ensures that we have correct native line mapping for all native instructions.
// It is necessary to make stepping over a statement work. Otherwise, any initial
// instructions (e.g. moves) would be assumed to be the start of next statement.
- MaybeRecordNativeDebugInfo(/* instruction= */ nullptr, block->GetDexPc());
+ MaybeRecordNativeDebugInfoForBlockEntry(block->GetDexPc());
for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
HInstruction* current = it.Current();
if (current->HasEnvironment()) {
@@ -1138,11 +1138,24 @@ static bool NeedsVregInfo(HInstruction* instruction, bool osr) {
instruction->CanThrowIntoCatchBlock();
}
+void CodeGenerator::RecordPcInfoForFrameOrBlockEntry(uint32_t dex_pc) {
+ StackMapStream* stack_map_stream = GetStackMapStream();
+ stack_map_stream->BeginStackMapEntry(dex_pc, GetAssembler()->CodePosition());
+ stack_map_stream->EndStackMapEntry();
+}
+
void CodeGenerator::RecordPcInfo(HInstruction* instruction,
- uint32_t dex_pc,
SlowPathCode* slow_path,
bool native_debug_info) {
- RecordPcInfo(instruction, dex_pc, GetAssembler()->CodePosition(), slow_path, native_debug_info);
+ // Only for native debuggable apps we take a look at the dex_pc from the instruction itself. For
+ // the regular case, we retrieve the dex_pc from the instruction's environment.
+ DCHECK_IMPLIES(native_debug_info, GetCompilerOptions().GetNativeDebuggable());
+ DCHECK_IMPLIES(!native_debug_info, instruction->HasEnvironment()) << *instruction;
+ RecordPcInfo(instruction,
+ native_debug_info ? instruction->GetDexPc() : kNoDexPc,
+ GetAssembler()->CodePosition(),
+ slow_path,
+ native_debug_info);
}
void CodeGenerator::RecordPcInfo(HInstruction* instruction,
@@ -1150,36 +1163,30 @@ void CodeGenerator::RecordPcInfo(HInstruction* instruction,
uint32_t native_pc,
SlowPathCode* slow_path,
bool native_debug_info) {
- if (instruction != nullptr) {
- // The code generated for some type conversions
- // may call the runtime, thus normally requiring a subsequent
- // call to this method. However, the method verifier does not
- // produce PC information for certain instructions, which are
- // considered "atomic" (they cannot join a GC).
- // Therefore we do not currently record PC information for such
- // instructions. As this may change later, we added this special
- // case so that code generators may nevertheless call
- // CodeGenerator::RecordPcInfo without triggering an error in
- // CodeGenerator::BuildNativeGCMap ("Missing ref for dex pc 0x")
- // thereafter.
- if (instruction->IsTypeConversion()) {
+ DCHECK(instruction != nullptr);
+ // Only for native debuggable apps we take a look at the dex_pc from the instruction itself. For
+ // the regular case, we retrieve the dex_pc from the instruction's environment.
+ DCHECK_IMPLIES(native_debug_info, GetCompilerOptions().GetNativeDebuggable());
+ DCHECK_IMPLIES(!native_debug_info, instruction->HasEnvironment()) << *instruction;
+ // The code generated for some type conversions
+ // may call the runtime, thus normally requiring a subsequent
+ // call to this method. However, the method verifier does not
+ // produce PC information for certain instructions, which are
+ // considered "atomic" (they cannot join a GC).
+ // Therefore we do not currently record PC information for such
+ // instructions. As this may change later, we added this special
+ // case so that code generators may nevertheless call
+ // CodeGenerator::RecordPcInfo without triggering an error in
+ // CodeGenerator::BuildNativeGCMap ("Missing ref for dex pc 0x")
+ // thereafter.
+ if (instruction->IsTypeConversion()) {
+ return;
+ }
+ if (instruction->IsRem()) {
+ DataType::Type type = instruction->AsRem()->GetResultType();
+ if ((type == DataType::Type::kFloat32) || (type == DataType::Type::kFloat64)) {
return;
}
- if (instruction->IsRem()) {
- DataType::Type type = instruction->AsRem()->GetResultType();
- if ((type == DataType::Type::kFloat32) || (type == DataType::Type::kFloat64)) {
- return;
- }
- }
- }
-
- StackMapStream* stack_map_stream = GetStackMapStream();
- if (instruction == nullptr) {
- // For stack overflow checks and native-debug-info entries without dex register
- // mapping (i.e. start of basic block or start of slow path).
- stack_map_stream->BeginStackMapEntry(dex_pc, native_pc);
- stack_map_stream->EndStackMapEntry();
- return;
}
LocationSummary* locations = instruction->GetLocations();
@@ -1220,6 +1227,7 @@ void CodeGenerator::RecordPcInfo(HInstruction* instruction,
? StackMap::Kind::Debug
: (osr ? StackMap::Kind::OSR : StackMap::Kind::Default);
bool needs_vreg_info = NeedsVregInfo(instruction, osr);
+ StackMapStream* stack_map_stream = GetStackMapStream();
stack_map_stream->BeginStackMapEntry(outer_dex_pc,
native_pc,
register_mask,
@@ -1263,6 +1271,16 @@ bool CodeGenerator::HasStackMapAtCurrentPc() {
return stack_map_stream->GetStackMapNativePcOffset(count - 1) == pc;
}
+void CodeGenerator::MaybeRecordNativeDebugInfoForBlockEntry(uint32_t dex_pc) {
+ if (GetCompilerOptions().GetNativeDebuggable() && dex_pc != kNoDexPc) {
+ if (HasStackMapAtCurrentPc()) {
+ // Ensure that we do not collide with the stack map of the previous instruction.
+ GenerateNop();
+ }
+ RecordPcInfoForFrameOrBlockEntry(dex_pc);
+ }
+}
+
void CodeGenerator::MaybeRecordNativeDebugInfo(HInstruction* instruction,
uint32_t dex_pc,
SlowPathCode* slow_path) {
@@ -1271,7 +1289,7 @@ void CodeGenerator::MaybeRecordNativeDebugInfo(HInstruction* instruction,
// Ensure that we do not collide with the stack map of the previous instruction.
GenerateNop();
}
- RecordPcInfo(instruction, dex_pc, slow_path, /* native_debug_info= */ true);
+ RecordPcInfo(instruction, slow_path, /* native_debug_info= */ true);
}
}
@@ -1572,7 +1590,7 @@ bool CodeGenerator::CanMoveNullCheckToUser(HNullCheck* null_check) {
void CodeGenerator::MaybeRecordImplicitNullCheck(HInstruction* instr) {
HNullCheck* null_check = instr->GetImplicitNullCheck();
if (null_check != nullptr) {
- RecordPcInfo(null_check, null_check->GetDexPc(), GetAssembler()->CodePosition());
+ RecordPcInfo(null_check);
}
}
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index 950bae5c8f..f242866412 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -339,12 +339,9 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
return GetFrameSize() - FrameEntrySpillSize() - kShouldDeoptimizeFlagSize;
}
- // Record native to dex mapping for a suspend point. Required by runtime.
- void RecordPcInfo(HInstruction* instruction,
- uint32_t dex_pc,
- uint32_t native_pc,
- SlowPathCode* slow_path = nullptr,
- bool native_debug_info = false);
+ // For stack overflow checks and native-debug-info entries without dex register
+ // mapping i.e. start of basic block or at frame entry.
+ void RecordPcInfoForFrameOrBlockEntry(uint32_t dex_pc = 0);
// Record native to dex mapping for a suspend point.
// The native_pc is used from Assembler::CodePosition.
@@ -352,7 +349,14 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
// Note: As Assembler::CodePosition is target dependent, it does not guarantee the exact native_pc
// for the instruction. If the exact native_pc is required it must be provided explicitly.
void RecordPcInfo(HInstruction* instruction,
+ SlowPathCode* slow_path = nullptr,
+ bool native_debug_info = false);
+
+ // Record native to dex mapping for a suspend point. Required by runtime.
+ // Do not use directly. Use the method above.
+ void RecordPcInfo(HInstruction* instruction,
uint32_t dex_pc,
+ uint32_t native_pc,
SlowPathCode* slow_path = nullptr,
bool native_debug_info = false);
@@ -363,6 +367,7 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
//
// ARM specific behaviour: The recorded native PC might be a branch over pools to instructions
// corresponding the dex PC.
+ void MaybeRecordNativeDebugInfoForBlockEntry(uint32_t dex_pc);
void MaybeRecordNativeDebugInfo(HInstruction* instruction,
uint32_t dex_pc,
SlowPathCode* slow_path = nullptr);
@@ -672,6 +677,7 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
void SetDisassemblyInformation(DisassemblyInformation* info) { disasm_info_ = info; }
DisassemblyInformation* GetDisassemblyInformation() const { return disasm_info_; }
+ // TODO(solanes): Remove `dex_pc` now that it is unused.
virtual void InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
uint32_t dex_pc,
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 90c78bb920..3585808132 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -1491,7 +1491,7 @@ void CodeGeneratorARM64::GenerateFrameEntry() {
kInstructionSize,
CodeBufferCheckScope::kExactSize);
__ ldr(wzr, MemOperand(temp, 0));
- RecordPcInfo(nullptr, 0);
+ RecordPcInfoForFrameOrBlockEntry();
}
}
@@ -2152,7 +2152,7 @@ void CodeGeneratorARM64::StoreRelease(HInstruction* instruction,
void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
- uint32_t dex_pc,
+ [[maybe_unused]] uint32_t dex_pc,
SlowPathCode* slow_path) {
ValidateInvokeRuntime(entrypoint, instruction, slow_path);
@@ -2166,14 +2166,14 @@ void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
__ blr(lr);
if (EntrypointRequiresStackMap(entrypoint)) {
- RecordPcInfo(instruction, dex_pc, slow_path);
+ RecordPcInfo(instruction, slow_path);
}
} else {
// Ensure the pc position is recorded immediately after the `bl` instruction.
ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
EmitEntrypointThunkCall(entrypoint_offset);
if (EntrypointRequiresStackMap(entrypoint)) {
- RecordPcInfo(instruction, dex_pc, slow_path);
+ RecordPcInfo(instruction, slow_path);
}
}
}
@@ -2264,7 +2264,7 @@ void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruct
if (codegen_->CanUseImplicitSuspendCheck()) {
__ Ldr(kImplicitSuspendCheckRegister, MemOperand(kImplicitSuspendCheckRegister));
- codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
+ codegen_->RecordPcInfo(instruction);
if (successor != nullptr) {
__ B(codegen_->GetLabelOf(successor));
}
@@ -4953,7 +4953,7 @@ void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invok
// lr();
__ blr(lr);
DCHECK(!codegen_->IsLeafMethod());
- codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
+ codegen_->RecordPcInfo(invoke);
}
codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
@@ -5110,7 +5110,7 @@ void CodeGeneratorARM64::GenerateStaticOrDirectCall(
CodeBufferCheckScope::kExactSize);
// lr()
__ blr(lr);
- RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ RecordPcInfo(invoke, slow_path);
};
switch (invoke->GetCodePtrLocation()) {
case CodePtrLocation::kCallSelf:
@@ -5121,7 +5121,7 @@ void CodeGeneratorARM64::GenerateStaticOrDirectCall(
kInstructionSize,
CodeBufferCheckScope::kExactSize);
__ bl(&frame_entry_label_);
- RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ RecordPcInfo(invoke, slow_path);
}
break;
case CodePtrLocation::kCallCriticalNative: {
@@ -5224,7 +5224,7 @@ void CodeGeneratorARM64::GenerateVirtualCall(
ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
// lr();
__ blr(lr);
- RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ RecordPcInfo(invoke, slow_path);
}
}
@@ -6276,7 +6276,7 @@ void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Location obj = instruction->GetLocations()->InAt(0);
__ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
- RecordPcInfo(instruction, instruction->GetDexPc());
+ RecordPcInfo(instruction);
}
}
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index e88d14b3eb..4191827fe0 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -2420,7 +2420,7 @@ void CodeGeneratorARMVIXL::GenerateFrameEntry() {
vixl32::kMaxInstructionSizeInBytes,
CodeBufferCheckScope::kMaximumSize);
__ ldr(temp, MemOperand(temp));
- RecordPcInfo(nullptr, 0);
+ RecordPcInfoForFrameOrBlockEntry();
}
uint32_t frame_size = GetFrameSize();
@@ -2787,7 +2787,7 @@ void CodeGeneratorARMVIXL::AddLocationAsTemp(Location location, LocationSummary*
void CodeGeneratorARMVIXL::InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
- uint32_t dex_pc,
+ [[maybe_unused]] uint32_t dex_pc,
SlowPathCode* slow_path) {
ValidateInvokeRuntime(entrypoint, instruction, slow_path);
@@ -2804,7 +2804,7 @@ void CodeGeneratorARMVIXL::InvokeRuntime(QuickEntrypointEnum entrypoint,
CodeBufferCheckScope::kExactSize);
__ blx(lr);
if (EntrypointRequiresStackMap(entrypoint)) {
- RecordPcInfo(instruction, dex_pc, slow_path);
+ RecordPcInfo(instruction, slow_path);
}
} else {
// Ensure the pc position is recorded immediately after the `bl` instruction.
@@ -2813,7 +2813,7 @@ void CodeGeneratorARMVIXL::InvokeRuntime(QuickEntrypointEnum entrypoint,
CodeBufferCheckScope::kExactSize);
EmitEntrypointThunkCall(entrypoint_offset);
if (EntrypointRequiresStackMap(entrypoint)) {
- RecordPcInfo(instruction, dex_pc, slow_path);
+ RecordPcInfo(instruction, slow_path);
}
}
}
@@ -3803,7 +3803,7 @@ void InstructionCodeGeneratorARMVIXL::VisitInvokeInterface(HInvokeInterface* inv
CodeBufferCheckScope::kExactSize);
// LR();
__ blx(lr);
- codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
+ codegen_->RecordPcInfo(invoke);
DCHECK(!codegen_->IsLeafMethod());
}
@@ -6530,7 +6530,7 @@ void CodeGeneratorARMVIXL::GenerateImplicitNullCheck(HNullCheck* instruction) {
vixl32::kMaxInstructionSizeInBytes,
CodeBufferCheckScope::kMaximumSize);
__ ldr(temps.Acquire(), MemOperand(InputRegisterAt(instruction, 0)));
- RecordPcInfo(instruction, instruction->GetDexPc());
+ RecordPcInfo(instruction);
}
void CodeGeneratorARMVIXL::GenerateExplicitNullCheck(HNullCheck* instruction) {
@@ -9631,7 +9631,7 @@ void CodeGeneratorARMVIXL::GenerateStaticOrDirectCall(
CodeBufferCheckScope::kExactSize);
// LR()
__ blx(lr);
- RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ RecordPcInfo(invoke, slow_path);
}
};
switch (invoke->GetCodePtrLocation()) {
@@ -9643,7 +9643,7 @@ void CodeGeneratorARMVIXL::GenerateStaticOrDirectCall(
vixl32::k32BitT32InstructionSizeInBytes,
CodeBufferCheckScope::kMaximumSize);
__ bl(GetFrameEntryLabel());
- RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ RecordPcInfo(invoke, slow_path);
}
break;
case CodePtrLocation::kCallCriticalNative: {
@@ -9733,7 +9733,7 @@ void CodeGeneratorARMVIXL::GenerateVirtualCall(
CodeBufferCheckScope::kExactSize);
// LR();
__ blx(lr);
- RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ RecordPcInfo(invoke, slow_path);
}
}
diff --git a/compiler/optimizing/code_generator_riscv64.cc b/compiler/optimizing/code_generator_riscv64.cc
index c2d82b8033..0c452b59ac 100644
--- a/compiler/optimizing/code_generator_riscv64.cc
+++ b/compiler/optimizing/code_generator_riscv64.cc
@@ -4207,7 +4207,7 @@ void InstructionCodeGeneratorRISCV64::VisitInvokeInterface(HInvokeInterface* ins
// RA();
__ Jalr(RA);
DCHECK(!codegen_->IsLeafMethod());
- codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
+ codegen_->RecordPcInfo(instruction);
}
void LocationsBuilderRISCV64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* instruction) {
@@ -6137,7 +6137,7 @@ void CodeGeneratorRISCV64::GenerateFrameEntry() {
DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
__ Loadw(
Zero, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(InstructionSet::kRiscv64)));
- RecordPcInfo(nullptr, 0);
+ RecordPcInfoForFrameOrBlockEntry();
}
if (!HasEmptyFrame()) {
@@ -6511,7 +6511,7 @@ void CodeGeneratorRISCV64::Finalize() {
// Generate code to invoke a runtime entry point.
void CodeGeneratorRISCV64::InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
- uint32_t dex_pc,
+ [[maybe_unused]] uint32_t dex_pc,
SlowPathCode* slow_path) {
ValidateInvokeRuntime(entrypoint, instruction, slow_path);
@@ -6522,7 +6522,7 @@ void CodeGeneratorRISCV64::InvokeRuntime(QuickEntrypointEnum entrypoint,
__ Loadd(RA, TR, entrypoint_offset.Int32Value());
__ Jalr(RA);
if (EntrypointRequiresStackMap(entrypoint)) {
- RecordPcInfo(instruction, dex_pc, slow_path);
+ RecordPcInfo(instruction, slow_path);
}
}
@@ -6559,7 +6559,7 @@ void CodeGeneratorRISCV64::GenerateImplicitNullCheck(HNullCheck* instruction) {
Location obj = instruction->GetLocations()->InAt(0);
__ Lw(Zero, obj.AsRegister<XRegister>(), 0);
- RecordPcInfo(instruction, instruction->GetDexPc());
+ RecordPcInfo(instruction);
}
void CodeGeneratorRISCV64::GenerateExplicitNullCheck(HNullCheck* instruction) {
@@ -7035,7 +7035,7 @@ void CodeGeneratorRISCV64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* inv
case CodePtrLocation::kCallSelf:
DCHECK(!GetGraph()->HasShouldDeoptimizeFlag());
__ Jal(&frame_entry_label_);
- RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ RecordPcInfo(invoke, slow_path);
break;
case CodePtrLocation::kCallArtMethod:
// RA = callee_method->entry_point_from_quick_compiled_code_;
@@ -7044,7 +7044,7 @@ void CodeGeneratorRISCV64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* inv
ArtMethod::EntryPointFromQuickCompiledCodeOffset(kRiscv64PointerSize).Int32Value());
// RA()
__ Jalr(RA);
- RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ RecordPcInfo(invoke, slow_path);
break;
case CodePtrLocation::kCallCriticalNative: {
size_t out_frame_size =
@@ -7059,7 +7059,7 @@ void CodeGeneratorRISCV64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* inv
__ Loadd(RA, callee_method.AsRegister<XRegister>(), offset.Int32Value());
}
__ Jalr(RA);
- RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ RecordPcInfo(invoke, slow_path);
// The result is returned the same way in native ABI and managed ABI. No result conversion is
// needed, see comments in `Riscv64JniCallingConvention::RequiresSmallResultTypeExtension()`.
if (out_frame_size != 0u) {
@@ -7143,7 +7143,7 @@ void CodeGeneratorRISCV64::GenerateVirtualCall(HInvokeVirtual* invoke,
__ Loadd(RA, temp, entry_point.Int32Value());
// RA();
__ Jalr(RA);
- RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ RecordPcInfo(invoke, slow_path);
}
void CodeGeneratorRISCV64::MoveFromReturnRegister(Location trg, DataType::Type type) {
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 6db49c7771..370d8fed8a 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -1105,12 +1105,12 @@ size_t CodeGeneratorX86::RestoreFloatingPointRegister(size_t stack_index, uint32
void CodeGeneratorX86::InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
- uint32_t dex_pc,
+ [[maybe_unused]] uint32_t dex_pc,
SlowPathCode* slow_path) {
ValidateInvokeRuntime(entrypoint, instruction, slow_path);
GenerateInvokeRuntime(GetThreadOffset<kX86PointerSize>(entrypoint).Int32Value());
if (EntrypointRequiresStackMap(entrypoint)) {
- RecordPcInfo(instruction, dex_pc, slow_path);
+ RecordPcInfo(instruction, slow_path);
}
}
@@ -1429,7 +1429,7 @@ void CodeGeneratorX86::GenerateFrameEntry() {
if (!skip_overflow_check) {
size_t reserved_bytes = GetStackOverflowReservedBytes(InstructionSet::kX86);
__ testl(EAX, Address(ESP, -static_cast<int32_t>(reserved_bytes)));
- RecordPcInfo(nullptr, 0);
+ RecordPcInfoForFrameOrBlockEntry();
}
if (!HasEmptyFrame()) {
@@ -2944,7 +2944,7 @@ void InstructionCodeGeneratorX86::VisitInvokeInterface(HInvokeInterface* invoke)
ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86PointerSize).Int32Value()));
DCHECK(!codegen_->IsLeafMethod());
- codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
+ codegen_->RecordPcInfo(invoke);
}
void LocationsBuilderX86::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
@@ -5616,7 +5616,7 @@ void CodeGeneratorX86::GenerateStaticOrDirectCall(
case CodePtrLocation::kCallSelf:
DCHECK(!GetGraph()->HasShouldDeoptimizeFlag());
__ call(GetFrameEntryLabel());
- RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ RecordPcInfo(invoke, slow_path);
break;
case CodePtrLocation::kCallCriticalNative: {
size_t out_frame_size =
@@ -5633,7 +5633,7 @@ void CodeGeneratorX86::GenerateStaticOrDirectCall(
__ call(Address(callee_method.AsRegister<Register>(),
ArtMethod::EntryPointFromJniOffset(kX86PointerSize).Int32Value()));
}
- RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ RecordPcInfo(invoke, slow_path);
if (out_frame_size == 0u && DataType::IsFloatingPointType(invoke->GetType())) {
// Create space for conversion.
out_frame_size = 8u;
@@ -5679,7 +5679,7 @@ void CodeGeneratorX86::GenerateStaticOrDirectCall(
__ call(Address(callee_method.AsRegister<Register>(),
ArtMethod::EntryPointFromQuickCompiledCodeOffset(
kX86PointerSize).Int32Value()));
- RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ RecordPcInfo(invoke, slow_path);
break;
}
@@ -5718,7 +5718,7 @@ void CodeGeneratorX86::GenerateVirtualCall(
// call temp->GetEntryPoint();
__ call(Address(
temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86PointerSize).Int32Value()));
- RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ RecordPcInfo(invoke, slow_path);
}
void CodeGeneratorX86::RecordBootImageIntrinsicPatch(HX86ComputeBaseMethodAddress* method_address,
@@ -6482,7 +6482,7 @@ void CodeGeneratorX86::GenerateImplicitNullCheck(HNullCheck* instruction) {
Location obj = locations->InAt(0);
__ testl(EAX, Address(obj.AsRegister<Register>(), 0));
- RecordPcInfo(instruction, instruction->GetDexPc());
+ RecordPcInfo(instruction);
}
void CodeGeneratorX86::GenerateExplicitNullCheck(HNullCheck* instruction) {
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index dbbf80c744..4c180be5fb 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -1209,7 +1209,7 @@ void CodeGeneratorX86_64::GenerateStaticOrDirectCall(
case CodePtrLocation::kCallSelf:
DCHECK(!GetGraph()->HasShouldDeoptimizeFlag());
__ call(&frame_entry_label_);
- RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ RecordPcInfo(invoke, slow_path);
break;
case CodePtrLocation::kCallCriticalNative: {
size_t out_frame_size =
@@ -1225,7 +1225,7 @@ void CodeGeneratorX86_64::GenerateStaticOrDirectCall(
__ call(Address(callee_method.AsRegister<CpuRegister>(),
ArtMethod::EntryPointFromJniOffset(kX86_64PointerSize).SizeValue()));
}
- RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ RecordPcInfo(invoke, slow_path);
// Zero-/sign-extend the result when needed due to native and managed ABI mismatch.
switch (invoke->GetType()) {
case DataType::Type::kBool:
@@ -1260,7 +1260,7 @@ void CodeGeneratorX86_64::GenerateStaticOrDirectCall(
__ call(Address(callee_method.AsRegister<CpuRegister>(),
ArtMethod::EntryPointFromQuickCompiledCodeOffset(
kX86_64PointerSize).SizeValue()));
- RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ RecordPcInfo(invoke, slow_path);
break;
}
@@ -1300,7 +1300,7 @@ void CodeGeneratorX86_64::GenerateVirtualCall(
// call temp->GetEntryPoint();
__ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
kX86_64PointerSize).SizeValue()));
- RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ RecordPcInfo(invoke, slow_path);
}
void CodeGeneratorX86_64::RecordBootImageIntrinsicPatch(uint32_t intrinsic_data) {
@@ -1562,12 +1562,12 @@ size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uin
void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
- uint32_t dex_pc,
+ [[maybe_unused]] uint32_t dex_pc,
SlowPathCode* slow_path) {
ValidateInvokeRuntime(entrypoint, instruction, slow_path);
GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
if (EntrypointRequiresStackMap(entrypoint)) {
- RecordPcInfo(instruction, dex_pc, slow_path);
+ RecordPcInfo(instruction, slow_path);
}
}
@@ -1892,7 +1892,7 @@ void CodeGeneratorX86_64::GenerateFrameEntry() {
if (!skip_overflow_check) {
size_t reserved_bytes = GetStackOverflowReservedBytes(InstructionSet::kX86_64);
__ testq(CpuRegister(RAX), Address(CpuRegister(RSP), -static_cast<int32_t>(reserved_bytes)));
- RecordPcInfo(nullptr, 0);
+ RecordPcInfoForFrameOrBlockEntry();
}
if (!HasEmptyFrame()) {
@@ -3270,7 +3270,7 @@ void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invo
temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));
DCHECK(!codegen_->IsLeafMethod());
- codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
+ codegen_->RecordPcInfo(invoke);
}
void LocationsBuilderX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
@@ -5798,7 +5798,7 @@ void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
Location obj = locations->InAt(0);
__ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
- RecordPcInfo(instruction, instruction->GetDexPc());
+ RecordPcInfo(instruction);
}
void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index a4463cb248..db8d6cac05 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -6118,7 +6118,7 @@ void IntrinsicCodeGeneratorARM64::VisitMethodHandleInvokeExact(HInvoke* invoke)
Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
__ Ldr(lr, MemOperand(method, entry_point.SizeValue()));
__ Blr(lr);
- codegen_->RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ codegen_->RecordPcInfo(invoke, slow_path);
__ Bind(slow_path->GetExitLabel());
}
diff --git a/compiler/optimizing/intrinsics_riscv64.cc b/compiler/optimizing/intrinsics_riscv64.cc
index 385ac0e857..c750633450 100644
--- a/compiler/optimizing/intrinsics_riscv64.cc
+++ b/compiler/optimizing/intrinsics_riscv64.cc
@@ -5814,7 +5814,7 @@ void IntrinsicCodeGeneratorRISCV64::VisitMethodHandleInvokeExact(HInvoke* invoke
Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kRiscv64PointerSize);
__ Loadd(RA, method, entry_point.SizeValue());
__ Jalr(RA);
- codegen_->RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ codegen_->RecordPcInfo(invoke, slow_path);
__ Bind(slow_path->GetExitLabel());
}
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index 18963bf135..14c1ef90aa 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -4375,7 +4375,7 @@ void IntrinsicCodeGeneratorX86_64::VisitMethodHandleInvokeExact(HInvoke* invoke)
__ call(Address(
method,
ArtMethod::EntryPointFromQuickCompiledCodeOffset(art::PointerSize::k64).SizeValue()));
- codegen_->RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
+ codegen_->RecordPcInfo(invoke, slow_path);
__ Bind(slow_path->GetExitLabel());
}