summary refs log tree commit diff
diff options
context:
space:
mode:
author Santiago Aboy Solanes <solanes@google.com> 2025-01-17 15:28:44 +0000
committer Treehugger Robot <android-test-infra-autosubmit@system.gserviceaccount.com> 2025-01-28 08:19:01 -0800
commit a4bb8c918283b24da3939faf44d13db3bab597d8 (patch)
tree 87ef6774e1fa4c760dfaa389dc7c66052e4a5910
parent 128e41a29a5f75bbb0b69fd57e4bd0a1b9a2cf37 (diff)
Remove unused dex_pc from InvokeRuntime
Bug: 392802982
Test: art/test/testrunner/testrunner.py --host --64 -b --optimizing
Test: m test-art-host-gtest
Change-Id: I6e40215a5b1b18223c5f17e9e0ac70e05515fa94
-rw-r--r--compiler/optimizing/code_generator.cc20
-rw-r--r--compiler/optimizing/code_generator.h5
-rw-r--r--compiler/optimizing/code_generator_arm64.cc64
-rw-r--r--compiler/optimizing/code_generator_arm64.h1
-rw-r--r--compiler/optimizing/code_generator_arm_vixl.cc79
-rw-r--r--compiler/optimizing/code_generator_arm_vixl.h1
-rw-r--r--compiler/optimizing/code_generator_riscv64.cc55
-rw-r--r--compiler/optimizing/code_generator_riscv64.h1
-rw-r--r--compiler/optimizing/code_generator_x86.cc68
-rw-r--r--compiler/optimizing/code_generator_x86.h1
-rw-r--r--compiler/optimizing/code_generator_x86_64.cc65
-rw-r--r--compiler/optimizing/code_generator_x86_64.h1
-rw-r--r--compiler/optimizing/intrinsics_arm64.cc15
-rw-r--r--compiler/optimizing/intrinsics_arm_vixl.cc14
-rw-r--r--compiler/optimizing/intrinsics_riscv64.cc49
-rw-r--r--compiler/optimizing/intrinsics_x86.cc12
-rw-r--r--compiler/optimizing/intrinsics_x86_64.cc15
17 files changed, 189 insertions, 277 deletions
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index fb4e7b647b..79386defae 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -541,7 +541,7 @@ void CodeGenerator::GenerateInvokeStaticOrDirectRuntimeCall(
UNREACHABLE();
}
- InvokeRuntime(entrypoint, invoke, invoke->GetDexPc(), slow_path);
+ InvokeRuntime(entrypoint, invoke, slow_path);
}
void CodeGenerator::GenerateInvokeUnresolvedRuntimeCall(HInvokeUnresolved* invoke) {
MethodReference method_reference(invoke->GetMethodReference());
@@ -570,7 +570,7 @@ void CodeGenerator::GenerateInvokeUnresolvedRuntimeCall(HInvokeUnresolved* invok
LOG(FATAL) << "Unexpected invoke type: " << invoke->GetInvokeType();
UNREACHABLE();
}
- InvokeRuntime(entrypoint, invoke, invoke->GetDexPc(), nullptr);
+ InvokeRuntime(entrypoint, invoke);
}
void CodeGenerator::GenerateInvokePolymorphicCall(HInvokePolymorphic* invoke,
@@ -579,13 +579,13 @@ void CodeGenerator::GenerateInvokePolymorphicCall(HInvokePolymorphic* invoke,
// method index) since it requires multiple info from the instruction (registers A, B, H). Not
// using the reservation has no effect on the registers used in the runtime call.
QuickEntrypointEnum entrypoint = kQuickInvokePolymorphic;
- InvokeRuntime(entrypoint, invoke, invoke->GetDexPc(), slow_path);
+ InvokeRuntime(entrypoint, invoke, slow_path);
}
void CodeGenerator::GenerateInvokeCustomCall(HInvokeCustom* invoke) {
MoveConstant(invoke->GetLocations()->GetTemp(0), invoke->GetCallSiteIndex());
QuickEntrypointEnum entrypoint = kQuickInvokeCustom;
- InvokeRuntime(entrypoint, invoke, invoke->GetDexPc(), nullptr);
+ InvokeRuntime(entrypoint, invoke);
}
void CodeGenerator::CreateStringBuilderAppendLocations(HStringBuilderAppend* instruction,
@@ -690,7 +690,7 @@ void CodeGenerator::GenerateUnresolvedFieldAccess(
HInstruction* field_access,
DataType::Type field_type,
uint32_t field_index,
- uint32_t dex_pc,
+ [[maybe_unused]] uint32_t dex_pc,
const FieldAccessCallingConvention& calling_convention) {
LocationSummary* locations = field_access->GetLocations();
@@ -754,7 +754,7 @@ void CodeGenerator::GenerateUnresolvedFieldAccess(
default:
LOG(FATAL) << "Invalid type " << field_type;
}
- InvokeRuntime(entrypoint, field_access, dex_pc, nullptr);
+ InvokeRuntime(entrypoint, field_access);
if (is_get && DataType::IsFloatingPointType(field_type)) {
MoveLocation(locations->Out(), calling_convention.GetReturnLocation(field_type), field_type);
@@ -780,10 +780,10 @@ void CodeGenerator::GenerateLoadClassRuntimeCall(HLoadClass* cls) {
MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
if (cls->NeedsAccessCheck()) {
CheckEntrypointTypes<kQuickResolveTypeAndVerifyAccess, void*, uint32_t>();
- InvokeRuntime(kQuickResolveTypeAndVerifyAccess, cls, cls->GetDexPc());
+ InvokeRuntime(kQuickResolveTypeAndVerifyAccess, cls);
} else {
CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
- InvokeRuntime(kQuickResolveType, cls, cls->GetDexPc());
+ InvokeRuntime(kQuickResolveType, cls);
}
}
@@ -804,7 +804,7 @@ void CodeGenerator::GenerateLoadMethodHandleRuntimeCall(HLoadMethodHandle* metho
LocationSummary* locations = method_handle->GetLocations();
MoveConstant(locations->GetTemp(0), method_handle->GetMethodHandleIndex());
CheckEntrypointTypes<kQuickResolveMethodHandle, void*, uint32_t>();
- InvokeRuntime(kQuickResolveMethodHandle, method_handle, method_handle->GetDexPc());
+ InvokeRuntime(kQuickResolveMethodHandle, method_handle);
}
void CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(
@@ -824,7 +824,7 @@ void CodeGenerator::GenerateLoadMethodTypeRuntimeCall(HLoadMethodType* method_ty
LocationSummary* locations = method_type->GetLocations();
MoveConstant(locations->GetTemp(0), method_type->GetProtoIndex().index_);
CheckEntrypointTypes<kQuickResolveMethodType, void*, uint32_t>();
- InvokeRuntime(kQuickResolveMethodType, method_type, method_type->GetDexPc());
+ InvokeRuntime(kQuickResolveMethodType, method_type);
}
static uint32_t GetBootImageOffsetImpl(const void* object, ImageHeader::ImageSections section) {
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index f242866412..741d3fb589 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -641,11 +641,12 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
DataType::Type field_type,
const FieldAccessCallingConvention& calling_convention);
+// TODO(solanes): Remove dex_pc from this method
void GenerateUnresolvedFieldAccess(
HInstruction* field_access,
DataType::Type field_type,
uint32_t field_index,
- uint32_t dex_pc,
+ [[maybe_unused]] uint32_t dex_pc,
const FieldAccessCallingConvention& calling_convention);
static void CreateLoadClassRuntimeCallLocationSummary(HLoadClass* cls,
@@ -677,10 +678,8 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
void SetDisassemblyInformation(DisassemblyInformation* info) { disasm_info_ = info; }
DisassemblyInformation* GetDisassemblyInformation() const { return disasm_info_; }
- // TODO(solanes): Remove `dex_pc` now that it is unused.
virtual void InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
- uint32_t dex_pc,
SlowPathCode* slow_path = nullptr) = 0;
// Check if the desired_string_load_kind is supported. If it is, return it,
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 3585808132..b6fe630f64 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -243,7 +243,7 @@ class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
? kQuickThrowStringBounds
: kQuickThrowArrayBounds;
- arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
+ arm64_codegen->InvokeRuntime(entrypoint, instruction_, this);
CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
}
@@ -263,7 +263,7 @@ class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
void EmitNativeCode(CodeGenerator* codegen) override {
CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
__ Bind(GetEntryLabel());
- arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
+ arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, this);
CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
}
@@ -291,10 +291,7 @@ class LoadMethodTypeSlowPathARM64 : public SlowPathCodeARM64 {
const dex::ProtoIndex proto_index = instruction_->AsLoadMethodType()->GetProtoIndex();
__ Mov(calling_convention.GetRegisterAt(0).W(), proto_index.index_);
- arm64_codegen->InvokeRuntime(kQuickResolveMethodType,
- instruction_,
- instruction_->GetDexPc(),
- this);
+ arm64_codegen->InvokeRuntime(kQuickResolveMethodType, instruction_, this);
CheckEntrypointTypes<kQuickResolveMethodType, void*, uint32_t>();
DataType::Type type = instruction_->GetType();
@@ -322,7 +319,6 @@ class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
void EmitNativeCode(CodeGenerator* codegen) override {
LocationSummary* locations = instruction_->GetLocations();
Location out = locations->Out();
- const uint32_t dex_pc = instruction_->GetDexPc();
bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();
@@ -340,10 +336,10 @@ class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
__ Mov(calling_convention.GetRegisterAt(0).W(), type_index.index_);
if (cls_->NeedsAccessCheck()) {
CheckEntrypointTypes<kQuickResolveTypeAndVerifyAccess, void*, uint32_t>();
- arm64_codegen->InvokeRuntime(kQuickResolveTypeAndVerifyAccess, instruction_, dex_pc, this);
+ arm64_codegen->InvokeRuntime(kQuickResolveTypeAndVerifyAccess, instruction_, this);
} else {
CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
- arm64_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
+ arm64_codegen->InvokeRuntime(kQuickResolveType, instruction_, this);
}
// If we also must_do_clinit, the resolved type is now in the correct register.
} else {
@@ -354,7 +350,7 @@ class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
cls_->GetType());
}
if (must_do_clinit) {
- arm64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
+ arm64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, this);
CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
}
@@ -393,7 +389,7 @@ class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
InvokeRuntimeCallingConvention calling_convention;
const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
__ Mov(calling_convention.GetRegisterAt(0).W(), string_index.index_);
- arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
+ arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, this);
CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
DataType::Type type = instruction_->GetType();
arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);
@@ -420,10 +416,7 @@ class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
// Live registers will be restored in the catch block if caught.
SaveLiveRegisters(codegen, instruction_->GetLocations());
}
- arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
- instruction_,
- instruction_->GetDexPc(),
- this);
+ arm64_codegen->InvokeRuntime(kQuickThrowNullPointer, instruction_, this);
CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
}
@@ -445,7 +438,7 @@ class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
__ Bind(GetEntryLabel());
SaveLiveRegisters(codegen, locations); // Only saves live vector regs for SIMD.
- arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
+ arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, this);
CheckEntrypointTypes<kQuickTestSuspend, void, void>();
RestoreLiveRegisters(codegen, locations); // Only restores live vector regs for SIMD.
if (successor_ == nullptr) {
@@ -487,7 +480,6 @@ class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
DCHECK(instruction_->IsCheckCast()
|| !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
- uint32_t dex_pc = instruction_->GetDexPc();
__ Bind(GetEntryLabel());
@@ -505,14 +497,14 @@ class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
LocationFrom(calling_convention.GetRegisterAt(1)),
DataType::Type::kReference);
if (instruction_->IsInstanceOf()) {
- arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
+ arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, this);
CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
DataType::Type ret_type = instruction_->GetType();
Location ret_loc = calling_convention.GetReturnLocation(ret_type);
arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
} else {
DCHECK(instruction_->IsCheckCast());
- arm64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
+ arm64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, this);
CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
}
@@ -544,7 +536,7 @@ class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
InvokeRuntimeCallingConvention calling_convention;
__ Mov(calling_convention.GetRegisterAt(0),
static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
- arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
+ arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, this);
CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
}
@@ -583,7 +575,7 @@ class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
- arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
+ arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, this);
CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
RestoreLiveRegisters(codegen, locations);
__ B(GetExitLabel());
@@ -768,10 +760,7 @@ class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
}
- arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
- instruction_,
- instruction_->GetDexPc(),
- this);
+ arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow, instruction_, this);
CheckEntrypointTypes<
kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
@@ -849,10 +838,7 @@ class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
// which would emit a 32-bit move, as `type` is a (32-bit wide)
// reference type (`DataType::Type::kReference`).
__ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
- arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
- instruction_,
- instruction_->GetDexPc(),
- this);
+ arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow, instruction_, this);
CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
@@ -910,7 +896,7 @@ class MethodEntryExitHooksSlowPathARM64 : public SlowPathCodeARM64 {
if (instruction_->IsMethodExitHook()) {
__ Mov(vixl::aarch64::x4, arm64_codegen->GetFrameSize());
}
- arm64_codegen->InvokeRuntime(entry_point, instruction_, instruction_->GetDexPc(), this);
+ arm64_codegen->InvokeRuntime(entry_point, instruction_, this);
RestoreLiveRegisters(codegen, locations);
__ B(GetExitLabel());
}
@@ -2152,7 +2138,6 @@ void CodeGeneratorARM64::StoreRelease(HInstruction* instruction,
void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
- [[maybe_unused]] uint32_t dex_pc,
SlowPathCode* slow_path) {
ValidateInvokeRuntime(entrypoint, instruction, slow_path);
@@ -4864,7 +4849,7 @@ void CodeGeneratorARM64::MaybeGenerateInlineCacheCheck(HInstruction* instruction
// Fast path for a monomorphic cache.
__ Cmp(klass.W(), w9);
__ B(eq, &done);
- InvokeRuntime(kQuickUpdateInlineCache, instruction, instruction->GetDexPc());
+ InvokeRuntime(kQuickUpdateInlineCache, instruction);
__ Bind(&done);
} else {
// This is unexpected, but we don't guarantee stable compilation across
@@ -6092,7 +6077,7 @@ void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) NO_THREAD
InvokeRuntimeCallingConvention calling_convention;
DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(), out.GetCode());
__ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex().index_);
- codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
+ codegen_->InvokeRuntime(kQuickResolveString, load);
CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
}
@@ -6115,8 +6100,7 @@ void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction
void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
- instruction,
- instruction->GetDexPc());
+ instruction);
if (instruction->IsEnter()) {
CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
} else {
@@ -6215,7 +6199,7 @@ void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
// Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
- codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(entrypoint, instruction);
CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
}
@@ -6229,7 +6213,7 @@ void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
}
void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
- codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction);
CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
}
@@ -6471,7 +6455,7 @@ void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
case DataType::Type::kFloat64: {
QuickEntrypointEnum entrypoint =
(type == DataType::Type::kFloat32) ? kQuickFmodf : kQuickFmod;
- codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc());
+ codegen_->InvokeRuntime(entrypoint, rem);
if (type == DataType::Type::kFloat32) {
CheckEntrypointTypes<kQuickFmodf, float, float, float>();
} else {
@@ -6656,7 +6640,7 @@ void LocationsBuilderARM64::VisitStringBuilderAppend(HStringBuilderAppend* instr
void InstructionCodeGeneratorARM64::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
__ Mov(w0, instruction->GetFormat()->GetValue());
- codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction);
}
void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
@@ -6763,7 +6747,7 @@ void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
}
void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
- codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(kQuickDeliverException, instruction);
CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index 78bbff2a49..4566cdf0ca 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -797,7 +797,6 @@ class CodeGeneratorARM64 : public CodeGenerator {
// Generate code to invoke a runtime entry point.
void InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
- uint32_t dex_pc,
SlowPathCode* slow_path = nullptr) override;
// Generate code to invoke a runtime entry point, but do not record
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index 4191827fe0..20e9d4e0ff 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -375,10 +375,7 @@ class NullCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
// Live registers will be restored in the catch block if caught.
SaveLiveRegisters(codegen, instruction_->GetLocations());
}
- arm_codegen->InvokeRuntime(kQuickThrowNullPointer,
- instruction_,
- instruction_->GetDexPc(),
- this);
+ arm_codegen->InvokeRuntime(kQuickThrowNullPointer, instruction_, this);
CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
}
@@ -398,7 +395,7 @@ class DivZeroCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
void EmitNativeCode(CodeGenerator* codegen) override {
CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
__ Bind(GetEntryLabel());
- arm_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
+ arm_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, this);
CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
}
@@ -418,7 +415,7 @@ class SuspendCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
void EmitNativeCode(CodeGenerator* codegen) override {
CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
__ Bind(GetEntryLabel());
- arm_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
+ arm_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, this);
CheckEntrypointTypes<kQuickTestSuspend, void, void>();
if (successor_ == nullptr) {
__ B(GetReturnLabel());
@@ -475,7 +472,7 @@ class BoundsCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
? kQuickThrowStringBounds
: kQuickThrowArrayBounds;
- arm_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
+ arm_codegen->InvokeRuntime(entrypoint, instruction_, this);
CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
}
@@ -499,7 +496,6 @@ class LoadClassSlowPathARMVIXL : public SlowPathCodeARMVIXL {
void EmitNativeCode(CodeGenerator* codegen) override {
LocationSummary* locations = instruction_->GetLocations();
Location out = locations->Out();
- const uint32_t dex_pc = instruction_->GetDexPc();
bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();
@@ -517,10 +513,10 @@ class LoadClassSlowPathARMVIXL : public SlowPathCodeARMVIXL {
__ Mov(calling_convention.GetRegisterAt(0), type_index.index_);
if (cls_->NeedsAccessCheck()) {
CheckEntrypointTypes<kQuickResolveTypeAndVerifyAccess, void*, uint32_t>();
- arm_codegen->InvokeRuntime(kQuickResolveTypeAndVerifyAccess, instruction_, dex_pc, this);
+ arm_codegen->InvokeRuntime(kQuickResolveTypeAndVerifyAccess, instruction_, this);
} else {
CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
- arm_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
+ arm_codegen->InvokeRuntime(kQuickResolveType, instruction_, this);
}
// If we also must_do_clinit, the resolved type is now in the correct register.
} else {
@@ -529,7 +525,7 @@ class LoadClassSlowPathARMVIXL : public SlowPathCodeARMVIXL {
arm_codegen->Move32(LocationFrom(calling_convention.GetRegisterAt(0)), source);
}
if (must_do_clinit) {
- arm_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
+ arm_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, this);
CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
}
@@ -569,7 +565,7 @@ class LoadStringSlowPathARMVIXL : public SlowPathCodeARMVIXL {
InvokeRuntimeCallingConventionARMVIXL calling_convention;
__ Mov(calling_convention.GetRegisterAt(0), string_index.index_);
- arm_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
+ arm_codegen->InvokeRuntime(kQuickResolveString, instruction_, this);
CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
arm_codegen->Move32(locations->Out(), LocationFrom(r0));
@@ -612,18 +608,12 @@ class TypeCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
LocationFrom(calling_convention.GetRegisterAt(1)),
DataType::Type::kReference);
if (instruction_->IsInstanceOf()) {
- arm_codegen->InvokeRuntime(kQuickInstanceofNonTrivial,
- instruction_,
- instruction_->GetDexPc(),
- this);
+ arm_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, this);
CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
arm_codegen->Move32(locations->Out(), LocationFrom(r0));
} else {
DCHECK(instruction_->IsCheckCast());
- arm_codegen->InvokeRuntime(kQuickCheckInstanceOf,
- instruction_,
- instruction_->GetDexPc(),
- this);
+ arm_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, this);
CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
}
@@ -657,7 +647,7 @@ class DeoptimizationSlowPathARMVIXL : public SlowPathCodeARMVIXL {
__ Mov(calling_convention.GetRegisterAt(0),
static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
- arm_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
+ arm_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, this);
CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
}
@@ -696,7 +686,7 @@ class ArraySetSlowPathARMVIXL : public SlowPathCodeARMVIXL {
codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
- arm_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
+ arm_codegen->InvokeRuntime(kQuickAputObject, instruction_, this);
CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
RestoreLiveRegisters(codegen, locations);
__ B(GetExitLabel());
@@ -854,7 +844,7 @@ class ReadBarrierForHeapReferenceSlowPathARMVIXL : public SlowPathCodeARMVIXL {
codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
__ Mov(calling_convention.GetRegisterAt(2), offset_);
}
- arm_codegen->InvokeRuntime(kQuickReadBarrierSlow, instruction_, instruction_->GetDexPc(), this);
+ arm_codegen->InvokeRuntime(kQuickReadBarrierSlow, instruction_, this);
CheckEntrypointTypes<
kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
arm_codegen->Move32(out_, LocationFrom(r0));
@@ -922,10 +912,7 @@ class ReadBarrierForRootSlowPathARMVIXL : public SlowPathCodeARMVIXL {
InvokeRuntimeCallingConventionARMVIXL calling_convention;
CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
arm_codegen->Move32(LocationFrom(calling_convention.GetRegisterAt(0)), root_);
- arm_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
- instruction_,
- instruction_->GetDexPc(),
- this);
+ arm_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow, instruction_, this);
CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
arm_codegen->Move32(out_, LocationFrom(r0));
@@ -958,7 +945,7 @@ class MethodEntryExitHooksSlowPathARMVIXL : public SlowPathCodeARMVIXL {
// Load frame size to pass to the exit hooks
__ Mov(vixl::aarch32::Register(R2), arm_codegen->GetFrameSize());
}
- arm_codegen->InvokeRuntime(entry_point, instruction_, instruction_->GetDexPc(), this);
+ arm_codegen->InvokeRuntime(entry_point, instruction_, this);
RestoreLiveRegisters(codegen, locations);
__ B(GetExitLabel());
}
@@ -2787,7 +2774,6 @@ void CodeGeneratorARMVIXL::AddLocationAsTemp(Location location, LocationSummary*
void CodeGeneratorARMVIXL::InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
- [[maybe_unused]] uint32_t dex_pc,
SlowPathCode* slow_path) {
ValidateInvokeRuntime(entrypoint, instruction, slow_path);
@@ -3709,7 +3695,7 @@ void CodeGeneratorARMVIXL::MaybeGenerateInlineCacheCheck(HInstruction* instructi
// Fast path for a monomorphic cache.
__ Cmp(klass, ip);
__ B(eq, &done, /* is_far_target= */ false);
- InvokeRuntime(kQuickUpdateInlineCache, instruction, instruction->GetDexPc());
+ InvokeRuntime(kQuickUpdateInlineCache, instruction);
__ Bind(&done);
} else {
// This is unexpected, but we don't guarantee stable compilation across
@@ -4180,12 +4166,12 @@ void InstructionCodeGeneratorARMVIXL::VisitTypeConversion(HTypeConversion* conve
break;
case DataType::Type::kFloat32:
- codegen_->InvokeRuntime(kQuickF2l, conversion, conversion->GetDexPc());
+ codegen_->InvokeRuntime(kQuickF2l, conversion);
CheckEntrypointTypes<kQuickF2l, int64_t, float>();
break;
case DataType::Type::kFloat64:
- codegen_->InvokeRuntime(kQuickD2l, conversion, conversion->GetDexPc());
+ codegen_->InvokeRuntime(kQuickD2l, conversion);
CheckEntrypointTypes<kQuickD2l, int64_t, double>();
break;
@@ -4208,7 +4194,7 @@ void InstructionCodeGeneratorARMVIXL::VisitTypeConversion(HTypeConversion* conve
break;
case DataType::Type::kInt64:
- codegen_->InvokeRuntime(kQuickL2f, conversion, conversion->GetDexPc());
+ codegen_->InvokeRuntime(kQuickL2f, conversion);
CheckEntrypointTypes<kQuickL2f, float, int64_t>();
break;
@@ -4773,7 +4759,7 @@ void InstructionCodeGeneratorARMVIXL::VisitDiv(HDiv* div) {
DCHECK(calling_convention.GetRegisterAt(1).Is(RegisterFrom(rhs)));
DCHECK(r0.Is(OutputRegister(div)));
- codegen_->InvokeRuntime(kQuickIdivmod, div, div->GetDexPc());
+ codegen_->InvokeRuntime(kQuickIdivmod, div);
CheckEntrypointTypes<kQuickIdivmod, int32_t, int32_t, int32_t>();
}
break;
@@ -4788,7 +4774,7 @@ void InstructionCodeGeneratorARMVIXL::VisitDiv(HDiv* div) {
DCHECK(LowRegisterFrom(div->GetLocations()->Out()).Is(r0));
DCHECK(HighRegisterFrom(div->GetLocations()->Out()).Is(r1));
- codegen_->InvokeRuntime(kQuickLdiv, div, div->GetDexPc());
+ codegen_->InvokeRuntime(kQuickLdiv, div);
CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
break;
}
@@ -4909,26 +4895,26 @@ void InstructionCodeGeneratorARMVIXL::VisitRem(HRem* rem) {
DCHECK(RegisterFrom(second).Is(calling_convention.GetRegisterAt(1)));
DCHECK(out_reg.Is(r1));
- codegen_->InvokeRuntime(kQuickIdivmod, rem, rem->GetDexPc());
+ codegen_->InvokeRuntime(kQuickIdivmod, rem);
CheckEntrypointTypes<kQuickIdivmod, int32_t, int32_t, int32_t>();
}
break;
}
case DataType::Type::kInt64: {
- codegen_->InvokeRuntime(kQuickLmod, rem, rem->GetDexPc());
- CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
+ codegen_->InvokeRuntime(kQuickLmod, rem);
+ CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
break;
}
case DataType::Type::kFloat32: {
- codegen_->InvokeRuntime(kQuickFmodf, rem, rem->GetDexPc());
+ codegen_->InvokeRuntime(kQuickFmodf, rem);
CheckEntrypointTypes<kQuickFmodf, float, float, float>();
break;
}
case DataType::Type::kFloat64: {
- codegen_->InvokeRuntime(kQuickFmod, rem, rem->GetDexPc());
+ codegen_->InvokeRuntime(kQuickFmod, rem);
CheckEntrypointTypes<kQuickFmod, double, double, double>();
break;
}
@@ -5716,7 +5702,7 @@ void LocationsBuilderARMVIXL::VisitNewInstance(HNewInstance* instruction) {
}
void InstructionCodeGeneratorARMVIXL::VisitNewInstance(HNewInstance* instruction) {
- codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction);
CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 12);
}
@@ -5733,7 +5719,7 @@ void LocationsBuilderARMVIXL::VisitNewArray(HNewArray* instruction) {
void InstructionCodeGeneratorARMVIXL::VisitNewArray(HNewArray* instruction) {
// Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
- codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(entrypoint, instruction);
CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
DCHECK(!codegen_->IsLeafMethod());
codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 13);
@@ -6443,7 +6429,7 @@ void LocationsBuilderARMVIXL::VisitStringBuilderAppend(HStringBuilderAppend* ins
void InstructionCodeGeneratorARMVIXL::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
__ Mov(r0, instruction->GetFormat()->GetValue());
- codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction);
}
void LocationsBuilderARMVIXL::VisitUnresolvedInstanceFieldGet(
@@ -8082,7 +8068,7 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadString(HLoadString* load) NO_THRE
DCHECK_EQ(load->GetLoadKind(), HLoadString::LoadKind::kRuntimeCall);
InvokeRuntimeCallingConventionARMVIXL calling_convention;
__ Mov(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
- codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
+ codegen_->InvokeRuntime(kQuickResolveString, load);
CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 18);
}
@@ -8122,7 +8108,7 @@ void LocationsBuilderARMVIXL::VisitThrow(HThrow* instruction) {
}
void InstructionCodeGeneratorARMVIXL::VisitThrow(HThrow* instruction) {
- codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(kQuickDeliverException, instruction);
CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
@@ -8757,8 +8743,7 @@ void LocationsBuilderARMVIXL::VisitMonitorOperation(HMonitorOperation* instructi
void InstructionCodeGeneratorARMVIXL::VisitMonitorOperation(HMonitorOperation* instruction) {
codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
- instruction,
- instruction->GetDexPc());
+ instruction);
if (instruction->IsEnter()) {
CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
} else {
diff --git a/compiler/optimizing/code_generator_arm_vixl.h b/compiler/optimizing/code_generator_arm_vixl.h
index ea8ec7e485..2e20591c98 100644
--- a/compiler/optimizing/code_generator_arm_vixl.h
+++ b/compiler/optimizing/code_generator_arm_vixl.h
@@ -615,7 +615,6 @@ class CodeGeneratorARMVIXL : public CodeGenerator {
// Generate code to invoke a runtime entry point.
void InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
- uint32_t dex_pc,
SlowPathCode* slow_path = nullptr) override;
// Generate code to invoke a runtime entry point, but do not record
diff --git a/compiler/optimizing/code_generator_riscv64.cc b/compiler/optimizing/code_generator_riscv64.cc
index 0c452b59ac..cd34b90f98 100644
--- a/compiler/optimizing/code_generator_riscv64.cc
+++ b/compiler/optimizing/code_generator_riscv64.cc
@@ -313,7 +313,7 @@ class SuspendCheckSlowPathRISCV64 : public SlowPathCodeRISCV64 {
CodeGeneratorRISCV64* riscv64_codegen = down_cast<CodeGeneratorRISCV64*>(codegen);
__ Bind(GetEntryLabel());
SaveLiveRegisters(codegen, locations); // Only saves live vector registers for SIMD.
- riscv64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
+ riscv64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, this);
CheckEntrypointTypes<kQuickTestSuspend, void, void>();
RestoreLiveRegisters(codegen, locations); // Only restores live vector registers for SIMD.
if (successor_ == nullptr) {
@@ -353,8 +353,7 @@ class NullCheckSlowPathRISCV64 : public SlowPathCodeRISCV64 {
// Live registers will be restored in the catch block if caught.
SaveLiveRegisters(codegen, instruction_->GetLocations());
}
- riscv64_codegen->InvokeRuntime(
- kQuickThrowNullPointer, instruction_, instruction_->GetDexPc(), this);
+ riscv64_codegen->InvokeRuntime(kQuickThrowNullPointer, instruction_, this);
CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
}
@@ -391,7 +390,7 @@ class BoundsCheckSlowPathRISCV64 : public SlowPathCodeRISCV64 {
QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt() ?
kQuickThrowStringBounds :
kQuickThrowArrayBounds;
- riscv64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
+ riscv64_codegen->InvokeRuntime(entrypoint, instruction_, this);
CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
}
@@ -414,7 +413,6 @@ class LoadClassSlowPathRISCV64 : public SlowPathCodeRISCV64 {
void EmitNativeCode(CodeGenerator* codegen) override {
LocationSummary* locations = instruction_->GetLocations();
Location out = locations->Out();
- const uint32_t dex_pc = instruction_->GetDexPc();
bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();
@@ -432,11 +430,10 @@ class LoadClassSlowPathRISCV64 : public SlowPathCodeRISCV64 {
__ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
if (cls_->NeedsAccessCheck()) {
CheckEntrypointTypes<kQuickResolveTypeAndVerifyAccess, void*, uint32_t>();
- riscv64_codegen->InvokeRuntime(
- kQuickResolveTypeAndVerifyAccess, instruction_, dex_pc, this);
+ riscv64_codegen->InvokeRuntime(kQuickResolveTypeAndVerifyAccess, instruction_, this);
} else {
CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
- riscv64_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
+ riscv64_codegen->InvokeRuntime(kQuickResolveType, instruction_, this);
}
// If we also must_do_clinit, the resolved type is now in the correct register.
} else {
@@ -446,7 +443,7 @@ class LoadClassSlowPathRISCV64 : public SlowPathCodeRISCV64 {
Location::RegisterLocation(calling_convention.GetRegisterAt(0)), source, cls_->GetType());
}
if (must_do_clinit) {
- riscv64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
+ riscv64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, this);
CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
}
@@ -484,7 +481,7 @@ class DeoptimizationSlowPathRISCV64 : public SlowPathCodeRISCV64 {
InvokeRuntimeCallingConvention calling_convention;
__ LoadConst32(calling_convention.GetRegisterAt(0),
static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
- riscv64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
+ riscv64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, this);
CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
}
@@ -522,10 +519,7 @@ class ReadBarrierForRootSlowPathRISCV64 : public SlowPathCodeRISCV64 {
riscv64_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
root_,
DataType::Type::kReference);
- riscv64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
- instruction_,
- instruction_->GetDexPc(),
- this);
+ riscv64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow, instruction_, this);
CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
riscv64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
@@ -557,7 +551,7 @@ class MethodEntryExitHooksSlowPathRISCV64 : public SlowPathCodeRISCV64 {
if (instruction_->IsMethodExitHook()) {
__ Li(A4, riscv64_codegen->GetFrameSize());
}
- riscv64_codegen->InvokeRuntime(entry_point, instruction_, instruction_->GetDexPc(), this);
+ riscv64_codegen->InvokeRuntime(entry_point, instruction_, this);
RestoreLiveRegisters(codegen, locations);
__ J(GetExitLabel());
}
@@ -599,7 +593,7 @@ class ArraySetSlowPathRISCV64 : public SlowPathCodeRISCV64 {
codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
CodeGeneratorRISCV64* riscv64_codegen = down_cast<CodeGeneratorRISCV64*>(codegen);
- riscv64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
+ riscv64_codegen->InvokeRuntime(kQuickAputObject, instruction_, this);
CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
RestoreLiveRegisters(codegen, locations);
__ J(GetExitLabel());
@@ -619,7 +613,6 @@ class TypeCheckSlowPathRISCV64 : public SlowPathCodeRISCV64 {
void EmitNativeCode(CodeGenerator* codegen) override {
LocationSummary* locations = instruction_->GetLocations();
- uint32_t dex_pc = instruction_->GetDexPc();
DCHECK(instruction_->IsCheckCast()
|| !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
CodeGeneratorRISCV64* riscv64_codegen = down_cast<CodeGeneratorRISCV64*>(codegen);
@@ -639,14 +632,14 @@ class TypeCheckSlowPathRISCV64 : public SlowPathCodeRISCV64 {
Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
DataType::Type::kReference);
if (instruction_->IsInstanceOf()) {
- riscv64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
+ riscv64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, this);
CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
DataType::Type ret_type = instruction_->GetType();
Location ret_loc = calling_convention.GetReturnLocation(ret_type);
riscv64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
} else {
DCHECK(instruction_->IsCheckCast());
- riscv64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
+ riscv64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, this);
CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
}
@@ -674,8 +667,7 @@ class DivZeroCheckSlowPathRISCV64 : public SlowPathCodeRISCV64 {
void EmitNativeCode(CodeGenerator* codegen) override {
CodeGeneratorRISCV64* riscv64_codegen = down_cast<CodeGeneratorRISCV64*>(codegen);
__ Bind(GetEntryLabel());
- riscv64_codegen->InvokeRuntime(
- kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
+ riscv64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, this);
CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
}
@@ -768,8 +760,7 @@ class LoadStringSlowPathRISCV64 : public SlowPathCodeRISCV64 {
SaveLiveRegisters(codegen, locations);
__ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
- riscv64_codegen->InvokeRuntime(
- kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
+ riscv64_codegen->InvokeRuntime(kQuickResolveString, instruction_, this);
CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
DataType::Type type = DataType::Type::kReference;
@@ -4586,7 +4577,7 @@ void InstructionCodeGeneratorRISCV64::VisitLoadString(HLoadString* instruction)
InvokeRuntimeCallingConvention calling_convention;
DCHECK(calling_convention.GetReturnLocation(DataType::Type::kReference).Equals(out_loc));
__ LoadConst32(calling_convention.GetRegisterAt(0), instruction->GetStringIndex().index_);
- codegen_->InvokeRuntime(kQuickResolveString, instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(kQuickResolveString, instruction);
CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
@@ -4656,8 +4647,7 @@ void LocationsBuilderRISCV64::VisitMonitorOperation(HMonitorOperation* instructi
void InstructionCodeGeneratorRISCV64::VisitMonitorOperation(HMonitorOperation* instruction) {
codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
- instruction,
- instruction->GetDexPc());
+ instruction);
if (instruction->IsEnter()) {
CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
} else {
@@ -4772,7 +4762,7 @@ void LocationsBuilderRISCV64::VisitNewArray(HNewArray* instruction) {
void InstructionCodeGeneratorRISCV64::VisitNewArray(HNewArray* instruction) {
QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
- codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(entrypoint, instruction);
CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
DCHECK(!codegen_->IsLeafMethod());
}
@@ -4786,7 +4776,7 @@ void LocationsBuilderRISCV64::VisitNewInstance(HNewInstance* instruction) {
}
void InstructionCodeGeneratorRISCV64::VisitNewInstance(HNewInstance* instruction) {
- codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction);
CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
}
@@ -4981,7 +4971,7 @@ void InstructionCodeGeneratorRISCV64::VisitRem(HRem* instruction) {
case DataType::Type::kFloat64: {
QuickEntrypointEnum entrypoint =
(type == DataType::Type::kFloat32) ? kQuickFmodf : kQuickFmod;
- codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(entrypoint, instruction);
if (type == DataType::Type::kFloat32) {
CheckEntrypointTypes<kQuickFmodf, float, float, float>();
} else {
@@ -5079,7 +5069,7 @@ void LocationsBuilderRISCV64::VisitStringBuilderAppend(HStringBuilderAppend* ins
void InstructionCodeGeneratorRISCV64::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
__ LoadConst32(A0, instruction->GetFormat()->GetValue());
- codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction);
}
void LocationsBuilderRISCV64::VisitUnresolvedInstanceFieldGet(
@@ -5288,7 +5278,7 @@ void LocationsBuilderRISCV64::VisitThrow(HThrow* instruction) {
}
void InstructionCodeGeneratorRISCV64::VisitThrow(HThrow* instruction) {
- codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(kQuickDeliverException, instruction);
CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
@@ -6511,7 +6501,6 @@ void CodeGeneratorRISCV64::Finalize() {
// Generate code to invoke a runtime entry point.
void CodeGeneratorRISCV64::InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
- [[maybe_unused]] uint32_t dex_pc,
SlowPathCode* slow_path) {
ValidateInvokeRuntime(entrypoint, instruction, slow_path);
@@ -7096,7 +7085,7 @@ void CodeGeneratorRISCV64::MaybeGenerateInlineCacheCheck(HInstruction* instructi
// Fast path for a monomorphic cache.
__ Beq(klass, tmp, &done);
}
- InvokeRuntime(kQuickUpdateInlineCache, instruction, instruction->GetDexPc());
+ InvokeRuntime(kQuickUpdateInlineCache, instruction);
__ Bind(&done);
} else {
// This is unexpected, but we don't guarantee stable compilation across
diff --git a/compiler/optimizing/code_generator_riscv64.h b/compiler/optimizing/code_generator_riscv64.h
index 1945cacb6f..dc88296be2 100644
--- a/compiler/optimizing/code_generator_riscv64.h
+++ b/compiler/optimizing/code_generator_riscv64.h
@@ -504,7 +504,6 @@ class CodeGeneratorRISCV64 : public CodeGenerator {
// Generate code to invoke a runtime entry point.
void InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
- uint32_t dex_pc,
SlowPathCode* slow_path = nullptr) override;
// Generate code to invoke a runtime entry point, but do not record
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 370d8fed8a..58d38feb8d 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -89,10 +89,7 @@ class NullCheckSlowPathX86 : public SlowPathCode {
// Live registers will be restored in the catch block if caught.
SaveLiveRegisters(codegen, instruction_->GetLocations());
}
- x86_codegen->InvokeRuntime(kQuickThrowNullPointer,
- instruction_,
- instruction_->GetDexPc(),
- this);
+ x86_codegen->InvokeRuntime(kQuickThrowNullPointer, instruction_, this);
CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
}
@@ -111,7 +108,7 @@ class DivZeroCheckSlowPathX86 : public SlowPathCode {
void EmitNativeCode(CodeGenerator* codegen) override {
CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
__ Bind(GetEntryLabel());
- x86_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
+ x86_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, this);
CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
}
@@ -213,7 +210,7 @@ class BoundsCheckSlowPathX86 : public SlowPathCode {
QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
? kQuickThrowStringBounds
: kQuickThrowArrayBounds;
- x86_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
+ x86_codegen->InvokeRuntime(entrypoint, instruction_, this);
CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
}
@@ -236,7 +233,7 @@ class SuspendCheckSlowPathX86 : public SlowPathCode {
CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
__ Bind(GetEntryLabel());
SaveLiveRegisters(codegen, locations); // Only saves full width XMM for SIMD.
- x86_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
+ x86_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, this);
CheckEntrypointTypes<kQuickTestSuspend, void, void>();
RestoreLiveRegisters(codegen, locations); // Only restores full width XMM for SIMD.
if (successor_ == nullptr) {
@@ -279,7 +276,7 @@ class LoadStringSlowPathX86 : public SlowPathCode {
InvokeRuntimeCallingConvention calling_convention;
const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
__ movl(calling_convention.GetRegisterAt(0), Immediate(string_index.index_));
- x86_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
+ x86_codegen->InvokeRuntime(kQuickResolveString, instruction_, this);
CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
RestoreLiveRegisters(codegen, locations);
@@ -304,7 +301,6 @@ class LoadClassSlowPathX86 : public SlowPathCode {
void EmitNativeCode(CodeGenerator* codegen) override {
LocationSummary* locations = instruction_->GetLocations();
Location out = locations->Out();
- const uint32_t dex_pc = instruction_->GetDexPc();
bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();
@@ -322,10 +318,10 @@ class LoadClassSlowPathX86 : public SlowPathCode {
__ movl(calling_convention.GetRegisterAt(0), Immediate(type_index.index_));
if (cls_->NeedsAccessCheck()) {
CheckEntrypointTypes<kQuickResolveTypeAndVerifyAccess, void*, uint32_t>();
- x86_codegen->InvokeRuntime(kQuickResolveTypeAndVerifyAccess, instruction_, dex_pc, this);
+ x86_codegen->InvokeRuntime(kQuickResolveTypeAndVerifyAccess, instruction_, this);
} else {
CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
- x86_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
+ x86_codegen->InvokeRuntime(kQuickResolveType, instruction_, this);
}
// If we also must_do_clinit, the resolved type is now in the correct register.
} else {
@@ -334,7 +330,7 @@ class LoadClassSlowPathX86 : public SlowPathCode {
x86_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), source);
}
if (must_do_clinit) {
- x86_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
+ x86_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, this);
CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
}
@@ -390,17 +386,11 @@ class TypeCheckSlowPathX86 : public SlowPathCode {
Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
DataType::Type::kReference);
if (instruction_->IsInstanceOf()) {
- x86_codegen->InvokeRuntime(kQuickInstanceofNonTrivial,
- instruction_,
- instruction_->GetDexPc(),
- this);
+ x86_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, this);
CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
} else {
DCHECK(instruction_->IsCheckCast());
- x86_codegen->InvokeRuntime(kQuickCheckInstanceOf,
- instruction_,
- instruction_->GetDexPc(),
- this);
+ x86_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, this);
CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
}
@@ -437,7 +427,7 @@ class DeoptimizationSlowPathX86 : public SlowPathCode {
x86_codegen->Load32BitValue(
calling_convention.GetRegisterAt(0),
static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
- x86_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
+ x86_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, this);
CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
}
@@ -476,7 +466,7 @@ class ArraySetSlowPathX86 : public SlowPathCode {
codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
- x86_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
+ x86_codegen->InvokeRuntime(kQuickAputObject, instruction_, this);
CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
RestoreLiveRegisters(codegen, locations);
__ jmp(GetExitLabel());
@@ -878,7 +868,7 @@ class ReadBarrierForHeapReferenceSlowPathX86 : public SlowPathCode {
codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
__ movl(calling_convention.GetRegisterAt(2), Immediate(offset_));
}
- x86_codegen->InvokeRuntime(kQuickReadBarrierSlow, instruction_, instruction_->GetDexPc(), this);
+ x86_codegen->InvokeRuntime(kQuickReadBarrierSlow, instruction_, this);
CheckEntrypointTypes<
kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
x86_codegen->Move32(out_, Location::RegisterLocation(EAX));
@@ -942,10 +932,7 @@ class ReadBarrierForRootSlowPathX86 : public SlowPathCode {
InvokeRuntimeCallingConvention calling_convention;
CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
x86_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
- x86_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
- instruction_,
- instruction_->GetDexPc(),
- this);
+ x86_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow, instruction_, this);
CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
x86_codegen->Move32(out_, Location::RegisterLocation(EAX));
@@ -976,7 +963,7 @@ class MethodEntryExitHooksSlowPathX86 : public SlowPathCode {
if (instruction_->IsMethodExitHook()) {
__ movl(EBX, Immediate(codegen->GetFrameSize()));
}
- x86_codegen->InvokeRuntime(entry_point, instruction_, instruction_->GetDexPc(), this);
+ x86_codegen->InvokeRuntime(entry_point, instruction_, this);
RestoreLiveRegisters(codegen, locations);
__ jmp(GetExitLabel());
}
@@ -1105,7 +1092,6 @@ size_t CodeGeneratorX86::RestoreFloatingPointRegister(size_t stack_index, uint32
void CodeGeneratorX86::InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
- [[maybe_unused]] uint32_t dex_pc,
SlowPathCode* slow_path) {
ValidateInvokeRuntime(entrypoint, instruction, slow_path);
GenerateInvokeRuntime(GetThreadOffset<kX86PointerSize>(entrypoint).Int32Value());
@@ -3470,12 +3456,12 @@ void InstructionCodeGeneratorX86::VisitTypeConversion(HTypeConversion* conversio
break;
case DataType::Type::kFloat32:
- codegen_->InvokeRuntime(kQuickF2l, conversion, conversion->GetDexPc());
+ codegen_->InvokeRuntime(kQuickF2l, conversion);
CheckEntrypointTypes<kQuickF2l, int64_t, float>();
break;
case DataType::Type::kFloat64:
- codegen_->InvokeRuntime(kQuickD2l, conversion, conversion->GetDexPc());
+ codegen_->InvokeRuntime(kQuickD2l, conversion);
CheckEntrypointTypes<kQuickD2l, int64_t, double>();
break;
@@ -4309,10 +4295,10 @@ void InstructionCodeGeneratorX86::GenerateDivRemIntegral(HBinaryOperation* instr
DCHECK_EQ(EDX, out.AsRegisterPairHigh<Register>());
if (is_div) {
- codegen_->InvokeRuntime(kQuickLdiv, instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(kQuickLdiv, instruction);
CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
} else {
- codegen_->InvokeRuntime(kQuickLmod, instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(kQuickLmod, instruction);
CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
}
break;
@@ -5208,7 +5194,7 @@ void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
}
void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
- codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction);
CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
DCHECK(!codegen_->IsLeafMethod());
}
@@ -5225,7 +5211,7 @@ void LocationsBuilderX86::VisitNewArray(HNewArray* instruction) {
void InstructionCodeGeneratorX86::VisitNewArray(HNewArray* instruction) {
// Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
- codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(entrypoint, instruction);
CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
DCHECK(!codegen_->IsLeafMethod());
}
@@ -6395,7 +6381,7 @@ void LocationsBuilderX86::VisitStringBuilderAppend(HStringBuilderAppend* instruc
void InstructionCodeGeneratorX86::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
__ movl(EAX, Immediate(instruction->GetFormat()->GetValue()));
- codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction);
}
void LocationsBuilderX86::VisitUnresolvedInstanceFieldGet(
@@ -7723,7 +7709,7 @@ void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) NO_THREAD_S
InvokeRuntimeCallingConvention calling_convention;
DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
__ movl(calling_convention.GetRegisterAt(0), Immediate(load->GetStringIndex().index_));
- codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
+ codegen_->InvokeRuntime(kQuickResolveString, load);
CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
@@ -7757,7 +7743,7 @@ void LocationsBuilderX86::VisitThrow(HThrow* instruction) {
}
void InstructionCodeGeneratorX86::VisitThrow(HThrow* instruction) {
- codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(kQuickDeliverException, instruction);
CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
@@ -8390,10 +8376,8 @@ void LocationsBuilderX86::VisitMonitorOperation(HMonitorOperation* instruction)
}
void InstructionCodeGeneratorX86::VisitMonitorOperation(HMonitorOperation* instruction) {
- codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject
- : kQuickUnlockObject,
- instruction,
- instruction->GetDexPc());
+ codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
+ instruction);
if (instruction->IsEnter()) {
CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
} else {
diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h
index fae6c7f801..2c145b5133 100644
--- a/compiler/optimizing/code_generator_x86.h
+++ b/compiler/optimizing/code_generator_x86.h
@@ -436,7 +436,6 @@ class CodeGeneratorX86 : public CodeGenerator {
// Generate code to invoke a runtime entry point.
void InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
- uint32_t dex_pc,
SlowPathCode* slow_path = nullptr) override;
// Generate code to invoke a runtime entry point, but do not record
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 4c180be5fb..9133f87c7b 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -89,10 +89,7 @@ class NullCheckSlowPathX86_64 : public SlowPathCode {
// Live registers will be restored in the catch block if caught.
SaveLiveRegisters(codegen, instruction_->GetLocations());
}
- x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
- instruction_,
- instruction_->GetDexPc(),
- this);
+ x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer, instruction_, this);
CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
}
@@ -111,7 +108,7 @@ class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
void EmitNativeCode(CodeGenerator* codegen) override {
CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
__ Bind(GetEntryLabel());
- x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
+ x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, this);
CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
}
@@ -167,7 +164,7 @@ class SuspendCheckSlowPathX86_64 : public SlowPathCode {
CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
__ Bind(GetEntryLabel());
SaveLiveRegisters(codegen, locations); // Only saves full width XMM for SIMD.
- x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
+ x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, this);
CheckEntrypointTypes<kQuickTestSuspend, void, void>();
RestoreLiveRegisters(codegen, locations); // Only restores full width XMM for SIMD.
if (successor_ == nullptr) {
@@ -257,7 +254,7 @@ class BoundsCheckSlowPathX86_64 : public SlowPathCode {
QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
? kQuickThrowStringBounds
: kQuickThrowArrayBounds;
- x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
+ x86_64_codegen->InvokeRuntime(entrypoint, instruction_, this);
CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
}
@@ -285,10 +282,7 @@ class LoadMethodTypeSlowPathX86_64: public SlowPathCode {
const dex::ProtoIndex proto_index = instruction_->AsLoadMethodType()->GetProtoIndex();
// Custom calling convention: RAX serves as both input and output.
__ movl(CpuRegister(RAX), Immediate(proto_index.index_));
- x86_64_codegen->InvokeRuntime(kQuickResolveMethodType,
- instruction_,
- instruction_->GetDexPc(),
- this);
+ x86_64_codegen->InvokeRuntime(kQuickResolveMethodType, instruction_, this);
CheckEntrypointTypes<kQuickResolveMethodType, void*, uint32_t>();
x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
RestoreLiveRegisters(codegen, locations);
@@ -313,7 +307,6 @@ class LoadClassSlowPathX86_64 : public SlowPathCode {
void EmitNativeCode(CodeGenerator* codegen) override {
LocationSummary* locations = instruction_->GetLocations();
Location out = locations->Out();
- const uint32_t dex_pc = instruction_->GetDexPc();
bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();
@@ -331,10 +324,10 @@ class LoadClassSlowPathX86_64 : public SlowPathCode {
__ movl(CpuRegister(RAX), Immediate(type_index.index_));
if (cls_->NeedsAccessCheck()) {
CheckEntrypointTypes<kQuickResolveTypeAndVerifyAccess, void*, uint32_t>();
- x86_64_codegen->InvokeRuntime(kQuickResolveTypeAndVerifyAccess, instruction_, dex_pc, this);
+ x86_64_codegen->InvokeRuntime(kQuickResolveTypeAndVerifyAccess, instruction_, this);
} else {
CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
- x86_64_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
+ x86_64_codegen->InvokeRuntime(kQuickResolveType, instruction_, this);
}
// If we also must_do_clinit, the resolved type is now in the correct register.
} else {
@@ -343,7 +336,7 @@ class LoadClassSlowPathX86_64 : public SlowPathCode {
x86_64_codegen->Move(Location::RegisterLocation(RAX), source);
}
if (must_do_clinit) {
- x86_64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
+ x86_64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, this);
CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
}
@@ -381,10 +374,7 @@ class LoadStringSlowPathX86_64 : public SlowPathCode {
const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
// Custom calling convention: RAX serves as both input and output.
__ movl(CpuRegister(RAX), Immediate(string_index.index_));
- x86_64_codegen->InvokeRuntime(kQuickResolveString,
- instruction_,
- instruction_->GetDexPc(),
- this);
+ x86_64_codegen->InvokeRuntime(kQuickResolveString, instruction_, this);
CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
RestoreLiveRegisters(codegen, locations);
@@ -405,7 +395,6 @@ class TypeCheckSlowPathX86_64 : public SlowPathCode {
void EmitNativeCode(CodeGenerator* codegen) override {
LocationSummary* locations = instruction_->GetLocations();
- uint32_t dex_pc = instruction_->GetDexPc();
DCHECK(instruction_->IsCheckCast()
|| !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
@@ -433,11 +422,11 @@ class TypeCheckSlowPathX86_64 : public SlowPathCode {
Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
DataType::Type::kReference);
if (instruction_->IsInstanceOf()) {
- x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
+ x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, this);
CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
} else {
DCHECK(instruction_->IsCheckCast());
- x86_64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
+ x86_64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, this);
CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
}
@@ -475,7 +464,7 @@ class DeoptimizationSlowPathX86_64 : public SlowPathCode {
x86_64_codegen->Load32BitValue(
CpuRegister(calling_convention.GetRegisterAt(0)),
static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
- x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
+ x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, this);
CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
}
@@ -514,7 +503,7 @@ class ArraySetSlowPathX86_64 : public SlowPathCode {
codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
- x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
+ x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, this);
CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
RestoreLiveRegisters(codegen, locations);
__ jmp(GetExitLabel());
@@ -924,10 +913,7 @@ class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
__ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
}
- x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
- instruction_,
- instruction_->GetDexPc(),
- this);
+ x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow, instruction_, this);
CheckEntrypointTypes<
kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));
@@ -992,10 +978,7 @@ class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
InvokeRuntimeCallingConvention calling_convention;
CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
- x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
- instruction_,
- instruction_->GetDexPc(),
- this);
+ x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow, instruction_, this);
CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));
@@ -1028,7 +1011,7 @@ class MethodEntryExitHooksSlowPathX86_64 : public SlowPathCode {
// Load FrameSize to pass to the exit hook.
__ movq(CpuRegister(R8), Immediate(codegen->GetFrameSize()));
}
- x86_64_codegen->InvokeRuntime(entry_point, instruction_, instruction_->GetDexPc(), this);
+ x86_64_codegen->InvokeRuntime(entry_point, instruction_, this);
RestoreLiveRegisters(codegen, locations);
__ jmp(GetExitLabel());
}
@@ -1562,7 +1545,6 @@ size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uin
void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
- [[maybe_unused]] uint32_t dex_pc,
SlowPathCode* slow_path) {
ValidateInvokeRuntime(entrypoint, instruction, slow_path);
GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
@@ -5171,7 +5153,7 @@ void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
}
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
- codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction);
CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
DCHECK(!codegen_->IsLeafMethod());
}
@@ -5188,7 +5170,7 @@ void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
// Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
- codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(entrypoint, instruction);
CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
DCHECK(!codegen_->IsLeafMethod());
}
@@ -5711,7 +5693,7 @@ void LocationsBuilderX86_64::VisitStringBuilderAppend(HStringBuilderAppend* inst
void InstructionCodeGeneratorX86_64::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
__ movl(CpuRegister(RDI), Immediate(instruction->GetFormat()->GetValue()));
- codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction);
}
void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
@@ -7071,9 +7053,7 @@ void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) NO_THREA
// Custom calling convention: RAX serves as both input and output.
__ movl(CpuRegister(RAX), Immediate(load->GetStringIndex().index_));
- codegen_->InvokeRuntime(kQuickResolveString,
- load,
- load->GetDexPc());
+ codegen_->InvokeRuntime(kQuickResolveString, load);
CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
@@ -7108,7 +7088,7 @@ void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
}
void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
- codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
+ codegen_->InvokeRuntime(kQuickDeliverException, instruction);
CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
@@ -7757,8 +7737,7 @@ void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instructio
void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
- instruction,
- instruction->GetDexPc());
+ instruction);
if (instruction->IsEnter()) {
CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
} else {
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index 3024116402..8a514b21f0 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -415,7 +415,6 @@ class CodeGeneratorX86_64 : public CodeGenerator {
// Generate code to invoke a runtime entry point.
void InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
- uint32_t dex_pc,
SlowPathCode* slow_path = nullptr) override;
// Generate code to invoke a runtime entry point, but do not record
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index db8d6cac05..31e617baec 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -184,8 +184,7 @@ class InvokePolymorphicSlowPathARM64 : public SlowPathCodeARM64 {
// Passing `MethodHandle` object as hidden argument.
__ Mov(w0, method_handle_.W());
codegen->InvokeRuntime(QuickEntrypointEnum::kQuickInvokePolymorphicWithHiddenReceiver,
- instruction_,
- instruction_->GetDexPc());
+ instruction_);
RestoreLiveRegisters(codegen, instruction_->GetLocations());
__ B(GetExitLabel());
@@ -2476,7 +2475,7 @@ static void GenerateVisitStringIndexOf(HInvoke* invoke,
__ Mov(tmp_reg, 0);
}
- codegen->InvokeRuntime(kQuickIndexOf, invoke, invoke->GetDexPc(), slow_path);
+ codegen->InvokeRuntime(kQuickIndexOf, invoke, slow_path);
CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
if (slow_path != nullptr) {
@@ -2540,7 +2539,7 @@ void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke)
codegen_->AddSlowPath(slow_path);
__ B(eq, slow_path->GetEntryLabel());
- codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc(), slow_path);
+ codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, slow_path);
CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
__ Bind(slow_path->GetExitLabel());
}
@@ -2562,7 +2561,7 @@ void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke)
// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
//
// all include a null check on `data` before calling that method.
- codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke);
CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
}
@@ -2585,7 +2584,7 @@ void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke
codegen_->AddSlowPath(slow_path);
__ B(eq, slow_path->GetEntryLabel());
- codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc(), slow_path);
+ codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, slow_path);
CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
__ Bind(slow_path->GetExitLabel());
}
@@ -2637,7 +2636,7 @@ static void CreateFPFPFPToFPLocations(ArenaAllocator* allocator, HInvoke* invoke
static void GenFPToFPCall(HInvoke* invoke,
CodeGeneratorARM64* codegen,
QuickEntrypointEnum entry) {
- codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
+ codegen->InvokeRuntime(entry, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitMathCos(HInvoke* invoke) {
@@ -3669,7 +3668,7 @@ void IntrinsicCodeGeneratorARM64::HandleValueOf(HInvoke* invoke,
auto allocate_instance = [&]() {
DCHECK(out.X().Is(InvokeRuntimeCallingConvention().GetRegisterAt(0)));
codegen_->LoadIntrinsicDeclaringClass(out, invoke);
- codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke);
CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
};
if (invoke->InputAt(0)->IsIntConstant()) {
diff --git a/compiler/optimizing/intrinsics_arm_vixl.cc b/compiler/optimizing/intrinsics_arm_vixl.cc
index db7457a68b..a6f6eb0ba0 100644
--- a/compiler/optimizing/intrinsics_arm_vixl.cc
+++ b/compiler/optimizing/intrinsics_arm_vixl.cc
@@ -1081,7 +1081,7 @@ static void GenerateVisitStringIndexOf(HInvoke* invoke,
__ Mov(tmp_reg, 0);
}
- codegen->InvokeRuntime(kQuickIndexOf, invoke, invoke->GetDexPc(), slow_path);
+ codegen->InvokeRuntime(kQuickIndexOf, invoke, slow_path);
CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
if (slow_path != nullptr) {
@@ -1143,7 +1143,7 @@ void IntrinsicCodeGeneratorARMVIXL::VisitStringNewStringFromBytes(HInvoke* invok
codegen_->AddSlowPath(slow_path);
__ B(eq, slow_path->GetEntryLabel());
- codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc(), slow_path);
+ codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, slow_path);
CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
__ Bind(slow_path->GetExitLabel());
}
@@ -1165,7 +1165,7 @@ void IntrinsicCodeGeneratorARMVIXL::VisitStringNewStringFromChars(HInvoke* invok
// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
//
// all include a null check on `data` before calling that method.
- codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke);
CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
}
@@ -1186,7 +1186,7 @@ void IntrinsicCodeGeneratorARMVIXL::VisitStringNewStringFromString(HInvoke* invo
codegen_->AddSlowPath(slow_path);
__ B(eq, slow_path->GetEntryLabel());
- codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc(), slow_path);
+ codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, slow_path);
CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
__ Bind(slow_path->GetExitLabel());
@@ -1666,7 +1666,7 @@ static void GenFPToFPCall(HInvoke* invoke,
__ Vmov(RegisterFrom(locations->GetTemp(0)),
RegisterFrom(locations->GetTemp(1)),
InputDRegisterAt(invoke, 0));
- codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
+ codegen->InvokeRuntime(entry, invoke);
__ Vmov(OutputDRegister(invoke),
RegisterFrom(locations->GetTemp(0)),
RegisterFrom(locations->GetTemp(1)));
@@ -1688,7 +1688,7 @@ static void GenFPFPToFPCall(HInvoke* invoke,
__ Vmov(RegisterFrom(locations->GetTemp(2)),
RegisterFrom(locations->GetTemp(3)),
InputDRegisterAt(invoke, 1));
- codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
+ codegen->InvokeRuntime(entry, invoke);
__ Vmov(OutputDRegister(invoke),
RegisterFrom(locations->GetTemp(0)),
RegisterFrom(locations->GetTemp(1)));
@@ -2338,7 +2338,7 @@ void IntrinsicCodeGeneratorARMVIXL::HandleValueOf(HInvoke* invoke,
auto allocate_instance = [&]() {
DCHECK(out.Is(InvokeRuntimeCallingConventionARMVIXL().GetRegisterAt(0)));
codegen_->LoadIntrinsicDeclaringClass(out, invoke);
- codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke);
CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
};
if (invoke->InputAt(0)->IsIntConstant()) {
diff --git a/compiler/optimizing/intrinsics_riscv64.cc b/compiler/optimizing/intrinsics_riscv64.cc
index c750633450..cc0f114c56 100644
--- a/compiler/optimizing/intrinsics_riscv64.cc
+++ b/compiler/optimizing/intrinsics_riscv64.cc
@@ -109,8 +109,7 @@ class InvokePolymorphicSlowPathRISCV64 : public SlowPathCodeRISCV64 {
// Passing `MethodHandle` object as hidden argument.
__ Mv(A0, method_handle_);
codegen->InvokeRuntime(QuickEntrypointEnum::kQuickInvokePolymorphicWithHiddenReceiver,
- instruction_,
- instruction_->GetDexPc());
+ instruction_);
RestoreLiveRegisters(codegen, instruction_->GetLocations());
__ J(GetExitLabel());
@@ -731,7 +730,7 @@ void IntrinsicCodeGeneratorRISCV64::HandleValueOf(HInvoke* invoke,
auto allocate_instance = [&]() {
DCHECK_EQ(out, InvokeRuntimeCallingConvention().GetRegisterAt(0));
codegen_->LoadIntrinsicDeclaringClass(out, invoke);
- codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke);
CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
};
if (invoke->InputAt(0)->IsIntConstant()) {
@@ -946,7 +945,7 @@ static void GenerateVisitStringIndexOf(HInvoke* invoke,
__ Li(tmp_reg, 0);
}
- codegen->InvokeRuntime(kQuickIndexOf, invoke, invoke->GetDexPc(), slow_path);
+ codegen->InvokeRuntime(kQuickIndexOf, invoke, slow_path);
CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
if (slow_path != nullptr) {
@@ -1009,7 +1008,7 @@ void IntrinsicCodeGeneratorRISCV64::VisitStringNewStringFromBytes(HInvoke* invok
codegen_->AddSlowPath(slow_path);
__ Beqz(byte_array, slow_path->GetEntryLabel());
- codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc(), slow_path);
+ codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, slow_path);
CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
__ Bind(slow_path->GetExitLabel());
}
@@ -1031,7 +1030,7 @@ void IntrinsicCodeGeneratorRISCV64::VisitStringNewStringFromChars(HInvoke* invok
// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
//
// all include a null check on `data` before calling that method.
- codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke);
CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
}
@@ -1053,7 +1052,7 @@ void IntrinsicCodeGeneratorRISCV64::VisitStringNewStringFromString(HInvoke* invo
codegen_->AddSlowPath(slow_path);
__ Beqz(string_to_copy, slow_path->GetEntryLabel());
- codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc(), slow_path);
+ codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, slow_path);
CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
__ Bind(slow_path->GetExitLabel());
}
@@ -5224,7 +5223,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitMathCos(HInvoke* invoke) {
}
void IntrinsicCodeGeneratorRISCV64::VisitMathCos(HInvoke* invoke) {
- codegen_->InvokeRuntime(kQuickCos, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickCos, invoke);
}
void IntrinsicLocationsBuilderRISCV64::VisitMathSin(HInvoke* invoke) {
@@ -5232,7 +5231,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitMathSin(HInvoke* invoke) {
}
void IntrinsicCodeGeneratorRISCV64::VisitMathSin(HInvoke* invoke) {
- codegen_->InvokeRuntime(kQuickSin, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickSin, invoke);
}
void IntrinsicLocationsBuilderRISCV64::VisitMathAcos(HInvoke* invoke) {
@@ -5240,7 +5239,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitMathAcos(HInvoke* invoke) {
}
void IntrinsicCodeGeneratorRISCV64::VisitMathAcos(HInvoke* invoke) {
- codegen_->InvokeRuntime(kQuickAcos, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickAcos, invoke);
}
void IntrinsicLocationsBuilderRISCV64::VisitMathAsin(HInvoke* invoke) {
@@ -5248,7 +5247,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitMathAsin(HInvoke* invoke) {
}
void IntrinsicCodeGeneratorRISCV64::VisitMathAsin(HInvoke* invoke) {
- codegen_->InvokeRuntime(kQuickAsin, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickAsin, invoke);
}
void IntrinsicLocationsBuilderRISCV64::VisitMathAtan(HInvoke* invoke) {
@@ -5256,7 +5255,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitMathAtan(HInvoke* invoke) {
}
void IntrinsicCodeGeneratorRISCV64::VisitMathAtan(HInvoke* invoke) {
- codegen_->InvokeRuntime(kQuickAtan, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickAtan, invoke);
}
void IntrinsicLocationsBuilderRISCV64::VisitMathAtan2(HInvoke* invoke) {
@@ -5264,7 +5263,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitMathAtan2(HInvoke* invoke) {
}
void IntrinsicCodeGeneratorRISCV64::VisitMathAtan2(HInvoke* invoke) {
- codegen_->InvokeRuntime(kQuickAtan2, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickAtan2, invoke);
}
void IntrinsicLocationsBuilderRISCV64::VisitMathPow(HInvoke* invoke) {
@@ -5272,7 +5271,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitMathPow(HInvoke* invoke) {
}
void IntrinsicCodeGeneratorRISCV64::VisitMathPow(HInvoke* invoke) {
- codegen_->InvokeRuntime(kQuickPow, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickPow, invoke);
}
void IntrinsicLocationsBuilderRISCV64::VisitMathCbrt(HInvoke* invoke) {
@@ -5280,7 +5279,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitMathCbrt(HInvoke* invoke) {
}
void IntrinsicCodeGeneratorRISCV64::VisitMathCbrt(HInvoke* invoke) {
- codegen_->InvokeRuntime(kQuickCbrt, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickCbrt, invoke);
}
void IntrinsicLocationsBuilderRISCV64::VisitMathCosh(HInvoke* invoke) {
@@ -5288,7 +5287,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitMathCosh(HInvoke* invoke) {
}
void IntrinsicCodeGeneratorRISCV64::VisitMathCosh(HInvoke* invoke) {
- codegen_->InvokeRuntime(kQuickCosh, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickCosh, invoke);
}
void IntrinsicLocationsBuilderRISCV64::VisitMathExp(HInvoke* invoke) {
@@ -5296,7 +5295,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitMathExp(HInvoke* invoke) {
}
void IntrinsicCodeGeneratorRISCV64::VisitMathExp(HInvoke* invoke) {
- codegen_->InvokeRuntime(kQuickExp, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickExp, invoke);
}
void IntrinsicLocationsBuilderRISCV64::VisitMathExpm1(HInvoke* invoke) {
@@ -5304,7 +5303,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitMathExpm1(HInvoke* invoke) {
}
void IntrinsicCodeGeneratorRISCV64::VisitMathExpm1(HInvoke* invoke) {
- codegen_->InvokeRuntime(kQuickExpm1, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickExpm1, invoke);
}
void IntrinsicLocationsBuilderRISCV64::VisitMathHypot(HInvoke* invoke) {
@@ -5312,7 +5311,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitMathHypot(HInvoke* invoke) {
}
void IntrinsicCodeGeneratorRISCV64::VisitMathHypot(HInvoke* invoke) {
- codegen_->InvokeRuntime(kQuickHypot, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickHypot, invoke);
}
void IntrinsicLocationsBuilderRISCV64::VisitMathLog(HInvoke* invoke) {
@@ -5320,7 +5319,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitMathLog(HInvoke* invoke) {
}
void IntrinsicCodeGeneratorRISCV64::VisitMathLog(HInvoke* invoke) {
- codegen_->InvokeRuntime(kQuickLog, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickLog, invoke);
}
void IntrinsicLocationsBuilderRISCV64::VisitMathLog10(HInvoke* invoke) {
@@ -5328,7 +5327,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitMathLog10(HInvoke* invoke) {
}
void IntrinsicCodeGeneratorRISCV64::VisitMathLog10(HInvoke* invoke) {
- codegen_->InvokeRuntime(kQuickLog10, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickLog10, invoke);
}
void IntrinsicLocationsBuilderRISCV64::VisitMathNextAfter(HInvoke* invoke) {
@@ -5336,7 +5335,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitMathNextAfter(HInvoke* invoke) {
}
void IntrinsicCodeGeneratorRISCV64::VisitMathNextAfter(HInvoke* invoke) {
- codegen_->InvokeRuntime(kQuickNextAfter, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickNextAfter, invoke);
}
void IntrinsicLocationsBuilderRISCV64::VisitMathSinh(HInvoke* invoke) {
@@ -5344,7 +5343,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitMathSinh(HInvoke* invoke) {
}
void IntrinsicCodeGeneratorRISCV64::VisitMathSinh(HInvoke* invoke) {
- codegen_->InvokeRuntime(kQuickSinh, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickSinh, invoke);
}
void IntrinsicLocationsBuilderRISCV64::VisitMathTan(HInvoke* invoke) {
@@ -5352,7 +5351,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitMathTan(HInvoke* invoke) {
}
void IntrinsicCodeGeneratorRISCV64::VisitMathTan(HInvoke* invoke) {
- codegen_->InvokeRuntime(kQuickTan, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickTan, invoke);
}
void IntrinsicLocationsBuilderRISCV64::VisitMathTanh(HInvoke* invoke) {
@@ -5360,7 +5359,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitMathTanh(HInvoke* invoke) {
}
void IntrinsicCodeGeneratorRISCV64::VisitMathTanh(HInvoke* invoke) {
- codegen_->InvokeRuntime(kQuickTanh, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickTanh, invoke);
}
void IntrinsicLocationsBuilderRISCV64::VisitMathSqrt(HInvoke* invoke) {
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index 3f021d9aef..5a6b8832c4 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -471,7 +471,7 @@ static void GenFPToFPCall(HInvoke* invoke, CodeGeneratorX86* codegen, QuickEntry
}
// Now do the actual call.
- codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
+ codegen->InvokeRuntime(entry, invoke);
// Extract the return value from the FP stack.
__ fstpl(Address(ESP, 0));
@@ -1012,7 +1012,7 @@ void IntrinsicCodeGeneratorX86::VisitStringCompareTo(HInvoke* invoke) {
codegen_->AddSlowPath(slow_path);
__ j(kEqual, slow_path->GetEntryLabel());
- codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, invoke->GetDexPc(), slow_path);
+ codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, slow_path);
__ Bind(slow_path->GetExitLabel());
}
@@ -1351,7 +1351,7 @@ void IntrinsicCodeGeneratorX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
codegen_->AddSlowPath(slow_path);
__ j(kEqual, slow_path->GetEntryLabel());
- codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke);
CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
__ Bind(slow_path->GetExitLabel());
}
@@ -1373,7 +1373,7 @@ void IntrinsicCodeGeneratorX86::VisitStringNewStringFromChars(HInvoke* invoke) {
// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
//
// all include a null check on `data` before calling that method.
- codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke);
CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
}
@@ -1395,7 +1395,7 @@ void IntrinsicCodeGeneratorX86::VisitStringNewStringFromString(HInvoke* invoke)
codegen_->AddSlowPath(slow_path);
__ j(kEqual, slow_path->GetEntryLabel());
- codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke);
CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
__ Bind(slow_path->GetExitLabel());
}
@@ -3606,7 +3606,7 @@ void IntrinsicCodeGeneratorX86::HandleValueOf(HInvoke* invoke,
auto allocate_instance = [&]() {
DCHECK_EQ(out, InvokeRuntimeCallingConvention().GetRegisterAt(0));
codegen_->LoadIntrinsicDeclaringClass(out, invoke->AsInvokeStaticOrDirect());
- codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke);
CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
};
if (invoke->InputAt(0)->IsIntConstant()) {
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index 14c1ef90aa..281f196f06 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -166,8 +166,7 @@ class InvokePolymorphicSlowPathX86_64 : public SlowPathCode {
// Passing `MethodHandle` object as hidden argument.
__ movl(CpuRegister(RDI), method_handle_);
x86_64_codegen->InvokeRuntime(QuickEntrypointEnum::kQuickInvokePolymorphicWithHiddenReceiver,
- instruction_,
- instruction_->GetDexPc());
+ instruction_);
RestoreLiveRegisters(codegen, instruction_->GetLocations());
__ jmp(GetExitLabel());
@@ -520,7 +519,7 @@ static void GenFPToFPCall(HInvoke* invoke, CodeGeneratorX86_64* codegen,
DCHECK(locations->WillCall());
DCHECK(invoke->IsInvokeStaticOrDirect());
- codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
+ codegen->InvokeRuntime(entry, invoke);
}
void IntrinsicLocationsBuilderX86_64::VisitMathCos(HInvoke* invoke) {
@@ -1240,7 +1239,7 @@ void IntrinsicCodeGeneratorX86_64::VisitStringCompareTo(HInvoke* invoke) {
codegen_->AddSlowPath(slow_path);
__ j(kEqual, slow_path->GetEntryLabel());
- codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, invoke->GetDexPc(), slow_path);
+ codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, slow_path);
__ Bind(slow_path->GetExitLabel());
}
@@ -1565,7 +1564,7 @@ void IntrinsicCodeGeneratorX86_64::VisitStringNewStringFromBytes(HInvoke* invoke
codegen_->AddSlowPath(slow_path);
__ j(kEqual, slow_path->GetEntryLabel());
- codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke);
CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
__ Bind(slow_path->GetExitLabel());
}
@@ -1587,7 +1586,7 @@ void IntrinsicCodeGeneratorX86_64::VisitStringNewStringFromChars(HInvoke* invoke
// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
//
// all include a null check on `data` before calling that method.
- codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke);
CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
}
@@ -1609,7 +1608,7 @@ void IntrinsicCodeGeneratorX86_64::VisitStringNewStringFromString(HInvoke* invok
codegen_->AddSlowPath(slow_path);
__ j(kEqual, slow_path->GetEntryLabel());
- codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke);
CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
__ Bind(slow_path->GetExitLabel());
}
@@ -3416,7 +3415,7 @@ void IntrinsicCodeGeneratorX86_64::HandleValueOf(HInvoke* invoke,
CpuRegister argument = CpuRegister(calling_convention.GetRegisterAt(0));
auto allocate_instance = [&]() {
codegen_->LoadIntrinsicDeclaringClass(argument, invoke);
- codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
+ codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke);
CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
};
if (invoke->InputAt(0)->IsIntConstant()) {