author Almaz Mingaleev <mingaleev@google.com> 2024-09-23 13:26:27 +0000
committer Almaz Mingaleev <mingaleev@google.com> 2024-09-24 07:59:55 +0000
commit 68df0adae73f0258782986fc9ca1f3f82cf084bd (patch)
tree eab63911e4dcaf8ff2a4bea3090042f1d55f1f7f /compiler/optimizing
parent 8a5a51adddb5f1055934cd72e47be20fc0be180b (diff)
x86_64: Handle invoke-static in invokeExact fast path.
Bug: 297147201
Test: ./art/test/testrunner/testrunner.py --host --64 --optimizing -b
Test: ./art/test.py --host -g
Change-Id: Ia49285c116e4abfc3da2d78495f85d9131f111b5
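For context, a minimal Java sketch of the case this patch brings onto the fast path (class and method names here are illustrative, not part of the patch): a MethodHandle obtained via findStatic and called through invokeExact compiles to an invoke-polymorphic whose handle kind is kInvokeStatic at runtime.

import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;

public class InvokeStaticExample {
    static int square(int x) { return x * x; }

    public static void main(String[] args) throws Throwable {
        MethodHandle mh = MethodHandles.lookup().findStatic(
                InvokeStaticExample.class, "square",
                MethodType.methodType(int.class, int.class));
        // invoke-polymorphic bytecode; the handle's kind is kInvokeStatic.
        int result = (int) mh.invokeExact(42);
        System.out.println(result);  // 1764
    }
}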
Diffstat (limited to 'compiler/optimizing')
-rw-r--r-- compiler/optimizing/instruction_builder.cc 11
-rw-r--r-- compiler/optimizing/intrinsics_x86_64.cc 94
-rw-r--r-- compiler/optimizing/nodes.h 11
3 files changed, 56 insertions(+), 60 deletions(-)
diff --git a/compiler/optimizing/instruction_builder.cc b/compiler/optimizing/instruction_builder.cc
index 55e3267427..a679f88a58 100644
--- a/compiler/optimizing/instruction_builder.cc
+++ b/compiler/optimizing/instruction_builder.cc
@@ -1394,14 +1394,9 @@ bool HInstructionBuilder::BuildInvokePolymorphic(uint32_t dex_pc,
// MethodHandle.invokeExact intrinsic needs to check whether call-site matches with MethodHandle's
// type. To do that, MethodType corresponding to the call-site is passed as an extra input.
// Other invoke-polymorphic calls do not need it.
- bool is_invoke_exact =
+ bool can_be_intrinsified =
static_cast<Intrinsics>(resolved_method->GetIntrinsic()) ==
Intrinsics::kMethodHandleInvokeExact;
- // Currently intrinsic works for MethodHandle targeting invoke-virtual calls only.
- bool can_be_virtual = number_of_arguments >= 2 &&
- DataType::FromShorty(shorty[1]) == DataType::Type::kReference;
-
- bool can_be_intrinsified = is_invoke_exact && can_be_virtual;
uint32_t number_of_other_inputs = can_be_intrinsified ? 1u : 0u;
@@ -1418,7 +1413,7 @@ bool HInstructionBuilder::BuildInvokePolymorphic(uint32_t dex_pc,
return false;
}
- DCHECK_EQ(invoke->AsInvokePolymorphic()->CanHaveFastPath(), can_be_intrinsified);
+ DCHECK_EQ(invoke->AsInvokePolymorphic()->IsMethodHandleInvokeExact(), can_be_intrinsified);
if (invoke->GetIntrinsic() != Intrinsics::kNone &&
invoke->GetIntrinsic() != Intrinsics::kMethodHandleInvoke &&
@@ -1902,7 +1897,7 @@ bool HInstructionBuilder::SetupInvokeArguments(HInstruction* invoke,
// MethodHandle.invokeExact intrinsic expects MethodType corresponding to the call-site as an
// extra input to determine whether to throw WrongMethodTypeException or execute target method.
- if (invoke_polymorphic->CanHaveFastPath()) {
+ if (invoke_polymorphic->IsMethodHandleInvokeExact()) {
HLoadMethodType* load_method_type =
new (allocator_) HLoadMethodType(graph_->GetCurrentMethod(),
invoke_polymorphic->GetProtoIndex(),
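The extra MethodType input added here feeds the intrinsic's call-site type check: if the call-site type does not exactly match the handle's type, the runtime throws WrongMethodTypeException. A hedged Java illustration of that behavior (names illustrative):

import java.lang.invoke.*;

public class WrongTypeExample {
    static int square(int x) { return x * x; }

    public static void main(String[] args) throws Throwable {
        MethodHandle mh = MethodHandles.lookup().findStatic(
                WrongTypeExample.class, "square",
                MethodType.methodType(int.class, int.class));
        try {
            // Call-site type (long)int does not exactly match (int)int.
            int r = (int) mh.invokeExact(42L);
            System.out.println(r);  // not reached
        } catch (WrongMethodTypeException expected) {
            System.out.println(expected);
        }
    }
}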
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index bd6d6d1889..09074f39df 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -4099,12 +4099,6 @@ static void GenerateVarHandleGet(HInvoke* invoke,
}
void IntrinsicLocationsBuilderX86_64::VisitMethodHandleInvokeExact(HInvoke* invoke) {
- // Don't emit intrinsic code for MethodHandle.invokeExact when it certainly does not target
- // invoke-virtual: if invokeExact is called w/o arguments or if the first argument in that
- // call is not a reference.
- if (!invoke->AsInvokePolymorphic()->CanHaveFastPath()) {
- return;
- }
ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
LocationSummary* locations = new (allocator)
LocationSummary(invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
@@ -4127,7 +4121,6 @@ void IntrinsicLocationsBuilderX86_64::VisitMethodHandleInvokeExact(HInvoke* invo
}
void IntrinsicCodeGeneratorX86_64::VisitMethodHandleInvokeExact(HInvoke* invoke) {
- DCHECK(invoke->AsInvokePolymorphic()->CanHaveFastPath());
LocationSummary* locations = invoke->GetLocations();
CpuRegister method_handle = locations->InAt(0).AsRegister<CpuRegister>();
@@ -4137,17 +4130,6 @@ void IntrinsicCodeGeneratorX86_64::VisitMethodHandleInvokeExact(HInvoke* invoke)
codegen_->AddSlowPath(slow_path);
X86_64Assembler* assembler = codegen_->GetAssembler();
- Address method_handle_kind = Address(method_handle, mirror::MethodHandle::HandleKindOffset());
-
- // If it is not InvokeVirtual then go to slow path.
- // Even if MethodHandle's kind is kInvokeVirtual, the underlying method can still be an interface
- // or a direct method (that's what current `MethodHandles$Lookup.findVirtual` is doing). We don't
- // check whether `method` is an interface method explicitly: in that case the subtype check below
- // will fail.
- // TODO(b/297147201): check whether it can be more precise and what d8/r8 can produce.
- __ cmpl(method_handle_kind, Immediate(mirror::MethodHandle::Kind::kInvokeVirtual));
- __ j(kNotEqual, slow_path->GetEntryLabel());
-
CpuRegister call_site_type =
locations->InAt(invoke->GetNumberOfArguments()).AsRegister<CpuRegister>();
@@ -4156,40 +4138,56 @@ void IntrinsicCodeGeneratorX86_64::VisitMethodHandleInvokeExact(HInvoke* invoke)
__ j(kNotEqual, slow_path->GetEntryLabel());
CpuRegister method = CpuRegister(kMethodRegisterArgument);
-
- // Get method to call.
__ movq(method, Address(method_handle, mirror::MethodHandle::ArtFieldOrMethodOffset()));
- CpuRegister receiver = locations->InAt(1).AsRegister<CpuRegister>();
+ Label static_dispatch;
+ Label execute_target_method;
- __ testl(receiver, receiver);
- __ j(kEqual, slow_path->GetEntryLabel());
+ Address method_handle_kind = Address(method_handle, mirror::MethodHandle::HandleKindOffset());
+ if (invoke->AsInvokePolymorphic()->CanTargetInstanceMethod()) {
+ // Handle invoke-virtual case.
+ // Even if MethodHandle's kind is kInvokeVirtual, the underlying method can still be an
+ // interface or a direct method (that's what current `MethodHandles$Lookup.findVirtual` is
+ // doing). We don't check whether `method` is an interface method explicitly: in that case the
+ // subtype check below will fail.
+ // TODO(b/297147201): check whether it can be more precise and what d8/r8 can produce.
+ __ cmpl(method_handle_kind, Immediate(mirror::MethodHandle::Kind::kInvokeVirtual));
+ __ j(kNotEqual, &static_dispatch);
+ CpuRegister receiver = locations->InAt(1).AsRegister<CpuRegister>();
+
+ __ testl(receiver, receiver);
+ __ j(kEqual, slow_path->GetEntryLabel());
- // Using vtable_index register as temporary in subtype check. It will be overridden later.
- // If `method` is an interface method this check will fail.
- CpuRegister vtable_index = locations->GetTemp(0).AsRegister<CpuRegister>();
- // We deliberately avoid the read barrier, letting the slow path handle the false negatives.
- GenerateSubTypeObjectCheckNoReadBarrier(codegen_,
- slow_path,
- receiver,
- vtable_index,
- Address(method, ArtMethod::DeclaringClassOffset()));
-
- NearLabel execute_target_method;
- // Skip virtual dispatch if `method` is private.
- __ testl(Address(method, ArtMethod::AccessFlagsOffset()), Immediate(kAccPrivate));
- __ j(kNotZero, &execute_target_method);
-
- // MethodIndex is uint16_t.
- __ movzxw(vtable_index, Address(method, ArtMethod::MethodIndexOffset()));
-
- constexpr uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
- // Re-using method register for receiver class.
- __ movl(method, Address(receiver, class_offset));
-
- constexpr uint32_t vtable_offset =
- mirror::Class::EmbeddedVTableOffset(art::PointerSize::k64).Int32Value();
- __ movq(method, Address(method, vtable_index, TIMES_8, vtable_offset));
+ // Skip virtual dispatch if `method` is private.
+ __ testl(Address(method, ArtMethod::AccessFlagsOffset()), Immediate(kAccPrivate));
+ __ j(kNotZero, &execute_target_method);
+
+ // Using vtable_index register as temporary in subtype check. It will be overridden later.
+ // If `method` is an interface method this check will fail.
+ CpuRegister vtable_index = locations->GetTemp(0).AsRegister<CpuRegister>();
+ // We deliberately avoid the read barrier, letting the slow path handle the false negatives.
+ GenerateSubTypeObjectCheckNoReadBarrier(codegen_,
+ slow_path,
+ receiver,
+ vtable_index,
+ Address(method, ArtMethod::DeclaringClassOffset()));
+
+ // MethodIndex is uint16_t.
+ __ movzxw(vtable_index, Address(method, ArtMethod::MethodIndexOffset()));
+
+ constexpr uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
+ // Re-using method register for receiver class.
+ __ movl(method, Address(receiver, class_offset));
+
+ constexpr uint32_t vtable_offset =
+ mirror::Class::EmbeddedVTableOffset(art::PointerSize::k64).Int32Value();
+ __ movq(method, Address(method, vtable_index, TIMES_8, vtable_offset));
+ __ Jump(&execute_target_method);
+ }
+ __ Bind(&static_dispatch);
+ __ cmpl(method_handle_kind, Immediate(mirror::MethodHandle::Kind::kInvokeStatic));
+ __ j(kNotEqual, slow_path->GetEntryLabel());
+ // MH's kind is invoke-static. The method can be called directly, hence fall-through.
__ Bind(&execute_target_method);
__ call(Address(
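The restructured fast path above distinguishes two handle kinds: kInvokeVirtual (guarded by CanTargetInstanceMethod(), with null check, subtype check, and vtable load) and the newly handled kInvokeStatic, which falls through to a direct call. A rough Java sketch exercising both branches (illustrative names; a null receiver on the virtual branch bails to the slow path via the testl check):

import java.lang.invoke.*;

public class DispatchKinds {
    int twice(int x) { return 2 * x; }
    static int half(int x) { return x / 2; }

    public static void main(String[] args) throws Throwable {
        MethodHandles.Lookup lookup = MethodHandles.lookup();
        // kInvokeVirtual: the first invokeExact argument is the receiver.
        MethodHandle virt = lookup.findVirtual(DispatchKinds.class, "twice",
                MethodType.methodType(int.class, int.class));
        int a = (int) virt.invokeExact(new DispatchKinds(), 21);
        // kInvokeStatic: no receiver, handled by the new static_dispatch branch.
        MethodHandle stat = lookup.findStatic(DispatchKinds.class, "half",
                MethodType.methodType(int.class, int.class));
        int b = (int) stat.invokeExact(84);
        System.out.println(a + " " + b);  // 42 42
    }
}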
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 2477c9fe51..fd00a5e2fe 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -4940,10 +4940,13 @@ class HInvokePolymorphic final : public HInvoke {
dex::ProtoIndex GetProtoIndex() { return proto_idx_; }
- // Whether we can do direct invocation of the method handle.
- bool CanHaveFastPath() const {
- return GetIntrinsic() == Intrinsics::kMethodHandleInvokeExact &&
- GetNumberOfArguments() >= 2 &&
+ bool IsMethodHandleInvokeExact() const {
+ return GetIntrinsic() == Intrinsics::kMethodHandleInvokeExact;
+ }
+
+ bool CanTargetInstanceMethod() const {
+ DCHECK(IsMethodHandleInvokeExact());
+ return GetNumberOfArguments() >= 2 &&
InputAt(1)->GetType() == DataType::Type::kReference;
}
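With the predicate split, the builder intrinsifies every invokeExact call; CanTargetInstanceMethod() only decides whether the virtual-dispatch branch is emitted. A hedged Java example of a call for which it returns false, since the first argument after the handle is a primitive (illustrative names):

import java.lang.invoke.*;

public class PrimitiveFirstArg {
    static int negate(int x) { return -x; }

    public static void main(String[] args) throws Throwable {
        MethodHandle mh = MethodHandles.lookup().findStatic(
                PrimitiveFirstArg.class, "negate",
                MethodType.methodType(int.class, int.class));
        // First argument is an int, so CanTargetInstanceMethod() is false and
        // only the static_dispatch branch is emitted; before this patch such
        // calls skipped the intrinsic fast path entirely.
        System.out.println((int) mh.invokeExact(7));  // -7
    }
}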