author Almaz Mingaleev <mingaleev@google.com> 2024-09-23 13:26:27 +0000
committer Almaz Mingaleev <mingaleev@google.com> 2024-09-24 07:59:55 +0000
commit 68df0adae73f0258782986fc9ca1f3f82cf084bd (patch)
tree eab63911e4dcaf8ff2a4bea3090042f1d55f1f7f /compiler/optimizing/intrinsics_x86_64.cc
parent 8a5a51adddb5f1055934cd72e47be20fc0be180b (diff)
x86_64: Handle invoke-static in invokeExact fast path.
Bug: 297147201
Test: ./art/test/testrunner/testrunner.py --host --64 --optimizing -b
Test: ./art/test.py --host -g
Change-Id: Ia49285c116e4abfc3da2d78495f85d9131f111b5
Diffstat (limited to 'compiler/optimizing/intrinsics_x86_64.cc')
-rw-r--r-- compiler/optimizing/intrinsics_x86_64.cc | 94
1 file changed, 46 insertions(+), 48 deletions(-)
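For orientation before the diff: the patch widens the invokeExact intrinsic's fast path so that, besides invoke-virtual handles, invoke-static handles are also dispatched inline instead of bailing to the slow path. Below is a minimal standalone C++ model of the branch structure the intrinsic now emits; the enum values, helper name, and boolean flags are illustrative stand-ins, not ART's actual API.

#include <cstdio>

enum class HandleKind { kInvokeVirtual, kInvokeStatic, kOther };
enum class Path { kVirtualDispatch, kDirectCall, kSlowPath };

// Mirrors the emitted control flow: check the call-site type, try the
// virtual fast path (only when the call shape can target an instance
// method), then the static fast path, otherwise fall back to the runtime.
Path SelectPath(bool callsite_type_matches,
                HandleKind kind,
                bool can_target_instance_method,  // models CanTargetInstanceMethod()
                bool receiver_is_null,
                bool method_is_private,
                bool receiver_is_subtype) {
  if (!callsite_type_matches) return Path::kSlowPath;
  if (can_target_instance_method && kind == HandleKind::kInvokeVirtual) {
    if (receiver_is_null) return Path::kSlowPath;      // NPE raised out of line.
    if (method_is_private) return Path::kDirectCall;   // Skip vtable dispatch.
    if (!receiver_is_subtype) return Path::kSlowPath;  // e.g. interface methods.
    return Path::kVirtualDispatch;                     // Load target from vtable.
  }
  if (kind == HandleKind::kInvokeStatic) {
    return Path::kDirectCall;  // New in this patch: call the ArtMethod directly.
  }
  return Path::kSlowPath;
}

int main() {
  Path p = SelectPath(/*callsite_type_matches=*/ true, HandleKind::kInvokeStatic,
                      /*can_target_instance_method=*/ false,
                      /*receiver_is_null=*/ false, /*method_is_private=*/ false,
                      /*receiver_is_subtype=*/ false);
  std::printf("invoke-static takes the fast path: %s\n",
              p == Path::kDirectCall ? "yes" : "no");
  return 0;
}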
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index bd6d6d1889..09074f39df 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -4099,12 +4099,6 @@ static void GenerateVarHandleGet(HInvoke* invoke,
}
void IntrinsicLocationsBuilderX86_64::VisitMethodHandleInvokeExact(HInvoke* invoke) {
- // Don't emit intrinsic code for MethodHandle.invokeExact when it certainly does not target
- // invoke-virtual: if invokeExact is called w/o arguments or if the first argument in that
- // call is not a reference.
- if (!invoke->AsInvokePolymorphic()->CanHaveFastPath()) {
- return;
- }
ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
LocationSummary* locations = new (allocator)
LocationSummary(invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
@@ -4127,7 +4121,6 @@ void IntrinsicLocationsBuilderX86_64::VisitMethodHandleInvokeExact(HInvoke* invo
}
void IntrinsicCodeGeneratorX86_64::VisitMethodHandleInvokeExact(HInvoke* invoke) {
- DCHECK(invoke->AsInvokePolymorphic()->CanHaveFastPath());
LocationSummary* locations = invoke->GetLocations();
CpuRegister method_handle = locations->InAt(0).AsRegister<CpuRegister>();
@@ -4137,17 +4130,6 @@ void IntrinsicCodeGeneratorX86_64::VisitMethodHandleInvokeExact(HInvoke* invoke)
codegen_->AddSlowPath(slow_path);
X86_64Assembler* assembler = codegen_->GetAssembler();
- Address method_handle_kind = Address(method_handle, mirror::MethodHandle::HandleKindOffset());
-
- // If it is not InvokeVirtual then go to slow path.
- // Even if MethodHandle's kind is kInvokeVirtual, the underlying method can still be an interface
- // or a direct method (that's what current `MethodHandles$Lookup.findVirtual` is doing). We don't
- // check whether `method` is an interface method explicitly: in that case the subtype check below
- // will fail.
- // TODO(b/297147201): check whether it can be more precise and what d8/r8 can produce.
- __ cmpl(method_handle_kind, Immediate(mirror::MethodHandle::Kind::kInvokeVirtual));
- __ j(kNotEqual, slow_path->GetEntryLabel());
-
CpuRegister call_site_type =
locations->InAt(invoke->GetNumberOfArguments()).AsRegister<CpuRegister>();
@@ -4156,40 +4138,56 @@ void IntrinsicCodeGeneratorX86_64::VisitMethodHandleInvokeExact(HInvoke* invoke)
__ j(kNotEqual, slow_path->GetEntryLabel());
CpuRegister method = CpuRegister(kMethodRegisterArgument);
-
- // Get method to call.
__ movq(method, Address(method_handle, mirror::MethodHandle::ArtFieldOrMethodOffset()));
- CpuRegister receiver = locations->InAt(1).AsRegister<CpuRegister>();
+ Label static_dispatch;
+ Label execute_target_method;
- __ testl(receiver, receiver);
- __ j(kEqual, slow_path->GetEntryLabel());
+ Address method_handle_kind = Address(method_handle, mirror::MethodHandle::HandleKindOffset());
+ if (invoke->AsInvokePolymorphic()->CanTargetInstanceMethod()) {
+ // Handle invoke-virtual case.
+ // Even if MethodHandle's kind is kInvokeVirtual, the underlying method can still be an
+ // interface or a direct method (that's what current `MethodHandles$Lookup.findVirtual` is
+ // doing). We don't check whether `method` is an interface method explicitly: in that case the
+ // subtype check below will fail.
+ // TODO(b/297147201): check whether it can be more precise and what d8/r8 can produce.
+ __ cmpl(method_handle_kind, Immediate(mirror::MethodHandle::Kind::kInvokeVirtual));
+ __ j(kNotEqual, &static_dispatch);
+ CpuRegister receiver = locations->InAt(1).AsRegister<CpuRegister>();
+
+ __ testl(receiver, receiver);
+ __ j(kEqual, slow_path->GetEntryLabel());
- // Using vtable_index register as temporary in subtype check. It will be overridden later.
- // If `method` is an interface method this check will fail.
- CpuRegister vtable_index = locations->GetTemp(0).AsRegister<CpuRegister>();
- // We deliberately avoid the read barrier, letting the slow path handle the false negatives.
- GenerateSubTypeObjectCheckNoReadBarrier(codegen_,
- slow_path,
- receiver,
- vtable_index,
- Address(method, ArtMethod::DeclaringClassOffset()));
-
- NearLabel execute_target_method;
- // Skip virtual dispatch if `method` is private.
- __ testl(Address(method, ArtMethod::AccessFlagsOffset()), Immediate(kAccPrivate));
- __ j(kNotZero, &execute_target_method);
-
- // MethodIndex is uint16_t.
- __ movzxw(vtable_index, Address(method, ArtMethod::MethodIndexOffset()));
-
- constexpr uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
- // Re-using method register for receiver class.
- __ movl(method, Address(receiver, class_offset));
-
- constexpr uint32_t vtable_offset =
- mirror::Class::EmbeddedVTableOffset(art::PointerSize::k64).Int32Value();
- __ movq(method, Address(method, vtable_index, TIMES_8, vtable_offset));
+ // Skip virtual dispatch if `method` is private.
+ __ testl(Address(method, ArtMethod::AccessFlagsOffset()), Immediate(kAccPrivate));
+ __ j(kNotZero, &execute_target_method);
+
+ // Using vtable_index register as temporary in subtype check. It will be overridden later.
+ // If `method` is an interface method this check will fail.
+ CpuRegister vtable_index = locations->GetTemp(0).AsRegister<CpuRegister>();
+ // We deliberately avoid the read barrier, letting the slow path handle the false negatives.
+ GenerateSubTypeObjectCheckNoReadBarrier(codegen_,
+ slow_path,
+ receiver,
+ vtable_index,
+ Address(method, ArtMethod::DeclaringClassOffset()));
+
+ // MethodIndex is uint16_t.
+ __ movzxw(vtable_index, Address(method, ArtMethod::MethodIndexOffset()));
+
+ constexpr uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
+ // Re-using method register for receiver class.
+ __ movl(method, Address(receiver, class_offset));
+
+ constexpr uint32_t vtable_offset =
+ mirror::Class::EmbeddedVTableOffset(art::PointerSize::k64).Int32Value();
+ __ movq(method, Address(method, vtable_index, TIMES_8, vtable_offset));
+ __ jmp(&execute_target_method);
+ }
+ __ Bind(&static_dispatch);
+ __ cmpl(method_handle_kind, Immediate(mirror::MethodHandle::Kind::kInvokeStatic));
+ __ j(kNotEqual, slow_path->GetEntryLabel());
+ // MH's kind is invoke-static. The method can be called directly, hence fall-through.
__ Bind(&execute_target_method);
__ call(Address(
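The virtual path's three loads above (movzxw / movl / movq) amount to an embedded-vtable lookup: read the uint16_t method index from the ArtMethod, read the receiver's class reference, then index into the vtable embedded in the class object. A small C++ model follows; the offsets and the fake flat heap (standing in for ART's compressed 32-bit heap references) are illustrative only, not ART's real layout.

#include <cstdint>
#include <cstdio>
#include <cstring>

// Illustrative layout constants; NOT ART's real offsets.
constexpr uint32_t kClassOffset = 0;         // mirror::Object::ClassOffset() stand-in.
constexpr uint32_t kVTableOffset = 64;       // EmbeddedVTableOffset(k64) stand-in.
constexpr uint32_t kMethodIndexOffset = 12;  // ArtMethod::MethodIndexOffset() stand-in.

// References are 32-bit offsets into a fake heap, mimicking the 32-bit movl
// load of the receiver's class.
uint64_t LoadVTableTarget(const uint8_t* heap, uint32_t receiver, const uint8_t* method) {
  uint16_t index;   // movzxw: MethodIndex is uint16_t.
  std::memcpy(&index, method + kMethodIndexOffset, sizeof(index));
  uint32_t klass;   // movl: receiver's class reference.
  std::memcpy(&klass, heap + receiver + kClassOffset, sizeof(klass));
  uint64_t target;  // movq: vtable slot at klass + vtable_offset + index * 8.
  std::memcpy(&target, heap + klass + kVTableOffset + index * 8u, sizeof(target));
  return target;
}

int main() {
  uint8_t heap[4096] = {};
  const uint32_t klass = 256, receiver = 1024;  // fake heap offsets
  uint8_t method[64] = {};

  const uint16_t index = 0;
  std::memcpy(method + kMethodIndexOffset, &index, sizeof(index));
  std::memcpy(heap + receiver + kClassOffset, &klass, sizeof(klass));
  const uint64_t entry = 0x1234;  // pretend ArtMethod* stored in the vtable slot
  std::memcpy(heap + klass + kVTableOffset + index * 8u, &entry, sizeof(entry));

  std::printf("target = %#llx\n",
              static_cast<unsigned long long>(LoadVTableTarget(heap, receiver, method)));
  return 0;
}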