summaryrefslogtreecommitdiff
path: root/compiler/optimizing/intrinsics_x86_64.cc
diff options
context:
space:
mode:
author Almaz Mingaleev <mingaleev@google.com> 2024-10-30 15:35:14 +0000
committer Almaz Mingaleev <mingaleev@google.com> 2024-11-06 16:24:17 +0000
commitcc22b57fd30005bdb03495412408a56c7ba08fb6 (patch)
treed46d259e79dba7d6a481a579fd9420b6c2d8d451 /compiler/optimizing/intrinsics_x86_64.cc
parent52f22d080cc077eaf11a1c32bb07f87339343cd4 (diff)
Don't virtual dispatch non-copied method defined in an interface.
Following what Class::FindVirtualMethodForVirtualOrInterface does. Currently, findVirtual sets the MethodHandle's kind to INVOKE_INTERFACE only if the class on which the lookup is done is an interface. Bug: 297147201 Test: ./art/test/testrunner/testrunner.py -b --host --64 Change-Id: Ib2576f317f7932111a05e464334de69a00e24ca1
Diffstat (limited to 'compiler/optimizing/intrinsics_x86_64.cc')
-rw-r--r--compiler/optimizing/intrinsics_x86_64.cc26
1 file changed, 22 insertions, 4 deletions
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index 38954c8fa1..5e7c3a9611 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -163,7 +163,7 @@ class InvokePolymorphicSlowPathX86_64 : public SlowPathCode {
SaveLiveRegisters(codegen, instruction_->GetLocations());
// Passing `MethodHandle` object as hidden argument.
- __ movq(CpuRegister(RDI), method_handle_);
+ __ movl(CpuRegister(RDI), method_handle_);
x86_64_codegen->InvokeRuntime(QuickEntrypointEnum::kQuickInvokePolymorphicWithHiddenReceiver,
instruction_,
instruction_->GetDexPc());
@@ -4279,10 +4279,28 @@ void IntrinsicCodeGeneratorX86_64::VisitMethodHandleInvokeExact(HInvoke* invoke)
__ testl(Address(method, ArtMethod::AccessFlagsOffset()), Immediate(kAccPrivate));
__ j(kNotZero, &execute_target_method);
- CpuRegister vtable_index = locations->GetTemp(0).AsRegister<CpuRegister>();
+ Label do_virtual_dispatch;
+ CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
+
+ __ movl(temp, Address(method, ArtMethod::DeclaringClassOffset()));
+ __ cmpl(temp, Address(receiver, mirror::Object::ClassOffset()));
+ // If method is defined in the receiver's class, execute it as it is.
+ __ j(kEqual, &execute_target_method);
+
+ __ testl(Address(temp, mirror::Class::AccessFlagsOffset()), Immediate(kAccInterface));
+ // If `method`'s declaring class is not an interface, do virtual dispatch.
+ __ j(kZero, &do_virtual_dispatch);
+
+ __ movl(temp, Address(method, ArtMethod::AccessFlagsOffset()));
+ // These flags are uint32_t and their signed value doesn't fit into int32_t (see b/377275405).
+ __ andl(temp, Immediate(bit_cast<int32_t, uint32_t>(kAccIntrinsic | kAccCopied)));
+ __ cmpl(temp, Immediate(kAccCopied));
+ // If method is defined in an interface and is not copied it should be interface dispatched.
+ __ j(kNotEqual, slow_path->GetEntryLabel());
+ __ Bind(&do_virtual_dispatch);
// MethodIndex is uint16_t.
- __ movzxw(vtable_index, Address(method, ArtMethod::MethodIndexOffset()));
+ __ movzxw(temp, Address(method, ArtMethod::MethodIndexOffset()));
constexpr uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
// Re-using method register for receiver class.
@@ -4290,7 +4308,7 @@ void IntrinsicCodeGeneratorX86_64::VisitMethodHandleInvokeExact(HInvoke* invoke)
constexpr uint32_t vtable_offset =
mirror::Class::EmbeddedVTableOffset(art::PointerSize::k64).Int32Value();
- __ movq(method, Address(method, vtable_index, TIMES_8, vtable_offset));
+ __ movq(method, Address(method, temp, TIMES_8, vtable_offset));
__ Jump(&execute_target_method);
}
__ Bind(&static_dispatch);