author    | 2025-03-10 07:49:20 -0700
committer | 2025-03-10 09:26:04 -0700
commit    | 336a1845af10f9f3ad7c9f7f8e1ae21bb7a03cd5 (patch)
tree      | 021ed3415ad8b8535a3b356f088f0d37944b386a /compiler
parent    | ee8ab3125768bbf60347c507103a910553acd158 (diff)
Revert "Call target method in accessor MHs when it is set."
Revert submission 3382609-target-accessors
Reason for revert: x86.poison.64 failures on LUCI
Reverted changes: /q/submissionid:3382609-target-accessors
Change-Id: I384945809bc18ac4dfc937af088c7dbe30ac5c5c
Diffstat (limited to 'compiler')
-rw-r--r-- | compiler/optimizing/intrinsics_arm64.cc  | 35
-rw-r--r-- | compiler/optimizing/intrinsics_x86_64.cc | 35
2 files changed, 9 insertions, 61 deletions
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index c4f8681631..75607b77e0 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -16,8 +16,6 @@
 
 #include "intrinsics_arm64.h"
 
-#include "aarch64/assembler-aarch64.h"
-#include "aarch64/operands-aarch64.h"
 #include "arch/arm64/callee_save_frame_arm64.h"
 #include "arch/arm64/instruction_set_features_arm64.h"
 #include "art_method.h"
@@ -35,7 +33,6 @@
 #include "mirror/array-inl.h"
 #include "mirror/class.h"
 #include "mirror/method_handle_impl.h"
-#include "mirror/method_type.h"
 #include "mirror/object.h"
 #include "mirror/object_array-inl.h"
 #include "mirror/reference.h"
@@ -5984,46 +5981,30 @@ void IntrinsicLocationsBuilderARM64::VisitMethodHandleInvokeExact(HInvoke* invok
   InvokeDexCallingConventionVisitorARM64 calling_convention;
   locations->SetOut(calling_convention.GetReturnLocation(invoke->GetType()));
 
+  locations->SetInAt(0, Location::RequiresRegister());
+
   // Accomodating LocationSummary for underlying invoke-* call.
   uint32_t number_of_args = invoke->GetNumberOfArguments();
-
   for (uint32_t i = 1; i < number_of_args; ++i) {
     locations->SetInAt(i, calling_convention.GetNextLocation(invoke->InputAt(i)->GetType()));
   }
 
-  // Passing MethodHandle object as the last parameter: accessors implementation rely on it.
-  DCHECK_EQ(invoke->InputAt(0)->GetType(), DataType::Type::kReference);
-  Location receiver_mh_loc = calling_convention.GetNextLocation(DataType::Type::kReference);
-  locations->SetInAt(0, receiver_mh_loc);
-
   // The last input is MethodType object corresponding to the call-site.
   locations->SetInAt(number_of_args, Location::RequiresRegister());
 
   locations->AddTemp(calling_convention.GetMethodLocation());
   locations->AddRegisterTemps(4);
-
-  if (!receiver_mh_loc.IsRegister()) {
-    locations->AddTemp(Location::RequiresRegister());
-  }
 }
 
 void IntrinsicCodeGeneratorARM64::VisitMethodHandleInvokeExact(HInvoke* invoke) {
   LocationSummary* locations = invoke->GetLocations();
 
-  MacroAssembler* masm = codegen_->GetVIXLAssembler();
-
-  Location receiver_mh_loc = locations->InAt(0);
-  Register method_handle = receiver_mh_loc.IsRegister()
-      ? InputRegisterAt(invoke, 0)
-      : WRegisterFrom(locations->GetTemp(5));
-  if (!receiver_mh_loc.IsRegister()) {
-    DCHECK(receiver_mh_loc.IsStackSlot());
-    __ Ldr(method_handle.W(), MemOperand(sp, receiver_mh_loc.GetStackIndex()));
-  }
+  Register method_handle = InputRegisterAt(invoke, 0);
 
   SlowPathCodeARM64* slow_path =
       new (codegen_->GetScopedAllocator()) InvokePolymorphicSlowPathARM64(invoke, method_handle);
   codegen_->AddSlowPath(slow_path);
 
+  MacroAssembler* masm = codegen_->GetVIXLAssembler();
   Register call_site_type = InputRegisterAt(invoke, invoke->GetNumberOfArguments());
@@ -6038,18 +6019,10 @@ void IntrinsicCodeGeneratorARM64::VisitMethodHandleInvokeExact(HInvoke* invoke)
   __ Ldr(method, HeapOperand(method_handle.W(), mirror::MethodHandle::ArtFieldOrMethodOffset()));
 
   vixl::aarch64::Label execute_target_method;
-  vixl::aarch64::Label method_dispatch;
 
   Register method_handle_kind = WRegisterFrom(locations->GetTemp(2));
   __ Ldr(method_handle_kind,
          HeapOperand(method_handle.W(), mirror::MethodHandle::HandleKindOffset()));
-
-  __ Cmp(method_handle_kind, Operand(mirror::MethodHandle::Kind::kFirstAccessorKind));
-  __ B(lt, &method_dispatch);
-  __ Ldr(method, HeapOperand(method_handle.W(), mirror::MethodHandleImpl::TargetOffset()));
-  __ B(&execute_target_method);
-
-  __ Bind(&method_dispatch);
   __ Cmp(method_handle_kind, Operand(mirror::MethodHandle::Kind::kInvokeStatic));
   __ B(eq, &execute_target_method);
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index 2508df3382..281f196f06 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -32,7 +32,6 @@
 #include "intrinsics_utils.h"
 #include "lock_word.h"
 #include "mirror/array-inl.h"
-#include "mirror/method_handle_impl.h"
 #include "mirror/object_array-inl.h"
 #include "mirror/reference.h"
 #include "mirror/string.h"
@@ -4242,47 +4241,31 @@ void IntrinsicLocationsBuilderX86_64::VisitMethodHandleInvokeExact(HInvoke* invo
   InvokeDexCallingConventionVisitorX86_64 calling_convention;
   locations->SetOut(calling_convention.GetReturnLocation(invoke->GetType()));
 
-  uint32_t number_of_args = invoke->GetNumberOfArguments();
+  locations->SetInAt(0, Location::RequiresRegister());
 
   // Accomodating LocationSummary for underlying invoke-* call.
+  uint32_t number_of_args = invoke->GetNumberOfArguments();
   for (uint32_t i = 1; i < number_of_args; ++i) {
     locations->SetInAt(i, calling_convention.GetNextLocation(invoke->InputAt(i)->GetType()));
   }
 
-  // Passing MethodHandle object as the last parameter: accessors implementation rely on it.
-  DCHECK_EQ(invoke->InputAt(0)->GetType(), DataType::Type::kReference);
-  Location receiver_mh_loc = calling_convention.GetNextLocation(DataType::Type::kReference);
-  locations->SetInAt(0, receiver_mh_loc);
-
   // The last input is MethodType object corresponding to the call-site.
   locations->SetInAt(number_of_args, Location::RequiresRegister());
 
   locations->AddTemp(Location::RequiresRegister());  // Hidden arg for invoke-interface.
   locations->AddTemp(Location::RegisterLocation(RAX));
-
-  if (!receiver_mh_loc.IsRegister()) {
-    locations->AddTemp(Location::RequiresRegister());
-  }
 }
 
 void IntrinsicCodeGeneratorX86_64::VisitMethodHandleInvokeExact(HInvoke* invoke) {
   LocationSummary* locations = invoke->GetLocations();
 
-  X86_64Assembler* assembler = codegen_->GetAssembler();
-
-  Location receiver_mh_loc = locations->InAt(0);
-  CpuRegister method_handle = receiver_mh_loc.IsRegister()
-      ? receiver_mh_loc.AsRegister<CpuRegister>()
-      : locations->GetTemp(2).AsRegister<CpuRegister>();
-
-  if (!receiver_mh_loc.IsRegister()) {
-    DCHECK(receiver_mh_loc.IsStackSlot());
-    __ movl(method_handle, Address(CpuRegister(RSP), receiver_mh_loc.GetStackIndex()));
-  }
+  CpuRegister method_handle = locations->InAt(0).AsRegister<CpuRegister>();
 
   SlowPathCode* slow_path =
       new (codegen_->GetScopedAllocator()) InvokePolymorphicSlowPathX86_64(invoke, method_handle);
   codegen_->AddSlowPath(slow_path);
 
+  X86_64Assembler* assembler = codegen_->GetAssembler();
   CpuRegister call_site_type =
       locations->InAt(invoke->GetNumberOfArguments()).AsRegister<CpuRegister>();
@@ -4295,18 +4278,10 @@ void IntrinsicCodeGeneratorX86_64::VisitMethodHandleInvokeExact(HInvoke* invoke)
   CpuRegister method = CpuRegister(kMethodRegisterArgument);
   __ movq(method, Address(method_handle, mirror::MethodHandle::ArtFieldOrMethodOffset()));
 
-  Label execute_target_method;
-  Label method_dispatch;
   Label static_dispatch;
+  Label execute_target_method;
 
   Address method_handle_kind = Address(method_handle, mirror::MethodHandle::HandleKindOffset());
-
-  __ cmpl(method_handle_kind, Immediate(mirror::MethodHandle::kFirstAccessorKind));
-  __ j(kLess, &method_dispatch);
-  __ movq(method, Address(method_handle, mirror::MethodHandleImpl::TargetOffset()));
-  __ Jump(&execute_target_method);
-
-  __ Bind(&method_dispatch);
   if (invoke->AsInvokePolymorphic()->CanTargetInstanceMethod()) {
     CpuRegister receiver = locations->InAt(1).AsRegister<CpuRegister>();