arm/arm64: Clean up intrinsic slow paths.

Generalize and use the slow path template IntrinsicSlowPath
from intrinsics_utils.h.

Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Test: aosp_taimen-userdebug boot image is unchanged.
Change-Id: Ia8fa4e1b31c1f190fc5f02671336caec15e4cf4d
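
For reference, a minimal sketch of the generalized template in
intrinsics_utils.h. Only the three template parameters are confirmed by
the instantiation in this diff; the body, the defaults, and the helper
calls below are assumptions that mirror the removed per-architecture code:

    template <typename TDexCallingConvention,
              typename TSlowPathCode = SlowPathCode,
              typename TAssembler = Assembler>
    class IntrinsicSlowPath : public TSlowPathCode {
     public:
      explicit IntrinsicSlowPath(HInvoke* invoke) : TSlowPathCode(invoke), invoke_(invoke) {}

      void EmitNativeCode(CodeGenerator* codegen) override {
        TAssembler* assembler = down_cast<TAssembler*>(codegen->GetAssembler());
        assembler->Bind(this->GetEntryLabel());

        this->SaveLiveRegisters(codegen, invoke_->GetLocations());

        // Copy the arguments into the positions for a regular call.
        TDexCallingConvention calling_convention_visitor;
        IntrinsicVisitor::MoveArguments(invoke_, codegen, &calling_convention_visitor);
        Location method_loc = calling_convention_visitor.GetMethodLocation();

        if (invoke_->IsInvokeStaticOrDirect()) {
          codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), method_loc, this);
        } else {
          codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), method_loc, this);
        }

        // Copy the result back to the expected output.
        Location out = invoke_->GetLocations()->Out();
        if (out.IsValid()) {
          DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
          codegen->MoveFromReturnRegister(out, invoke_->GetType());
        }

        this->RestoreLiveRegisters(codegen, invoke_->GetLocations());
        assembler->Jump(this->GetExitLabel());
      }

      const char* GetDescription() const override { return "IntrinsicSlowPath"; }

     private:
      // The instruction where this slow path is happening.
      HInvoke* const invoke_;

      DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPath);
    };

If TSlowPathCode and TAssembler default to SlowPathCode and Assembler as
sketched, existing x86/x86-64 instantiations keep compiling unchanged,
while arm and arm64 supply their own slow path and assembler types as in
the alias added below. The ARM64-specific EmissionCheckScope around the
call is presumably left to GenerateStaticOrDirectCall/GenerateVirtualCall
rather than the shared template.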
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index d88e034..93a99b1 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -23,6 +23,7 @@
#include "entrypoints/quick/quick_entrypoints.h"
#include "heap_poisoning.h"
#include "intrinsics.h"
+#include "intrinsics_utils.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/object_array-inl.h"
@@ -46,7 +47,6 @@
namespace arm64 {

using helpers::DRegisterFrom;
-using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::LocationFrom;
using helpers::OperandFrom;
@@ -74,87 +74,12 @@
return codegen_->GetGraph()->GetAllocator();
}

+using IntrinsicSlowPathARM64 = IntrinsicSlowPath<InvokeDexCallingConventionVisitorARM64,
+                                                 SlowPathCodeARM64,
+                                                 Arm64Assembler>;
+
#define __ codegen->GetVIXLAssembler()->

-static void MoveFromReturnRegister(Location trg,
- DataType::Type type,
- CodeGeneratorARM64* codegen) {
- if (!trg.IsValid()) {
- DCHECK(type == DataType::Type::kVoid);
- return;
- }
-
- DCHECK_NE(type, DataType::Type::kVoid);
-
- if (DataType::IsIntegralType(type) || type == DataType::Type::kReference) {
- Register trg_reg = RegisterFrom(trg, type);
- Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
- __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
- } else {
- VRegister trg_reg = FPRegisterFrom(trg, type);
- VRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
- __ Fmov(trg_reg, res_reg);
- }
-}
-
-static void MoveArguments(HInvoke* invoke, CodeGeneratorARM64* codegen) {
- InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
- IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
-}
-
-// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
-// call. This will copy the arguments into the positions for a regular call.
-//
-// Note: The actual parameters are required to be in the locations given by the invoke's location
-// summary. If an intrinsic modifies those locations before a slowpath call, they must be
-// restored!
-class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
- public:
- explicit IntrinsicSlowPathARM64(HInvoke* invoke)
- : SlowPathCodeARM64(invoke), invoke_(invoke) { }
-
- void EmitNativeCode(CodeGenerator* codegen_in) override {
- CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
- __ Bind(GetEntryLabel());
-
- SaveLiveRegisters(codegen, invoke_->GetLocations());
-
- MoveArguments(invoke_, codegen);
-
- {
- // Ensure that between the BLR (emitted by Generate*Call) and RecordPcInfo there
- // are no pools emitted.
- vixl::EmissionCheckScope guard(codegen->GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
- if (invoke_->IsInvokeStaticOrDirect()) {
- codegen->GenerateStaticOrDirectCall(
- invoke_->AsInvokeStaticOrDirect(), LocationFrom(kArtMethodRegister), this);
- } else {
- codegen->GenerateVirtualCall(
- invoke_->AsInvokeVirtual(), LocationFrom(kArtMethodRegister), this);
- }
- }
-
- // Copy the result back to the expected output.
- Location out = invoke_->GetLocations()->Out();
- if (out.IsValid()) {
- DCHECK(out.IsRegister()); // TODO: Replace this when we support output in memory.
- DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
- MoveFromReturnRegister(out, invoke_->GetType(), codegen);
- }
-
- RestoreLiveRegisters(codegen, invoke_->GetLocations());
- __ B(GetExitLabel());
- }
-
- const char* GetDescription() const override { return "IntrinsicSlowPathARM64"; }
-
- private:
- // The instruction where this slow path is happening.
- HInvoke* const invoke_;
-
- DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
-};
-
// Slow path implementing the SystemArrayCopy intrinsic copy loop with read barriers.
class ReadBarrierSystemArrayCopySlowPathARM64 : public SlowPathCodeARM64 {
public: