-rw-r--r--  build/Android.gtest.mk                               |   4
-rw-r--r--  compiler/utils/arm64/assembler_arm64.h               |   5
-rw-r--r--  compiler/utils/arm64/managed_register_arm64.cc       |   2
-rw-r--r--  compiler/utils/arm64/managed_register_arm64.h        |   8
-rw-r--r--  compiler/utils/arm64/managed_register_arm64_test.cc  | 170
-rw-r--r--  runtime/arch/arm64/registers_arm64.h                 |   5
6 files changed, 175 insertions, 19 deletions
diff --git a/build/Android.gtest.mk b/build/Android.gtest.mk
index 5b830564cb..ef5819dd5b 100644
--- a/build/Android.gtest.mk
+++ b/build/Android.gtest.mk
@@ -177,7 +177,7 @@ define build-art-test
   LOCAL_CLANG := $(ART_TARGET_CLANG)
   LOCAL_CFLAGS += $(ART_TARGET_CFLAGS) $(ART_TARGET_DEBUG_CFLAGS)
   LOCAL_CFLAGS_x86 := $(ART_TARGET_CFLAGS_x86)
-  LOCAL_SHARED_LIBRARIES += libdl libicuuc libicui18n libnativehelper libz libcutils
+  LOCAL_SHARED_LIBRARIES += libdl libicuuc libicui18n libnativehelper libz libcutils libvixl
   LOCAL_STATIC_LIBRARIES += libgtest
   LOCAL_MODULE_PATH_32 := $(ART_BASE_NATIVETEST_OUT)
   LOCAL_MODULE_PATH_64 := $(ART_BASE_NATIVETEST_OUT)64
@@ -200,7 +200,7 @@ $$(art_gtest_target): $$(art_gtest_target)$(ART_PHONY_TEST_TARGET_SUFFIX)
   LOCAL_CLANG := $(ART_HOST_CLANG)
   LOCAL_CFLAGS += $(ART_HOST_CFLAGS) $(ART_HOST_DEBUG_CFLAGS)
   LOCAL_SHARED_LIBRARIES += libicuuc-host libicui18n-host libnativehelper libz-host
-  LOCAL_STATIC_LIBRARIES += libcutils
+  LOCAL_STATIC_LIBRARIES += libcutils libvixl
   ifneq ($(WITHOUT_HOST_CLANG),true)
     # GCC host compiled tests fail with this linked, presumably due to destructors that run.
     LOCAL_STATIC_LIBRARIES += libgtest_host
diff --git a/compiler/utils/arm64/assembler_arm64.h b/compiler/utils/arm64/assembler_arm64.h
index 022072461c..8acd1f9950 100644
--- a/compiler/utils/arm64/assembler_arm64.h
+++ b/compiler/utils/arm64/assembler_arm64.h
@@ -197,6 +197,8 @@ class Arm64Assembler FINAL : public Assembler {
     CHECK(code < kNumberOfCoreRegisters) << code;
     if (code == SP) {
       return vixl::sp;
+    } else if (code == XZR) {
+      return vixl::xzr;
     }
     return vixl::Register::XRegFromCode(code);
   }
@@ -243,6 +245,9 @@ class Arm64Assembler FINAL : public Assembler {

   // List of exception blocks to generate at the end of the code cache.
   std::vector<Arm64Exception*> exception_blocks_;
+
+  // Used for testing.
+  friend class Arm64ManagedRegister_VixlRegisters_Test;
 };

 class Arm64Exception {
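Note: the reg_x() hunk above only shows the changed lines. For orientation, here is a sketch of the whole helper as it reads with this patch applied, reconstructed from the hunk's context rather than quoted from the tree:

    // Codes 0-30 map straight to VIXL X registers; ART's two special ids
    // for hardware encoding 31 are translated explicitly.
    static vixl::Register reg_x(int code) {
      CHECK(code < kNumberOfCoreRegisters) << code;  // kNumberOfCoreRegisters is now 33
      if (code == SP) {
        return vixl::sp;    // stack-pointer view of encoding 31
      } else if (code == XZR) {
        return vixl::xzr;   // zero-register view of encoding 31
      }
      return vixl::Register::XRegFromCode(code);
    }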
diff --git a/compiler/utils/arm64/managed_register_arm64.cc b/compiler/utils/arm64/managed_register_arm64.cc
index de5cb8cd8d..8977313256 100644
--- a/compiler/utils/arm64/managed_register_arm64.cc
+++ b/compiler/utils/arm64/managed_register_arm64.cc
@@ -53,7 +53,7 @@ int Arm64ManagedRegister::RegNo() const {
   CHECK(!IsNoRegister());
   int no;
   if (IsCoreRegister()) {
-    if (IsStackPointer()) {
+    if (IsZeroRegister()) {
       no = static_cast<int>(X31);
     } else {
       no = static_cast<int>(AsCoreRegister());
diff --git a/compiler/utils/arm64/managed_register_arm64.h b/compiler/utils/arm64/managed_register_arm64.h
index 80f17f5eb1..a0f520f6a7 100644
--- a/compiler/utils/arm64/managed_register_arm64.h
+++ b/compiler/utils/arm64/managed_register_arm64.h
@@ -24,7 +24,7 @@ namespace art {
 namespace arm64 {

-const int kNumberOfCoreRegIds = 32;
+const int kNumberOfCoreRegIds = kNumberOfCoreRegisters;
 const int kNumberOfWRegIds = kNumberOfWRegisters;
 const int kNumberOfDRegIds = kNumberOfDRegisters;
 const int kNumberOfSRegIds = kNumberOfSRegisters;

@@ -78,7 +78,7 @@ class Arm64ManagedRegister : public ManagedRegister {

   WRegister AsOverlappingCoreRegisterLow() const {
     CHECK(IsValidManagedRegister());
-    if (IsStackPointer()) return W31;
+    if (IsZeroRegister()) return W31;
     return static_cast<WRegister>(AsCoreRegister());
   }

@@ -189,6 +189,10 @@ class Arm64ManagedRegister : public ManagedRegister {
     return IsCoreRegister() && (id_ == SP);
   }

+  bool IsZeroRegister() const {
+    return IsCoreRegister() && (id_ == XZR);
+  }
+
   int RegId() const {
     CHECK(!IsNoRegister());
     return id_;
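The net effect of the managed-register changes is that XZR, not SP, is now the id that folds back onto X31. A usage sketch, assuming this patch is applied (not code from the patch itself; CHECK/CHECK_EQ are ART's macros from base/logging.h):

    // XZR gets its own managed-register id but still reports register
    // number 31 and overlaps W31, exactly like X31 does.
    Arm64ManagedRegister xzr = Arm64ManagedRegister::FromCoreRegister(XZR);
    CHECK(xzr.IsZeroRegister());                        // new predicate from this patch
    CHECK(!xzr.IsStackPointer());                       // SP remains a distinct id
    CHECK_EQ(static_cast<int>(X31), xzr.RegNo());       // folded onto slot 31
    CHECK_EQ(W31, xzr.AsOverlappingCoreRegisterLow());  // low 32 bits are W31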
diff --git a/compiler/utils/arm64/managed_register_arm64_test.cc b/compiler/utils/arm64/managed_register_arm64_test.cc
index 88c01ee793..f149f1bcf1 100644
--- a/compiler/utils/arm64/managed_register_arm64_test.cc
+++ b/compiler/utils/arm64/managed_register_arm64_test.cc
@@ -15,6 +15,7 @@
  */

 #include "globals.h"
+#include "assembler_arm64.h"
 #include "managed_register_arm64.h"

 #include "gtest/gtest.h"
@@ -295,9 +296,8 @@ TEST(Arm64ManagedRegister, Equals) {

   Arm64ManagedRegister reg_X31 = Arm64ManagedRegister::FromCoreRegister(X31);
   EXPECT_TRUE(!reg_X31.Equals(Arm64ManagedRegister::NoRegister()));
-  // TODO: Fix the infrastructure, then re-enable.
-  // EXPECT_TRUE(!reg_X31.Equals(Arm64ManagedRegister::FromCoreRegister(SP)));
-  // EXPECT_TRUE(reg_X31.Equals(Arm64ManagedRegister::FromCoreRegister(XZR)));
+  EXPECT_TRUE(reg_X31.Equals(Arm64ManagedRegister::FromCoreRegister(SP)));
+  EXPECT_TRUE(!reg_X31.Equals(Arm64ManagedRegister::FromCoreRegister(XZR)));
   EXPECT_TRUE(!reg_X31.Equals(Arm64ManagedRegister::FromWRegister(W31)));
   EXPECT_TRUE(!reg_X31.Equals(Arm64ManagedRegister::FromWRegister(WZR)));
   EXPECT_TRUE(!reg_X31.Equals(Arm64ManagedRegister::FromSRegister(S0)));
@@ -305,8 +305,7 @@ TEST(Arm64ManagedRegister, Equals) {

   Arm64ManagedRegister reg_SP = Arm64ManagedRegister::FromCoreRegister(SP);
   EXPECT_TRUE(!reg_SP.Equals(Arm64ManagedRegister::NoRegister()));
-  // TODO: We expect these to pass - SP has a different semantic than X31/XZR.
-  // EXPECT_TRUE(!reg_SP.Equals(Arm64ManagedRegister::FromCoreRegister(X31)));
+  EXPECT_TRUE(reg_SP.Equals(Arm64ManagedRegister::FromCoreRegister(X31)));
   EXPECT_TRUE(!reg_SP.Equals(Arm64ManagedRegister::FromCoreRegister(XZR)));
   EXPECT_TRUE(!reg_SP.Equals(Arm64ManagedRegister::FromWRegister(W31)));
   EXPECT_TRUE(!reg_SP.Equals(Arm64ManagedRegister::FromSRegister(S0)));
@@ -453,17 +452,17 @@ TEST(Arm64ManagedRegister, Overlaps) {

   reg = Arm64ManagedRegister::FromCoreRegister(XZR);
   reg_o = Arm64ManagedRegister::FromWRegister(WZR);
-  // TODO: Overlap not implemented, yet
-  // EXPECT_TRUE(reg.Overlaps(Arm64ManagedRegister::FromCoreRegister(X31)));
+  EXPECT_TRUE(reg.Overlaps(Arm64ManagedRegister::FromCoreRegister(X31)));
   EXPECT_TRUE(!reg.Overlaps(Arm64ManagedRegister::FromCoreRegister(X1)));
-  // EXPECT_TRUE(reg.Overlaps(Arm64ManagedRegister::FromCoreRegister(SP)));
-  // EXPECT_TRUE(reg.Overlaps(Arm64ManagedRegister::FromWRegister(W31)));
+  EXPECT_TRUE(reg.Overlaps(Arm64ManagedRegister::FromCoreRegister(SP)));
+  EXPECT_TRUE(reg.Overlaps(Arm64ManagedRegister::FromWRegister(W31)));
   EXPECT_TRUE(!reg.Overlaps(Arm64ManagedRegister::FromWRegister(W1)));
   EXPECT_TRUE(!reg.Overlaps(Arm64ManagedRegister::FromWRegister(W12)));
   EXPECT_TRUE(!reg.Overlaps(Arm64ManagedRegister::FromWRegister(W19)));
   EXPECT_EQ(X31, reg_o.AsOverlappingWRegisterCore());
-  // TODO: XZR is not a core register right now.
-  // EXPECT_EQ(W31, reg.AsOverlappingCoreRegisterLow());
+  EXPECT_EQ(SP, reg_o.AsOverlappingWRegisterCore());
+  EXPECT_NE(XZR, reg_o.AsOverlappingWRegisterCore());
+  EXPECT_EQ(W31, reg.AsOverlappingCoreRegisterLow());
   EXPECT_TRUE(!reg.Overlaps(Arm64ManagedRegister::FromSRegister(S0)));
   EXPECT_TRUE(!reg.Overlaps(Arm64ManagedRegister::FromSRegister(S1)));
   EXPECT_TRUE(!reg.Overlaps(Arm64ManagedRegister::FromSRegister(S2)));
@@ -610,5 +609,154 @@ TEST(Arm64ManagedRegister, Overlaps) {
   EXPECT_TRUE(!reg.Overlaps(Arm64ManagedRegister::FromDRegister(D20)));
 }

+TEST(Arm64ManagedRegister, VixlRegisters) {
+  // X Registers.
+  EXPECT_TRUE(vixl::x0.Is(Arm64Assembler::reg_x(X0)));
+  EXPECT_TRUE(vixl::x1.Is(Arm64Assembler::reg_x(X1)));
+  EXPECT_TRUE(vixl::x2.Is(Arm64Assembler::reg_x(X2)));
+  EXPECT_TRUE(vixl::x3.Is(Arm64Assembler::reg_x(X3)));
+  EXPECT_TRUE(vixl::x4.Is(Arm64Assembler::reg_x(X4)));
+  EXPECT_TRUE(vixl::x5.Is(Arm64Assembler::reg_x(X5)));
+  EXPECT_TRUE(vixl::x6.Is(Arm64Assembler::reg_x(X6)));
+  EXPECT_TRUE(vixl::x7.Is(Arm64Assembler::reg_x(X7)));
+  EXPECT_TRUE(vixl::x8.Is(Arm64Assembler::reg_x(X8)));
+  EXPECT_TRUE(vixl::x9.Is(Arm64Assembler::reg_x(X9)));
+  EXPECT_TRUE(vixl::x10.Is(Arm64Assembler::reg_x(X10)));
+  EXPECT_TRUE(vixl::x11.Is(Arm64Assembler::reg_x(X11)));
+  EXPECT_TRUE(vixl::x12.Is(Arm64Assembler::reg_x(X12)));
+  EXPECT_TRUE(vixl::x13.Is(Arm64Assembler::reg_x(X13)));
+  EXPECT_TRUE(vixl::x14.Is(Arm64Assembler::reg_x(X14)));
+  EXPECT_TRUE(vixl::x15.Is(Arm64Assembler::reg_x(X15)));
+  EXPECT_TRUE(vixl::x16.Is(Arm64Assembler::reg_x(X16)));
+  EXPECT_TRUE(vixl::x17.Is(Arm64Assembler::reg_x(X17)));
+  EXPECT_TRUE(vixl::x18.Is(Arm64Assembler::reg_x(X18)));
+  EXPECT_TRUE(vixl::x19.Is(Arm64Assembler::reg_x(X19)));
+  EXPECT_TRUE(vixl::x20.Is(Arm64Assembler::reg_x(X20)));
+  EXPECT_TRUE(vixl::x21.Is(Arm64Assembler::reg_x(X21)));
+  EXPECT_TRUE(vixl::x22.Is(Arm64Assembler::reg_x(X22)));
+  EXPECT_TRUE(vixl::x23.Is(Arm64Assembler::reg_x(X23)));
+  EXPECT_TRUE(vixl::x24.Is(Arm64Assembler::reg_x(X24)));
+  EXPECT_TRUE(vixl::x25.Is(Arm64Assembler::reg_x(X25)));
+  EXPECT_TRUE(vixl::x26.Is(Arm64Assembler::reg_x(X26)));
+  EXPECT_TRUE(vixl::x27.Is(Arm64Assembler::reg_x(X27)));
+  EXPECT_TRUE(vixl::x28.Is(Arm64Assembler::reg_x(X28)));
+  EXPECT_TRUE(vixl::x29.Is(Arm64Assembler::reg_x(X29)));
+  EXPECT_TRUE(vixl::x30.Is(Arm64Assembler::reg_x(X30)));
+  // FIXME: Take a look here.
+  EXPECT_TRUE(vixl::sp.Is(Arm64Assembler::reg_x(X31)));
+  EXPECT_TRUE(!vixl::x31.Is(Arm64Assembler::reg_x(X31)));
+
+  EXPECT_TRUE(vixl::x18.Is(Arm64Assembler::reg_x(TR)));
+  EXPECT_TRUE(vixl::ip0.Is(Arm64Assembler::reg_x(IP0)));
+  EXPECT_TRUE(vixl::ip1.Is(Arm64Assembler::reg_x(IP1)));
+  EXPECT_TRUE(vixl::x29.Is(Arm64Assembler::reg_x(FP)));
+  EXPECT_TRUE(vixl::lr.Is(Arm64Assembler::reg_x(LR)));
+  EXPECT_TRUE(vixl::sp.Is(Arm64Assembler::reg_x(SP)));
+  EXPECT_TRUE(vixl::xzr.Is(Arm64Assembler::reg_x(XZR)));
+
+  // W Registers.
+  EXPECT_TRUE(vixl::w0.Is(Arm64Assembler::reg_w(W0)));
+  EXPECT_TRUE(vixl::w1.Is(Arm64Assembler::reg_w(W1)));
+  EXPECT_TRUE(vixl::w2.Is(Arm64Assembler::reg_w(W2)));
+  EXPECT_TRUE(vixl::w3.Is(Arm64Assembler::reg_w(W3)));
+  EXPECT_TRUE(vixl::w4.Is(Arm64Assembler::reg_w(W4)));
+  EXPECT_TRUE(vixl::w5.Is(Arm64Assembler::reg_w(W5)));
+  EXPECT_TRUE(vixl::w6.Is(Arm64Assembler::reg_w(W6)));
+  EXPECT_TRUE(vixl::w7.Is(Arm64Assembler::reg_w(W7)));
+  EXPECT_TRUE(vixl::w8.Is(Arm64Assembler::reg_w(W8)));
+  EXPECT_TRUE(vixl::w9.Is(Arm64Assembler::reg_w(W9)));
+  EXPECT_TRUE(vixl::w10.Is(Arm64Assembler::reg_w(W10)));
+  EXPECT_TRUE(vixl::w11.Is(Arm64Assembler::reg_w(W11)));
+  EXPECT_TRUE(vixl::w12.Is(Arm64Assembler::reg_w(W12)));
+  EXPECT_TRUE(vixl::w13.Is(Arm64Assembler::reg_w(W13)));
+  EXPECT_TRUE(vixl::w14.Is(Arm64Assembler::reg_w(W14)));
+  EXPECT_TRUE(vixl::w15.Is(Arm64Assembler::reg_w(W15)));
+  EXPECT_TRUE(vixl::w16.Is(Arm64Assembler::reg_w(W16)));
+  EXPECT_TRUE(vixl::w17.Is(Arm64Assembler::reg_w(W17)));
+  EXPECT_TRUE(vixl::w18.Is(Arm64Assembler::reg_w(W18)));
+  EXPECT_TRUE(vixl::w19.Is(Arm64Assembler::reg_w(W19)));
+  EXPECT_TRUE(vixl::w20.Is(Arm64Assembler::reg_w(W20)));
+  EXPECT_TRUE(vixl::w21.Is(Arm64Assembler::reg_w(W21)));
+  EXPECT_TRUE(vixl::w22.Is(Arm64Assembler::reg_w(W22)));
+  EXPECT_TRUE(vixl::w23.Is(Arm64Assembler::reg_w(W23)));
+  EXPECT_TRUE(vixl::w24.Is(Arm64Assembler::reg_w(W24)));
+  EXPECT_TRUE(vixl::w25.Is(Arm64Assembler::reg_w(W25)));
+  EXPECT_TRUE(vixl::w26.Is(Arm64Assembler::reg_w(W26)));
+  EXPECT_TRUE(vixl::w27.Is(Arm64Assembler::reg_w(W27)));
+  EXPECT_TRUE(vixl::w28.Is(Arm64Assembler::reg_w(W28)));
+  EXPECT_TRUE(vixl::w29.Is(Arm64Assembler::reg_w(W29)));
+  EXPECT_TRUE(vixl::w30.Is(Arm64Assembler::reg_w(W30)));
+  EXPECT_TRUE(vixl::w31.Is(Arm64Assembler::reg_w(W31)));
+  EXPECT_TRUE(vixl::wzr.Is(Arm64Assembler::reg_w(WZR)));
+
+  // D Registers.
+  EXPECT_TRUE(vixl::d0.Is(Arm64Assembler::reg_d(D0)));
+  EXPECT_TRUE(vixl::d1.Is(Arm64Assembler::reg_d(D1)));
+  EXPECT_TRUE(vixl::d2.Is(Arm64Assembler::reg_d(D2)));
+  EXPECT_TRUE(vixl::d3.Is(Arm64Assembler::reg_d(D3)));
+  EXPECT_TRUE(vixl::d4.Is(Arm64Assembler::reg_d(D4)));
+  EXPECT_TRUE(vixl::d5.Is(Arm64Assembler::reg_d(D5)));
+  EXPECT_TRUE(vixl::d6.Is(Arm64Assembler::reg_d(D6)));
+  EXPECT_TRUE(vixl::d7.Is(Arm64Assembler::reg_d(D7)));
+  EXPECT_TRUE(vixl::d8.Is(Arm64Assembler::reg_d(D8)));
+  EXPECT_TRUE(vixl::d9.Is(Arm64Assembler::reg_d(D9)));
+  EXPECT_TRUE(vixl::d10.Is(Arm64Assembler::reg_d(D10)));
+  EXPECT_TRUE(vixl::d11.Is(Arm64Assembler::reg_d(D11)));
+  EXPECT_TRUE(vixl::d12.Is(Arm64Assembler::reg_d(D12)));
+  EXPECT_TRUE(vixl::d13.Is(Arm64Assembler::reg_d(D13)));
+  EXPECT_TRUE(vixl::d14.Is(Arm64Assembler::reg_d(D14)));
+  EXPECT_TRUE(vixl::d15.Is(Arm64Assembler::reg_d(D15)));
+  EXPECT_TRUE(vixl::d16.Is(Arm64Assembler::reg_d(D16)));
+  EXPECT_TRUE(vixl::d17.Is(Arm64Assembler::reg_d(D17)));
+  EXPECT_TRUE(vixl::d18.Is(Arm64Assembler::reg_d(D18)));
+  EXPECT_TRUE(vixl::d19.Is(Arm64Assembler::reg_d(D19)));
+  EXPECT_TRUE(vixl::d20.Is(Arm64Assembler::reg_d(D20)));
+  EXPECT_TRUE(vixl::d21.Is(Arm64Assembler::reg_d(D21)));
+  EXPECT_TRUE(vixl::d22.Is(Arm64Assembler::reg_d(D22)));
+  EXPECT_TRUE(vixl::d23.Is(Arm64Assembler::reg_d(D23)));
+  EXPECT_TRUE(vixl::d24.Is(Arm64Assembler::reg_d(D24)));
+  EXPECT_TRUE(vixl::d25.Is(Arm64Assembler::reg_d(D25)));
+  EXPECT_TRUE(vixl::d26.Is(Arm64Assembler::reg_d(D26)));
+  EXPECT_TRUE(vixl::d27.Is(Arm64Assembler::reg_d(D27)));
+  EXPECT_TRUE(vixl::d28.Is(Arm64Assembler::reg_d(D28)));
+  EXPECT_TRUE(vixl::d29.Is(Arm64Assembler::reg_d(D29)));
+  EXPECT_TRUE(vixl::d30.Is(Arm64Assembler::reg_d(D30)));
+  EXPECT_TRUE(vixl::d31.Is(Arm64Assembler::reg_d(D31)));
+
+  // S Registers.
+  EXPECT_TRUE(vixl::s0.Is(Arm64Assembler::reg_s(S0)));
+  EXPECT_TRUE(vixl::s1.Is(Arm64Assembler::reg_s(S1)));
+  EXPECT_TRUE(vixl::s2.Is(Arm64Assembler::reg_s(S2)));
+  EXPECT_TRUE(vixl::s3.Is(Arm64Assembler::reg_s(S3)));
+  EXPECT_TRUE(vixl::s4.Is(Arm64Assembler::reg_s(S4)));
+  EXPECT_TRUE(vixl::s5.Is(Arm64Assembler::reg_s(S5)));
+  EXPECT_TRUE(vixl::s6.Is(Arm64Assembler::reg_s(S6)));
+  EXPECT_TRUE(vixl::s7.Is(Arm64Assembler::reg_s(S7)));
+  EXPECT_TRUE(vixl::s8.Is(Arm64Assembler::reg_s(S8)));
+  EXPECT_TRUE(vixl::s9.Is(Arm64Assembler::reg_s(S9)));
+  EXPECT_TRUE(vixl::s10.Is(Arm64Assembler::reg_s(S10)));
+  EXPECT_TRUE(vixl::s11.Is(Arm64Assembler::reg_s(S11)));
+  EXPECT_TRUE(vixl::s12.Is(Arm64Assembler::reg_s(S12)));
+  EXPECT_TRUE(vixl::s13.Is(Arm64Assembler::reg_s(S13)));
+  EXPECT_TRUE(vixl::s14.Is(Arm64Assembler::reg_s(S14)));
+  EXPECT_TRUE(vixl::s15.Is(Arm64Assembler::reg_s(S15)));
+  EXPECT_TRUE(vixl::s16.Is(Arm64Assembler::reg_s(S16)));
+  EXPECT_TRUE(vixl::s17.Is(Arm64Assembler::reg_s(S17)));
+  EXPECT_TRUE(vixl::s18.Is(Arm64Assembler::reg_s(S18)));
+  EXPECT_TRUE(vixl::s19.Is(Arm64Assembler::reg_s(S19)));
+  EXPECT_TRUE(vixl::s20.Is(Arm64Assembler::reg_s(S20)));
+  EXPECT_TRUE(vixl::s21.Is(Arm64Assembler::reg_s(S21)));
+  EXPECT_TRUE(vixl::s22.Is(Arm64Assembler::reg_s(S22)));
+  EXPECT_TRUE(vixl::s23.Is(Arm64Assembler::reg_s(S23)));
+  EXPECT_TRUE(vixl::s24.Is(Arm64Assembler::reg_s(S24)));
+  EXPECT_TRUE(vixl::s25.Is(Arm64Assembler::reg_s(S25)));
+  EXPECT_TRUE(vixl::s26.Is(Arm64Assembler::reg_s(S26)));
+  EXPECT_TRUE(vixl::s27.Is(Arm64Assembler::reg_s(S27)));
+  EXPECT_TRUE(vixl::s28.Is(Arm64Assembler::reg_s(S28)));
+  EXPECT_TRUE(vixl::s29.Is(Arm64Assembler::reg_s(S29)));
+  EXPECT_TRUE(vixl::s30.Is(Arm64Assembler::reg_s(S30)));
+  EXPECT_TRUE(vixl::s31.Is(Arm64Assembler::reg_s(S31)));
+}
+
 }  // namespace arm64
 }  // namespace art
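A note on how this test reaches the assembler's non-public helpers: gtest's TEST macro generates a class whose name joins the test-suite and test names with an underscore, which is exactly the name assembler_arm64.h now befriends. In outline:

    // What TEST(Arm64ManagedRegister, VixlRegisters) expands to, roughly:
    class Arm64ManagedRegister_VixlRegisters_Test : public ::testing::Test {
      void TestBody();  // the braces after TEST(...) become this method's body
    };
    // Befriending that generated class in Arm64Assembler is what lets the
    // test body call reg_x()/reg_w()/reg_d()/reg_s() directly.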
diff --git a/runtime/arch/arm64/registers_arm64.h b/runtime/arch/arm64/registers_arm64.h
index ca904bc947..43c0ad67ad 100644
--- a/runtime/arch/arm64/registers_arm64.h
+++ b/runtime/arch/arm64/registers_arm64.h
@@ -63,8 +63,8 @@ enum Register {
   LR = 30,
   SP = 31,   // SP is X31 and overlaps with XZR but we encode it as a
              // special register, due to the different instruction semantics.
-  XZR = 32,  // FIXME This needs to be reconciled with the JNI assembler.
-  kNumberOfCoreRegisters = 32,
+  XZR = 32,
+  kNumberOfCoreRegisters = 33,
   kNoRegister = -1,
 };
 std::ostream& operator<<(std::ostream& os, const Register& rhs);
@@ -103,7 +103,6 @@ enum WRegister {
   W29 = 29,
   W30 = 30,
   W31 = 31,
-  WSP = 31,
   WZR = 31,
   kNumberOfWRegisters = 32,
   kNoWRegister = -1,
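Two notes on the enum changes. XZR keeps a distinct id (32) so the managed-register layer can tell it apart from SP, even though both correspond to hardware encoding 31 (which of the two an instruction means is fixed by the instruction, not the encoding); hence kNumberOfCoreRegisters grows to 33 ids for 32 hardware registers. A hypothetical helper, not part of this patch, to make the id-vs-encoding distinction concrete:

    // Illustration only (not in the patch): collapsing ART's 33 core-register
    // ids back onto the 32 hardware encodings. SP (id 31) and XZR (id 32)
    // share hardware encoding 31.
    int HardwareEncoding(Register reg) {
      return (reg == XZR) ? 31 : static_cast<int>(reg);
    }

Second, the WSP alias is simply dropped, leaving WZR as the only remaining alias for W-register 31.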