ARM: VIXL32: Pass initial ART tests with new code generator.
- Implement enough codegen to pass ~70 art/tests.
- When ART_USE_VIXL_ARM_BACKEND is defined:
  - Blacklist known-to-fail target tests.
  - Interpret-only everything except the tests themselves.
  - Set a flag to use the VIXL-based ARM backend.
Test: export ART_USE_VIXL_ARM_BACKEND=true && mma test-art-target && mma test-art-host
Change-Id: Ic8bc095e8449f10f97fa0511284790f36c20e276
diff --git a/compiler/utils/arm/assembler_arm_vixl.cc b/compiler/utils/arm/assembler_arm_vixl.cc
index 3c5973e..8045bd2 100644
--- a/compiler/utils/arm/assembler_arm_vixl.cc
+++ b/compiler/utils/arm/assembler_arm_vixl.cc
@@ -346,6 +346,51 @@
   ___ Vldr(reg, MemOperand(base, offset));
 }
 
+// Prefer a sequence of Str/Ldr instructions over an Add followed by Stm/Ldm in
+// ArmVIXLAssembler::StoreRegisterList and ArmVIXLAssembler::LoadRegisterList
+// when it yields smaller code.
+static constexpr int kRegListThreshold = 4;
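+//
+// E.g. for two registers at stack offset 8, the Str path emits just
+//     str r0, [sp, #8]
+//     str r1, [sp, #12]
+// whereas Stm would first need the base address materialized in a scratch
+// register (typically ip):
+//     add ip, sp, #8
+//     stm ip, {r0, r1}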
+
+void ArmVIXLAssembler::StoreRegisterList(RegList regs, size_t stack_offset) {
+  int number_of_regs = POPCOUNT(static_cast<uint32_t>(regs));
+  if (number_of_regs != 0) {
+    if (number_of_regs > kRegListThreshold) {
+      UseScratchRegisterScope temps(GetVIXLAssembler());
+      vixl32::Register base = sp;
+      if (stack_offset != 0) {
+        base = temps.Acquire();
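+        // Stm would store the current value of `base` (sp + stack_offset), so
+        // the acquired scratch register must not itself be in the list.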
+        DCHECK_EQ(regs & (1u << base.GetCode()), 0u);
+        ___ Add(base, sp, stack_offset);
+      }
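+      // One Stm covers the whole list; NO_WRITE_BACK leaves `base` unchanged.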
+      ___ Stm(base, NO_WRITE_BACK, RegisterList(regs));
+    } else {
+      for (uint32_t i : LowToHighBits(static_cast<uint32_t>(regs))) {
+        ___ Str(vixl32::Register(i), MemOperand(sp, stack_offset));
+        stack_offset += kRegSizeInBytes;
+      }
+    }
+  }
+}
+
+void ArmVIXLAssembler::LoadRegisterList(RegList regs, size_t stack_offset) {
+  int number_of_regs = POPCOUNT(static_cast<uint32_t>(regs));
+  if (number_of_regs != 0) {
+    if (number_of_regs > kRegListThreshold) {
+      UseScratchRegisterScope temps(GetVIXLAssembler());
+      vixl32::Register base = sp;
+      if (stack_offset != 0) {
+        base = temps.Acquire();
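+        // Mirror StoreRegisterList: the acquired scratch register must not be
+        // one of the registers being loaded.
+        DCHECK_EQ(regs & (1u << base.GetCode()), 0u);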
+        ___ Add(base, sp, stack_offset);
+      }
+      ___ Ldm(base, NO_WRITE_BACK, RegisterList(regs));
+    } else {
+      for (uint32_t i : LowToHighBits(static_cast<uint32_t>(regs))) {
+        ___ Ldr(vixl32::Register(i), MemOperand(sp, stack_offset));
+        stack_offset += kRegSizeInBytes;
+      }
+    }
+  }
+}
+
 void ArmVIXLAssembler::AddConstant(vixl32::Register rd, int32_t value) {
   AddConstant(rd, rd, value);
 }
diff --git a/compiler/utils/arm/assembler_arm_vixl.h b/compiler/utils/arm/assembler_arm_vixl.h
index c8f3a9b..c5575fa 100644
--- a/compiler/utils/arm/assembler_arm_vixl.h
+++ b/compiler/utils/arm/assembler_arm_vixl.h
@@ -90,6 +90,9 @@
   void LoadSFromOffset(vixl32::SRegister reg, vixl32::Register base, int32_t offset);
   void LoadDFromOffset(vixl32::DRegister reg, vixl32::Register base, int32_t offset);
 
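+  // Load/store the registers in `regs` from/to consecutive stack slots,
+  // starting `stack_offset` bytes above SP, lowest-numbered register at the
+  // lowest address.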
+  void LoadRegisterList(RegList regs, size_t stack_offset);
+  void StoreRegisterList(RegList regs, size_t stack_offset);
+
   bool ShifterOperandCanAlwaysHold(uint32_t immediate);
   bool ShifterOperandCanHold(Opcode opcode, uint32_t immediate, SetCc set_cc);
   bool CanSplitLoadStoreOffset(int32_t allowed_offset_bits,