author Vladimir Marko <vmarko@google.com> 2024-11-07 09:36:53 +0100
committer Vladimír Marko <vmarko@google.com> 2024-11-08 09:29:38 +0000
commit f8ac417533d9ebee6d02ad84a1e6a6b056e6720d (patch)
tree f0b9d5e9c2f7a3164205745055ed68eb328eb591
parent b506262278a1b556bea98fe47e919ed4e8bc7d2c (diff)
Clean up after introducing `HRol`.
Clean up after https://android-review.googlesource.com/3262277 .

Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Change-Id: I11d3b2ffd2305a841fa44345b7d2bd09de21b42d
-rw-r--r-- compiler/optimizing/code_generator_arm_vixl.cc 4
-rw-r--r-- compiler/optimizing/code_generator_x86.cc 11
-rw-r--r-- compiler/optimizing/code_generator_x86_64.cc 10
-rw-r--r-- compiler/optimizing/instruction_simplifier_shared.cc 11
-rw-r--r-- compiler/optimizing/instruction_simplifier_x86.cc 16
5 files changed, 17 insertions, 35 deletions
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index 34b3aad3d4..e88d14b3eb 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -5291,11 +5291,11 @@ void InstructionCodeGeneratorARMVIXL::HandleIntegerRotate(HBinaryOperation* rota
// Arm32 and Thumb2 assemblers require a rotation on the interval [1,31],
// so map all rotations to a +ve. equivalent in that range.
// (e.g. left *or* right by -2 bits == 30 bits in the same direction.)
- uint32_t rot = CodeGenerator::GetInt32ValueOf(rhs.GetConstant()) & 0x1F;
-
+ uint32_t rot = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
if (rotate->IsRol()) {
rot = -rot;
}
+ rot &= 0x1f;
if (rot) {
// Rotate, mapping left rotations to right equivalents if necessary.
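For context (not part of the change): the reordering above first negates the constant for a rotate-left and only then masks it to [0, 31], so a left rotation by N becomes a right rotation by (32 - N) mod 32. A minimal standalone sketch of that mapping, in plain C++ rather than ART's code generator:

#include <cassert>
#include <cstdint>

// Rotate right by an amount already reduced to [0, 31].
static uint32_t Ror32(uint32_t value, uint32_t rot) {
  return rot == 0u ? value : (value >> rot) | (value << (32u - rot));
}

// Mimics the snippet above: negate for a left rotation, then mask.
static uint32_t Rotate(uint32_t value, uint32_t rot, bool is_rol) {
  if (is_rol) {
    rot = -rot;  // rol by N == ror by -N (mod 32)
  }
  rot &= 0x1fu;
  return Ror32(value, rot);
}

int main() {
  // rol by 2 and ror by 30 give the same result.
  assert(Rotate(0x80000001u, 2u, /*is_rol=*/ true) == 0x00000006u);
  assert(Rotate(0x80000001u, 30u, /*is_rol=*/ false) == 0x00000006u);
  return 0;
}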
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index d0368be866..6db49c7771 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -5125,13 +5125,14 @@ void InstructionCodeGeneratorX86::HandleRotate(HBinaryOperation* rotate) {
Register second_reg = second.AsRegister<Register>();
DCHECK_EQ(second_reg, ECX);
+ __ movl(temp_reg, first_reg_hi);
if (rotate->IsRol()) {
- __ negl(second_reg);
+ __ shld(first_reg_hi, first_reg_lo, second_reg);
+ __ shld(first_reg_lo, temp_reg, second_reg);
+ } else {
+ __ shrd(first_reg_hi, first_reg_lo, second_reg);
+ __ shrd(first_reg_lo, temp_reg, second_reg);
}
-
- __ movl(temp_reg, first_reg_hi);
- __ shrd(first_reg_hi, first_reg_lo, second_reg);
- __ shrd(first_reg_lo, temp_reg, second_reg);
__ movl(temp_reg, first_reg_hi);
__ testl(second_reg, Immediate(32));
__ cmovl(kNotEqual, first_reg_hi, first_reg_lo);
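For context (a sketch, not the generated assembly): a 64-bit rotate on 32-bit x86 is built from two double-precision shifts plus a conditional half swap when bit 5 of the count is set. The rewrite above keeps that shape but emits shld for rotate-left and shrd for rotate-right instead of negating the count. A rough C++ model of the rotate-right path, with hypothetical helper names:

#include <cassert>
#include <cstdint>

// Models x86 `shrd dst, src, count`: shift dst right, filling the vacated
// high bits from the low bits of src; the count is masked to 5 bits.
static uint32_t Shrd(uint32_t dst, uint32_t src, uint32_t count) {
  count &= 31u;
  return count == 0u ? dst : (dst >> count) | (src << (32u - count));
}

// Rotate-right of a 64-bit value held as a lo/hi register pair, following
// the movl/shrd/shrd + testl/cmovl sequence in the diff above.
static uint64_t RotateRight64(uint32_t lo, uint32_t hi, uint32_t count) {
  uint32_t saved_hi = hi;                        // movl temp, hi
  uint32_t new_hi = Shrd(hi, lo, count);         // shrd hi, lo, count
  uint32_t new_lo = Shrd(lo, saved_hi, count);   // shrd lo, temp, count
  if ((count & 32u) != 0u) {                     // testl count, 32; cmovl swap
    uint32_t tmp = new_hi;
    new_hi = new_lo;
    new_lo = tmp;
  }
  return (static_cast<uint64_t>(new_hi) << 32) | new_lo;
}

int main() {
  const uint64_t value = 0x0123456789ABCDEFull;
  for (uint32_t count = 0; count < 64u; ++count) {
    uint64_t expected =
        count == 0u ? value : (value >> count) | (value << (64u - count));
    assert(RotateRight64(static_cast<uint32_t>(value),
                         static_cast<uint32_t>(value >> 32), count) == expected);
  }
  return 0;
}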
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 6d3acedb5d..e9593d6c98 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -5122,11 +5122,11 @@ void InstructionCodeGeneratorX86_64::HandleRotate(HBinaryOperation* rotate) {
}
}
-void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
- HandleRotate(ror);
+void LocationsBuilderX86_64::VisitRol(HRol* rol) {
+ HandleRotate(rol);
}
-void LocationsBuilderX86_64::VisitRol(HRol* rol) {
+void InstructionCodeGeneratorX86_64::VisitRol(HRol* rol) {
HandleRotate(rol);
}
@@ -5134,8 +5134,8 @@ void LocationsBuilderX86_64::VisitRor(HRor* ror) {
HandleRotate(ror);
}
-void InstructionCodeGeneratorX86_64::VisitRol(HRol* rol) {
- HandleRotate(rol);
+void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
+ HandleRotate(ror);
}
void LocationsBuilderX86_64::VisitShl(HShl* shl) {
diff --git a/compiler/optimizing/instruction_simplifier_shared.cc b/compiler/optimizing/instruction_simplifier_shared.cc
index 7f575c0348..2215b93414 100644
--- a/compiler/optimizing/instruction_simplifier_shared.cc
+++ b/compiler/optimizing/instruction_simplifier_shared.cc
@@ -320,18 +320,15 @@ void UnfoldRotateLeft(HRol* rol) {
HBasicBlock* block = rol->GetBlock();
HGraph* graph = block->GetGraph();
ArenaAllocator* allocator = graph->GetAllocator();
- HRor* ror;
-
+ HInstruction* neg;
if (rol->GetRight()->IsConstant()) {
int32_t value = rol->GetRight()->AsIntConstant()->GetValue();
- HIntConstant* negated = graph->GetIntConstant(-value);
- ror = new (allocator) HRor(rol->GetType(), rol->GetLeft(), negated);
+ neg = graph->GetIntConstant(-value);
} else {
- HNeg* neg = new (allocator) HNeg(DataType::Type::kInt32, rol->GetRight());
+ neg = new (allocator) HNeg(DataType::Type::kInt32, rol->GetRight());
block->InsertInstructionBefore(neg, rol);
- ror = new (allocator) HRor(rol->GetType(), rol->GetLeft(), neg);
}
-
+ HInstruction* ror = new (allocator) HRor(rol->GetType(), rol->GetLeft(), neg);
block->ReplaceAndRemoveInstructionWith(rol, ror);
}
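For context (not part of the change): UnfoldRotateLeft now builds the negated shift amount first, either as a constant or as an HNeg, and then creates a single HRor, relying on the identity rol(x, n) == ror(x, -n). A tiny standalone check of that identity for 32-bit values:

#include <cassert>
#include <cstdint>

static uint32_t Ror32(uint32_t x, uint32_t n) {
  n &= 31u;
  return n == 0u ? x : (x >> n) | (x << (32u - n));
}

static uint32_t Rol32(uint32_t x, uint32_t n) {
  n &= 31u;
  return n == 0u ? x : (x << n) | (x >> (32u - n));
}

int main() {
  const uint32_t x = 0xCAFEBABEu;
  for (uint32_t n = 0; n < 32u; ++n) {
    // rol by n equals ror by the (modular) negation of n.
    assert(Rol32(x, n) == Ror32(x, 0u - n));
  }
  return 0;
}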
diff --git a/compiler/optimizing/instruction_simplifier_x86.cc b/compiler/optimizing/instruction_simplifier_x86.cc
index e1c783e5b8..9b5fb52509 100644
--- a/compiler/optimizing/instruction_simplifier_x86.cc
+++ b/compiler/optimizing/instruction_simplifier_x86.cc
@@ -48,7 +48,6 @@ class InstructionSimplifierX86Visitor final : public HGraphVisitor {
}
void VisitAnd(HAnd * instruction) override;
- void VisitRol(HRol* instruction) override;
void VisitXor(HXor* instruction) override;
private:
@@ -71,21 +70,6 @@ void InstructionSimplifierX86Visitor::VisitAnd(HAnd* instruction) {
}
}
-void InstructionSimplifierX86Visitor::VisitRol(HRol* rol) {
- if (rol->GetType() != DataType::Type::kInt64) {
- return;
- }
-
- HBasicBlock* block = rol->GetBlock();
- HGraph* graph = block->GetGraph();
- ArenaAllocator* allocator = graph->GetAllocator();
-
- HNeg* neg = new (allocator) HNeg(DataType::Type::kInt32, rol->GetRight());
- block->InsertInstructionBefore(neg, rol);
- HRor* ror = new (allocator) HRor(rol->GetType(), rol->GetLeft(), neg);
- block->ReplaceAndRemoveInstructionWith(rol, ror);
-}
-
void InstructionSimplifierX86Visitor::VisitXor(HXor* instruction) {
if (!HasAVX2()) {
return;