Diffstat (limited to 'compiler')
 compiler/optimizing/code_generator_arm64.cc |  9 +++++----
 compiler/optimizing/common_arm64.h          | 35 +++++++++++++++++++++++++++++++++++
 2 files changed, 40 insertions(+), 4 deletions(-)
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index c21084a6fe..6b4c2f0656 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -63,6 +63,7 @@ using helpers::StackOperandFrom;
 using helpers::VIXLRegCodeFromART;
 using helpers::WRegisterFrom;
 using helpers::XRegisterFrom;
+using helpers::ARM64EncodableConstantOrRegister;
 
 static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
 static constexpr int kCurrentMethodStackOffset = 0;
@@ -1104,7 +1105,7 @@ void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
     case Primitive::kPrimInt:
     case Primitive::kPrimLong:
       locations->SetInAt(0, Location::RequiresRegister());
-      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
+      locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
       locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
       break;
 
@@ -1395,7 +1396,7 @@ void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
   switch (in_type) {
     case Primitive::kPrimLong: {
       locations->SetInAt(0, Location::RequiresRegister());
-      locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
+      locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
       locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
       break;
     }
@@ -1465,7 +1466,7 @@ void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
 void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
   locations->SetInAt(0, Location::RequiresRegister());
-  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
+  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
   if (instruction->NeedsMaterialization()) {
     locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
   }
@@ -2116,7 +2117,7 @@ void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
   switch (neg->GetResultType()) {
     case Primitive::kPrimInt:
     case Primitive::kPrimLong:
-      locations->SetInAt(0, Location::RegisterOrConstant(neg->InputAt(0)));
+      locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
       locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
       break;
 
diff --git a/compiler/optimizing/common_arm64.h b/compiler/optimizing/common_arm64.h
index 9447d3b816..056deb98d6 100644
--- a/compiler/optimizing/common_arm64.h
+++ b/compiler/optimizing/common_arm64.h
@@ -183,6 +183,41 @@ static inline vixl::Operand OperandFromMemOperand(const vixl::MemOperand& mem_op
   }
 }
 
+static bool CanEncodeConstantAsImmediate(HConstant* constant, HInstruction* instr) {
+  DCHECK(constant->IsIntConstant() || constant->IsLongConstant());
+
+  // For single uses we let VIXL handle the constant generation since it will
+  // use registers that are not managed by the register allocator (wip0, wip1).
+  if (constant->GetUses().HasOnlyOneUse()) {
+    return true;
+  }
+
+  int64_t value = constant->IsIntConstant() ? constant->AsIntConstant()->GetValue()
+                                            : constant->AsLongConstant()->GetValue();
+
+  if (instr->IsAdd() || instr->IsSub() || instr->IsCondition() || instr->IsCompare()) {
+    // Uses aliases of ADD/SUB instructions.
+    return vixl::Assembler::IsImmAddSub(value);
+  } else if (instr->IsAnd() || instr->IsOr() || instr->IsXor()) {
+    // Uses logical operations.
+    return vixl::Assembler::IsImmLogical(value, vixl::kXRegSize);
+  } else {
+    DCHECK(instr->IsNeg());
+    // Uses mov -immediate.
+    return vixl::Assembler::IsImmMovn(value, vixl::kXRegSize);
+  }
+}
+
+static inline Location ARM64EncodableConstantOrRegister(HInstruction* constant,
+                                                        HInstruction* instr) {
+  if (constant->IsConstant()
+      && CanEncodeConstantAsImmediate(constant->AsConstant(), instr)) {
+    return Location::ConstantLocation(constant->AsConstant());
+  }
+
+  return Location::RequiresRegister();
+}
+
 }  // namespace helpers
 }  // namespace arm64
 }  // namespace art
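
The checks above defer to VIXL's Assembler predicates for the actual A64 encoding rules. As a rough standalone sketch of two of those rules (an illustration under the standard A64 immediate formats, not the VIXL implementation): ADD/SUB take an unsigned 12-bit immediate, optionally shifted left by 12 bits, and MOVN can materialize any 64-bit value whose bitwise NOT has at most one non-zero 16-bit halfword. The bitmask-immediate rule behind IsImmLogical is considerably more involved and is omitted here. The names ending in "Sketch" below are hypothetical helpers, not part of ART or VIXL.

// Hypothetical standalone sketch of the immediate rules assumed above;
// the authoritative checks are vixl::Assembler::IsImmAddSub() and
// vixl::Assembler::IsImmMovn().
#include <cstdint>
#include <cstdio>

// ADD/SUB (immediate): unsigned 12 bits, optionally shifted left by 12.
static bool IsImmAddSubSketch(int64_t value) {
  return (value & ~INT64_C(0xfff)) == 0 ||
         (value & ~(INT64_C(0xfff) << 12)) == 0;
}

// MOVZ handles values with at most one non-zero 16-bit halfword;
// MOVN encodes the bitwise NOT of such a value.
static bool IsImmMovzSketch(uint64_t value) {
  int nonzero_halfwords = 0;
  for (int shift = 0; shift < 64; shift += 16) {
    if ((value >> shift) & 0xffff) ++nonzero_halfwords;
  }
  return nonzero_halfwords <= 1;
}

static bool IsImmMovnSketch(uint64_t value) {
  return IsImmMovzSketch(~value);
}

int main() {
  std::printf("0xfff   add/sub: %d\n", IsImmAddSubSketch(0xfff));          // 1
  std::printf("0x1000  add/sub: %d\n", IsImmAddSubSketch(0x1000));         // 1
  std::printf("0x1001  add/sub: %d\n", IsImmAddSubSketch(0x1001));         // 0
  std::printf("~0x2a   movn:    %d\n", IsImmMovnSketch(~UINT64_C(0x2a)));  // 1
  return 0;
}

Under rules like these, ARM64EncodableConstantOrRegister keeps a multi-use constant such as 4095 as an immediate operand of an HAdd, while a constant such as 0x1001 gets Location::RequiresRegister() and is materialized into a register managed by the register allocator.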