diff options
| author | 2016-02-01 18:57:58 -0800 | |
|---|---|---|
| committer | 2016-02-05 09:26:21 -0800 | |
| commit | a19616e3363276e7f2c471eb2839fb16f1d43f27 (patch) | |
| tree | ad3e7fd0f53229e95fb0443586fc30eedabe6967 /compiler/optimizing | |
| parent | 9fba3f67a0792ad5eeb495e489d11a87211c318f (diff) | |
Implemented compare/signum intrinsics as HCompare
(with all code generation for all)
Rationale:
At HIR level, many more optimizations are possible, while ultimately
generated code can take advantage of full semantics.
Change-Id: I6e2ee0311784e5e336847346f7f3c4faef4fd17e
Diffstat (limited to 'compiler/optimizing')
| -rw-r--r-- | compiler/optimizing/code_generator_arm.cc | 9 | ||||
| -rw-r--r-- | compiler/optimizing/code_generator_arm64.cc | 7 | ||||
| -rw-r--r-- | compiler/optimizing/code_generator_mips.cc | 9 | ||||
| -rw-r--r-- | compiler/optimizing/code_generator_mips64.cc | 18 | ||||
| -rw-r--r-- | compiler/optimizing/code_generator_x86.cc | 73 | ||||
| -rw-r--r-- | compiler/optimizing/code_generator_x86.h | 6 | ||||
| -rw-r--r-- | compiler/optimizing/code_generator_x86_64.cc | 86 | ||||
| -rw-r--r-- | compiler/optimizing/code_generator_x86_64.h | 6 | ||||
| -rw-r--r-- | compiler/optimizing/instruction_simplifier.cc | 25 | ||||
| -rw-r--r-- | compiler/optimizing/intrinsics_arm.cc | 12 | ||||
| -rw-r--r-- | compiler/optimizing/intrinsics_arm64.cc | 66 | ||||
| -rw-r--r-- | compiler/optimizing/intrinsics_mips.cc | 6 | ||||
| -rw-r--r-- | compiler/optimizing/intrinsics_mips64.cc | 6 | ||||
| -rw-r--r-- | compiler/optimizing/intrinsics_x86.cc | 62 | ||||
| -rw-r--r-- | compiler/optimizing/intrinsics_x86_64.cc | 128 |
15 files changed, 211 insertions(+), 308 deletions(-)
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc index c2d9edd43e..e43493280a 100644 --- a/compiler/optimizing/code_generator_arm.cc +++ b/compiler/optimizing/code_generator_arm.cc @@ -3750,6 +3750,7 @@ void LocationsBuilderARM::VisitCompare(HCompare* compare) { LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall); switch (compare->InputAt(0)->GetType()) { + case Primitive::kPrimInt: case Primitive::kPrimLong: { locations->SetInAt(0, Location::RequiresRegister()); locations->SetInAt(1, Location::RequiresRegister()); @@ -3779,6 +3780,13 @@ void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) { Primitive::Type type = compare->InputAt(0)->GetType(); Condition less_cond; switch (type) { + case Primitive::kPrimInt: { + __ LoadImmediate(out, 0); + __ cmp(left.AsRegister<Register>(), + ShifterOperand(right.AsRegister<Register>())); // Signed compare. + less_cond = LT; + break; + } case Primitive::kPrimLong: { __ cmp(left.AsRegisterPairHigh<Register>(), ShifterOperand(right.AsRegisterPairHigh<Register>())); // Signed compare. 
@@ -3808,6 +3816,7 @@ void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) { LOG(FATAL) << "Unexpected compare type " << type; UNREACHABLE(); } + __ b(&done, EQ); __ b(&less, less_cond); diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc index 4179fabe48..e20e04400f 100644 --- a/compiler/optimizing/code_generator_arm64.cc +++ b/compiler/optimizing/code_generator_arm64.cc @@ -2408,6 +2408,7 @@ void LocationsBuilderARM64::VisitCompare(HCompare* compare) { new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall); Primitive::Type in_type = compare->InputAt(0)->GetType(); switch (in_type) { + case Primitive::kPrimInt: case Primitive::kPrimLong: { locations->SetInAt(0, Location::RequiresRegister()); locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare)); @@ -2436,14 +2437,14 @@ void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) { // 1 if: left > right // -1 if: left < right switch (in_type) { + case Primitive::kPrimInt: case Primitive::kPrimLong: { Register result = OutputRegister(compare); Register left = InputRegisterAt(compare, 0); Operand right = InputOperandAt(compare, 1); - __ Cmp(left, right); - __ Cset(result, ne); - __ Cneg(result, result, lt); + __ Cset(result, ne); // result == +1 if NE or 0 otherwise + __ Cneg(result, result, lt); // result == -1 if LT or unchanged otherwise break; } case Primitive::kPrimFloat: diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc index 961fe62932..e9c0b6ae79 100644 --- a/compiler/optimizing/code_generator_mips.cc +++ b/compiler/optimizing/code_generator_mips.cc @@ -2123,6 +2123,7 @@ void LocationsBuilderMIPS::VisitCompare(HCompare* compare) { new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall); switch (in_type) { + case Primitive::kPrimInt: case Primitive::kPrimLong: locations->SetInAt(0, 
Location::RequiresRegister()); locations->SetInAt(1, Location::RequiresRegister()); @@ -2153,6 +2154,14 @@ void InstructionCodeGeneratorMIPS::VisitCompare(HCompare* instruction) { // 1 if: left > right // -1 if: left < right switch (in_type) { + case Primitive::kPrimInt: { + Register lhs = locations->InAt(0).AsRegister<Register>(); + Register rhs = locations->InAt(1).AsRegister<Register>(); + __ Slt(TMP, lhs, rhs); + __ Slt(res, rhs, lhs); + __ Subu(res, res, TMP); + break; + } case Primitive::kPrimLong: { MipsLabel done; Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>(); diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc index 3e1563c66b..da98a89f65 100644 --- a/compiler/optimizing/code_generator_mips64.cc +++ b/compiler/optimizing/code_generator_mips64.cc @@ -1763,6 +1763,7 @@ void LocationsBuilderMIPS64::VisitCompare(HCompare* compare) { LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare); switch (in_type) { + case Primitive::kPrimInt: case Primitive::kPrimLong: locations->SetInAt(0, Location::RequiresRegister()); locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1))); @@ -1791,16 +1792,25 @@ void InstructionCodeGeneratorMIPS64::VisitCompare(HCompare* instruction) { // 1 if: left > right // -1 if: left < right switch (in_type) { + case Primitive::kPrimInt: case Primitive::kPrimLong: { GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>(); Location rhs_location = locations->InAt(1); bool use_imm = rhs_location.IsConstant(); GpuRegister rhs = ZERO; if (use_imm) { - int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant()); - if (value != 0) { - rhs = AT; - __ LoadConst64(rhs, value); + if (in_type == Primitive::kPrimInt) { + int32_t value = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant()->AsConstant()); + if (value != 0) { + rhs = AT; + __ LoadConst32(rhs, value); + } + } else { + int64_t value 
= CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant()); + if (value != 0) { + rhs = AT; + __ LoadConst64(rhs, value); + } } } else { rhs = rhs_location.AsRegister<GpuRegister>(); diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc index 6304fb503d..de62010102 100644 --- a/compiler/optimizing/code_generator_x86.cc +++ b/compiler/optimizing/code_generator_x86.cc @@ -1350,11 +1350,7 @@ void InstructionCodeGeneratorX86::GenerateLongComparesAndJumps(HCondition* cond, int32_t val_high = High32Bits(value); int32_t val_low = Low32Bits(value); - if (val_high == 0) { - __ testl(left_high, left_high); - } else { - __ cmpl(left_high, Immediate(val_high)); - } + codegen_->Compare32BitValue(left_high, val_high); if (if_cond == kCondNE) { __ j(X86Condition(true_high_cond), true_label); } else if (if_cond == kCondEQ) { @@ -1364,11 +1360,7 @@ void InstructionCodeGeneratorX86::GenerateLongComparesAndJumps(HCondition* cond, __ j(X86Condition(false_high_cond), false_label); } // Must be equal high, so compare the lows. 
- if (val_low == 0) { - __ testl(left_low, left_low); - } else { - __ cmpl(left_low, Immediate(val_low)); - } + codegen_->Compare32BitValue(left_low, val_low); } else { Register right_high = right.AsRegisterPairHigh<Register>(); Register right_low = right.AsRegisterPairLow<Register>(); @@ -1547,11 +1539,7 @@ void InstructionCodeGeneratorX86::GenerateTestAndBranch(HInstruction* instructio __ cmpl(lhs.AsRegister<Register>(), rhs.AsRegister<Register>()); } else if (rhs.IsConstant()) { int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant()); - if (constant == 0) { - __ testl(lhs.AsRegister<Register>(), lhs.AsRegister<Register>()); - } else { - __ cmpl(lhs.AsRegister<Register>(), Immediate(constant)); - } + codegen_->Compare32BitValue(lhs.AsRegister<Register>(), constant); } else { __ cmpl(lhs.AsRegister<Register>(), Address(ESP, rhs.GetStackIndex())); } @@ -1744,11 +1732,7 @@ void InstructionCodeGeneratorX86::HandleCondition(HCondition* cond) { __ cmpl(lhs.AsRegister<Register>(), rhs.AsRegister<Register>()); } else if (rhs.IsConstant()) { int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant()); - if (constant == 0) { - __ testl(lhs.AsRegister<Register>(), lhs.AsRegister<Register>()); - } else { - __ cmpl(lhs.AsRegister<Register>(), Immediate(constant)); - } + codegen_->Compare32BitValue(lhs.AsRegister<Register>(), constant); } else { __ cmpl(lhs.AsRegister<Register>(), Address(ESP, rhs.GetStackIndex())); } @@ -4143,6 +4127,7 @@ void LocationsBuilderX86::VisitCompare(HCompare* compare) { LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall); switch (compare->InputAt(0)->GetType()) { + case Primitive::kPrimInt: case Primitive::kPrimLong: { locations->SetInAt(0, Location::RequiresRegister()); locations->SetInAt(1, Location::Any()); @@ -4174,7 +4159,21 @@ void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) { Location right = locations->InAt(1); NearLabel less, greater, done; + 
Condition less_cond = kLess; + switch (compare->InputAt(0)->GetType()) { + case Primitive::kPrimInt: { + Register left_reg = left.AsRegister<Register>(); + if (right.IsConstant()) { + int32_t value = right.GetConstant()->AsIntConstant()->GetValue(); + codegen_->Compare32BitValue(left_reg, value); + } else if (right.IsStackSlot()) { + __ cmpl(left_reg, Address(ESP, right.GetStackIndex())); + } else { + __ cmpl(left_reg, right.AsRegister<Register>()); + } + break; + } case Primitive::kPrimLong: { Register left_low = left.AsRegisterPairLow<Register>(); Register left_high = left.AsRegisterPairHigh<Register>(); @@ -4196,11 +4195,7 @@ void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) { __ cmpl(left_high, Address(ESP, right.GetHighStackIndex(kX86WordSize))); } else { DCHECK(right_is_const) << right; - if (val_high == 0) { - __ testl(left_high, left_high); - } else { - __ cmpl(left_high, Immediate(val_high)); - } + codegen_->Compare32BitValue(left_high, val_high); } __ j(kLess, &less); // Signed compare. __ j(kGreater, &greater); // Signed compare. @@ -4210,30 +4205,30 @@ void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) { __ cmpl(left_low, Address(ESP, right.GetStackIndex())); } else { DCHECK(right_is_const) << right; - if (val_low == 0) { - __ testl(left_low, left_low); - } else { - __ cmpl(left_low, Immediate(val_low)); - } + codegen_->Compare32BitValue(left_low, val_low); } + less_cond = kBelow; // for CF (unsigned). break; } case Primitive::kPrimFloat: { GenerateFPCompare(left, right, compare, false); __ j(kUnordered, compare->IsGtBias() ? &greater : &less); + less_cond = kBelow; // for CF (floats). break; } case Primitive::kPrimDouble: { GenerateFPCompare(left, right, compare, true); __ j(kUnordered, compare->IsGtBias() ? &greater : &less); + less_cond = kBelow; // for CF (floats). 
break; } default: LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType(); } + __ movl(out, Immediate(0)); __ j(kEqual, &done); - __ j(kBelow, &less); // kBelow is for CF (unsigned & floats). + __ j(less_cond, &less); __ Bind(&greater); __ movl(out, Immediate(1)); @@ -7193,6 +7188,22 @@ Address CodeGeneratorX86::LiteralInt64Address(int64_t v, Register reg) { return Address(reg, kDummy32BitOffset, fixup); } +void CodeGeneratorX86::Load32BitValue(Register dest, int32_t value) { + if (value == 0) { + __ xorl(dest, dest); + } else { + __ movl(dest, Immediate(value)); + } +} + +void CodeGeneratorX86::Compare32BitValue(Register dest, int32_t value) { + if (value == 0) { + __ testl(dest, dest); + } else { + __ cmpl(dest, Immediate(value)); + } +} + Address CodeGeneratorX86::LiteralCaseTable(HX86PackedSwitch* switch_instr, Register reg, Register value) { diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h index d51b96f85f..45e8ffa84f 100644 --- a/compiler/optimizing/code_generator_x86.h +++ b/compiler/optimizing/code_generator_x86.h @@ -452,6 +452,12 @@ class CodeGeneratorX86 : public CodeGenerator { Address LiteralInt32Address(int32_t v, Register reg); Address LiteralInt64Address(int64_t v, Register reg); + // Load a 32-bit value into a register in the most efficient manner. + void Load32BitValue(Register dest, int32_t value); + + // Compare a register with a 32-bit value in the most efficient manner. 
+ void Compare32BitValue(Register dest, int32_t value); + Address LiteralCaseTable(HX86PackedSwitch* switch_instr, Register reg, Register value); void Finalize(CodeAllocator* allocator) OVERRIDE; diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc index 86e5f7c5f4..99396cd983 100644 --- a/compiler/optimizing/code_generator_x86_64.cc +++ b/compiler/optimizing/code_generator_x86_64.cc @@ -1390,16 +1390,7 @@ void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) CpuRegister left_reg = left.AsRegister<CpuRegister>(); if (right.IsConstant()) { int64_t value = right.GetConstant()->AsLongConstant()->GetValue(); - if (IsInt<32>(value)) { - if (value == 0) { - __ testq(left_reg, left_reg); - } else { - __ cmpq(left_reg, Immediate(static_cast<int32_t>(value))); - } - } else { - // Value won't fit in a 32-bit integer. - __ cmpq(left_reg, codegen_->LiteralInt64Address(value)); - } + codegen_->Compare64BitValue(left_reg, value); } else if (right.IsDoubleStackSlot()) { __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex())); } else { @@ -1563,11 +1554,7 @@ void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruc __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>()); } else if (rhs.IsConstant()) { int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant()); - if (constant == 0) { - __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>()); - } else { - __ cmpl(lhs.AsRegister<CpuRegister>(), Immediate(constant)); - } + codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant); } else { __ cmpl(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex())); @@ -1808,11 +1795,7 @@ void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) { __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>()); } else if (rhs.IsConstant()) { int32_t constant = 
CodeGenerator::GetInt32ValueOf(rhs.GetConstant()); - if (constant == 0) { - __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>()); - } else { - __ cmpl(lhs.AsRegister<CpuRegister>(), Immediate(constant)); - } + codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant); } else { __ cmpl(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex())); } @@ -1826,16 +1809,7 @@ void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) { __ cmpq(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>()); } else if (rhs.IsConstant()) { int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue(); - if (IsInt<32>(value)) { - if (value == 0) { - __ testq(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>()); - } else { - __ cmpq(lhs.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value))); - } - } else { - // Value won't fit in an int. - __ cmpq(lhs.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value)); - } + codegen_->Compare64BitValue(lhs.AsRegister<CpuRegister>(), value); } else { __ cmpq(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex())); } @@ -1967,6 +1941,7 @@ void LocationsBuilderX86_64::VisitCompare(HCompare* compare) { LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall); switch (compare->InputAt(0)->GetType()) { + case Primitive::kPrimInt: case Primitive::kPrimLong: { locations->SetInAt(0, Location::RequiresRegister()); locations->SetInAt(1, Location::Any()); @@ -1993,21 +1968,26 @@ void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) { NearLabel less, greater, done; Primitive::Type type = compare->InputAt(0)->GetType(); + Condition less_cond = kLess; + switch (type) { + case Primitive::kPrimInt: { + CpuRegister left_reg = left.AsRegister<CpuRegister>(); + if (right.IsConstant()) { + int32_t value = right.GetConstant()->AsIntConstant()->GetValue(); + 
codegen_->Compare32BitValue(left_reg, value); + } else if (right.IsStackSlot()) { + __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex())); + } else { + __ cmpl(left_reg, right.AsRegister<CpuRegister>()); + } + break; + } case Primitive::kPrimLong: { CpuRegister left_reg = left.AsRegister<CpuRegister>(); if (right.IsConstant()) { int64_t value = right.GetConstant()->AsLongConstant()->GetValue(); - if (IsInt<32>(value)) { - if (value == 0) { - __ testq(left_reg, left_reg); - } else { - __ cmpq(left_reg, Immediate(static_cast<int32_t>(value))); - } - } else { - // Value won't fit in an int. - __ cmpq(left_reg, codegen_->LiteralInt64Address(value)); - } + codegen_->Compare64BitValue(left_reg, value); } else if (right.IsDoubleStackSlot()) { __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex())); } else { @@ -2026,6 +2006,7 @@ void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) { __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>()); } __ j(kUnordered, compare->IsGtBias() ? &greater : &less); + less_cond = kBelow; // ucomis{s,d} sets CF break; } case Primitive::kPrimDouble: { @@ -2039,14 +2020,16 @@ void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) { __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>()); } __ j(kUnordered, compare->IsGtBias() ? &greater : &less); + less_cond = kBelow; // ucomis{s,d} sets CF break; } default: LOG(FATAL) << "Unexpected compare type " << type; } + __ movl(out, Immediate(0)); __ j(kEqual, &done); - __ j(type == Primitive::kPrimLong ? 
kLess : kBelow, &less); // ucomis{s,d} sets CF (kBelow) + __ j(less_cond, &less); __ Bind(&greater); __ movl(out, Immediate(1)); @@ -6574,6 +6557,27 @@ void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) { Load64BitValue(dest, bit_cast<int64_t, double>(value)); } +void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) { + if (value == 0) { + __ testl(dest, dest); + } else { + __ cmpl(dest, Immediate(value)); + } +} + +void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) { + if (IsInt<32>(value)) { + if (value == 0) { + __ testq(dest, dest); + } else { + __ cmpq(dest, Immediate(static_cast<int32_t>(value))); + } + } else { + // Value won't fit in an int. + __ cmpq(dest, LiteralInt64Address(value)); + } +} + void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) { DCHECK(dest.IsDoubleStackSlot()); if (IsInt<32>(value)) { diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h index 6aa08143dd..72dddfddfa 100644 --- a/compiler/optimizing/code_generator_x86_64.h +++ b/compiler/optimizing/code_generator_x86_64.h @@ -479,7 +479,7 @@ class CodeGeneratorX86_64 : public CodeGenerator { Address LiteralInt32Address(int32_t v); Address LiteralInt64Address(int64_t v); - // Load a 32/64 bit value into a register in the most efficient manner. + // Load a 32/64-bit value into a register in the most efficient manner. void Load32BitValue(CpuRegister dest, int32_t value); void Load64BitValue(CpuRegister dest, int64_t value); void Load32BitValue(XmmRegister dest, int32_t value); @@ -487,6 +487,10 @@ class CodeGeneratorX86_64 : public CodeGenerator { void Load32BitValue(XmmRegister dest, float value); void Load64BitValue(XmmRegister dest, double value); + // Compare a register with a 32/64-bit value in the most efficient manner. 
+ void Compare32BitValue(CpuRegister dest, int32_t value); + void Compare64BitValue(CpuRegister dest, int64_t value); + Address LiteralCaseTable(HPackedSwitch* switch_instr); // Store a 64 bit value into a DoubleStackSlot in the most efficient manner. diff --git a/compiler/optimizing/instruction_simplifier.cc b/compiler/optimizing/instruction_simplifier.cc index fa2f6bb1dd..0029cc3650 100644 --- a/compiler/optimizing/instruction_simplifier.cc +++ b/compiler/optimizing/instruction_simplifier.cc @@ -91,6 +91,7 @@ class InstructionSimplifierVisitor : public HGraphDelegateVisitor { void SimplifyRotate(HInvoke* invoke, bool is_left); void SimplifySystemArrayCopy(HInvoke* invoke); void SimplifyStringEquals(HInvoke* invoke); + void SimplifyCompare(HInvoke* invoke, bool has_zero_op); OptimizingCompilerStats* stats_; bool simplification_occurred_ = false; @@ -1446,6 +1447,24 @@ void InstructionSimplifierVisitor::SimplifySystemArrayCopy(HInvoke* instruction) } } +void InstructionSimplifierVisitor::SimplifyCompare(HInvoke* invoke, bool is_signum) { + DCHECK(invoke->IsInvokeStaticOrDirect()); + uint32_t dex_pc = invoke->GetDexPc(); + HInstruction* left = invoke->InputAt(0); + HInstruction* right; + Primitive::Type type = left->GetType(); + if (!is_signum) { + right = invoke->InputAt(1); + } else if (type == Primitive::kPrimLong) { + right = GetGraph()->GetLongConstant(0); + } else { + right = GetGraph()->GetIntConstant(0); + } + HCompare* compare = new (GetGraph()->GetArena()) + HCompare(type, left, right, ComparisonBias::kNoBias, dex_pc); + invoke->GetBlock()->ReplaceAndRemoveInstructionWith(invoke, compare); +} + void InstructionSimplifierVisitor::VisitInvoke(HInvoke* instruction) { if (instruction->GetIntrinsic() == Intrinsics::kStringEquals) { SimplifyStringEquals(instruction); @@ -1457,6 +1476,12 @@ void InstructionSimplifierVisitor::VisitInvoke(HInvoke* instruction) { } else if (instruction->GetIntrinsic() == Intrinsics::kIntegerRotateLeft || instruction->GetIntrinsic() 
== Intrinsics::kLongRotateLeft) { SimplifyRotate(instruction, true); + } else if (instruction->GetIntrinsic() == Intrinsics::kIntegerCompare || + instruction->GetIntrinsic() == Intrinsics::kLongCompare) { + SimplifyCompare(instruction, /* is_signum */ false); + } else if (instruction->GetIntrinsic() == Intrinsics::kIntegerSignum || + instruction->GetIntrinsic() == Intrinsics::kLongSignum) { + SimplifyCompare(instruction, /* is_signum */ true); } } diff --git a/compiler/optimizing/intrinsics_arm.cc b/compiler/optimizing/intrinsics_arm.cc index e8912b39ab..96a3c3c2f1 100644 --- a/compiler/optimizing/intrinsics_arm.cc +++ b/compiler/optimizing/intrinsics_arm.cc @@ -1633,20 +1633,20 @@ UNIMPLEMENTED_INTRINSIC(DoubleIsInfinite) UNIMPLEMENTED_INTRINSIC(FloatIsNaN) UNIMPLEMENTED_INTRINSIC(DoubleIsNaN) -UNIMPLEMENTED_INTRINSIC(IntegerCompare) -UNIMPLEMENTED_INTRINSIC(LongCompare) UNIMPLEMENTED_INTRINSIC(IntegerHighestOneBit) UNIMPLEMENTED_INTRINSIC(LongHighestOneBit) UNIMPLEMENTED_INTRINSIC(IntegerLowestOneBit) UNIMPLEMENTED_INTRINSIC(LongLowestOneBit) -UNIMPLEMENTED_INTRINSIC(IntegerSignum) -UNIMPLEMENTED_INTRINSIC(LongSignum) -// Rotate operations are handled as HRor instructions. +// Handled as HIR instructions. 
UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft) -UNIMPLEMENTED_INTRINSIC(IntegerRotateRight) UNIMPLEMENTED_INTRINSIC(LongRotateLeft) +UNIMPLEMENTED_INTRINSIC(IntegerRotateRight) UNIMPLEMENTED_INTRINSIC(LongRotateRight) +UNIMPLEMENTED_INTRINSIC(IntegerCompare) +UNIMPLEMENTED_INTRINSIC(LongCompare) +UNIMPLEMENTED_INTRINSIC(IntegerSignum) +UNIMPLEMENTED_INTRINSIC(LongSignum) #undef UNIMPLEMENTED_INTRINSIC diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc index 5dce83a69c..4140d94e17 100644 --- a/compiler/optimizing/intrinsics_arm64.cc +++ b/compiler/optimizing/intrinsics_arm64.cc @@ -284,36 +284,6 @@ static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) { locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); } -static void GenCompare(LocationSummary* locations, bool is_long, vixl::MacroAssembler* masm) { - Location op1 = locations->InAt(0); - Location op2 = locations->InAt(1); - Location out = locations->Out(); - - Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1); - Register op2_reg = is_long ? 
XRegisterFrom(op2) : WRegisterFrom(op2); - Register out_reg = WRegisterFrom(out); - - __ Cmp(op1_reg, op2_reg); - __ Cset(out_reg, gt); // out == +1 if GT or 0 otherwise - __ Cinv(out_reg, out_reg, lt); // out == -1 if LT or unchanged otherwise -} - -void IntrinsicLocationsBuilderARM64::VisitIntegerCompare(HInvoke* invoke) { - CreateIntIntToIntLocations(arena_, invoke); -} - -void IntrinsicCodeGeneratorARM64::VisitIntegerCompare(HInvoke* invoke) { - GenCompare(invoke->GetLocations(), /* is_long */ false, GetVIXLAssembler()); -} - -void IntrinsicLocationsBuilderARM64::VisitLongCompare(HInvoke* invoke) { - CreateIntIntToIntLocations(arena_, invoke); -} - -void IntrinsicCodeGeneratorARM64::VisitLongCompare(HInvoke* invoke) { - GenCompare(invoke->GetLocations(), /* is_long */ true, GetVIXLAssembler()); -} - static void GenNumberOfLeadingZeros(LocationSummary* locations, Primitive::Type type, vixl::MacroAssembler* masm) { @@ -1456,34 +1426,6 @@ void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke __ Bind(slow_path->GetExitLabel()); } -static void GenSignum(LocationSummary* locations, bool is_long, vixl::MacroAssembler* masm) { - Location op1 = locations->InAt(0); - Location out = locations->Out(); - - Register op1_reg = is_long ? 
XRegisterFrom(op1) : WRegisterFrom(op1); - Register out_reg = WRegisterFrom(out); - - __ Cmp(op1_reg, 0); - __ Cset(out_reg, gt); // out == +1 if GT or 0 otherwise - __ Cinv(out_reg, out_reg, lt); // out == -1 if LT or unchanged otherwise -} - -void IntrinsicLocationsBuilderARM64::VisitIntegerSignum(HInvoke* invoke) { - CreateIntToIntLocations(arena_, invoke); -} - -void IntrinsicCodeGeneratorARM64::VisitIntegerSignum(HInvoke* invoke) { - GenSignum(invoke->GetLocations(), /* is_long */ false, GetVIXLAssembler()); -} - -void IntrinsicLocationsBuilderARM64::VisitLongSignum(HInvoke* invoke) { - CreateIntToIntLocations(arena_, invoke); -} - -void IntrinsicCodeGeneratorARM64::VisitLongSignum(HInvoke* invoke) { - GenSignum(invoke->GetLocations(), /* is_long */ true, GetVIXLAssembler()); -} - static void CreateFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) { DCHECK_EQ(invoke->GetNumberOfArguments(), 1U); DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(0)->GetType())); @@ -1684,11 +1626,15 @@ UNIMPLEMENTED_INTRINSIC(LongHighestOneBit) UNIMPLEMENTED_INTRINSIC(IntegerLowestOneBit) UNIMPLEMENTED_INTRINSIC(LongLowestOneBit) -// Rotate operations are handled as HRor instructions. +// Handled as HIR instructions. 
UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft) -UNIMPLEMENTED_INTRINSIC(IntegerRotateRight) UNIMPLEMENTED_INTRINSIC(LongRotateLeft) +UNIMPLEMENTED_INTRINSIC(IntegerRotateRight) UNIMPLEMENTED_INTRINSIC(LongRotateRight) +UNIMPLEMENTED_INTRINSIC(IntegerCompare) +UNIMPLEMENTED_INTRINSIC(LongCompare) +UNIMPLEMENTED_INTRINSIC(IntegerSignum) +UNIMPLEMENTED_INTRINSIC(LongSignum) #undef UNIMPLEMENTED_INTRINSIC diff --git a/compiler/optimizing/intrinsics_mips.cc b/compiler/optimizing/intrinsics_mips.cc index 0d9cf091cc..2294713a3e 100644 --- a/compiler/optimizing/intrinsics_mips.cc +++ b/compiler/optimizing/intrinsics_mips.cc @@ -1019,12 +1019,14 @@ UNIMPLEMENTED_INTRINSIC(DoubleIsInfinite) UNIMPLEMENTED_INTRINSIC(FloatIsNaN) UNIMPLEMENTED_INTRINSIC(DoubleIsNaN) -UNIMPLEMENTED_INTRINSIC(IntegerCompare) -UNIMPLEMENTED_INTRINSIC(LongCompare) UNIMPLEMENTED_INTRINSIC(IntegerHighestOneBit) UNIMPLEMENTED_INTRINSIC(LongHighestOneBit) UNIMPLEMENTED_INTRINSIC(IntegerLowestOneBit) UNIMPLEMENTED_INTRINSIC(LongLowestOneBit) + +// Handled as HIR instructions. +UNIMPLEMENTED_INTRINSIC(IntegerCompare) +UNIMPLEMENTED_INTRINSIC(LongCompare) UNIMPLEMENTED_INTRINSIC(IntegerSignum) UNIMPLEMENTED_INTRINSIC(LongSignum) diff --git a/compiler/optimizing/intrinsics_mips64.cc b/compiler/optimizing/intrinsics_mips64.cc index f681d1fd56..ac2850342d 100644 --- a/compiler/optimizing/intrinsics_mips64.cc +++ b/compiler/optimizing/intrinsics_mips64.cc @@ -1767,12 +1767,14 @@ UNIMPLEMENTED_INTRINSIC(DoubleIsInfinite) UNIMPLEMENTED_INTRINSIC(FloatIsNaN) UNIMPLEMENTED_INTRINSIC(DoubleIsNaN) -UNIMPLEMENTED_INTRINSIC(IntegerCompare) -UNIMPLEMENTED_INTRINSIC(LongCompare) UNIMPLEMENTED_INTRINSIC(IntegerHighestOneBit) UNIMPLEMENTED_INTRINSIC(LongHighestOneBit) UNIMPLEMENTED_INTRINSIC(IntegerLowestOneBit) UNIMPLEMENTED_INTRINSIC(LongLowestOneBit) + +// Handled as HIR instructions. 
+UNIMPLEMENTED_INTRINSIC(IntegerCompare) +UNIMPLEMENTED_INTRINSIC(LongCompare) UNIMPLEMENTED_INTRINSIC(IntegerSignum) UNIMPLEMENTED_INTRINSIC(LongSignum) diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc index 529f678761..ab4f6f9d28 100644 --- a/compiler/optimizing/intrinsics_x86.cc +++ b/compiler/optimizing/intrinsics_x86.cc @@ -2291,7 +2291,7 @@ static void SwapBits(Register reg, Register temp, int32_t shift, int32_t mask, } void IntrinsicCodeGeneratorX86::VisitIntegerReverse(HInvoke* invoke) { - X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler()); + X86Assembler* assembler = GetAssembler(); LocationSummary* locations = invoke->GetLocations(); Register reg = locations->InAt(0).AsRegister<Register>(); @@ -2322,7 +2322,7 @@ void IntrinsicLocationsBuilderX86::VisitLongReverse(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86::VisitLongReverse(HInvoke* invoke) { - X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler()); + X86Assembler* assembler = GetAssembler(); LocationSummary* locations = invoke->GetLocations(); Register reg_low = locations->InAt(0).AsRegisterPairLow<Register>(); @@ -2366,7 +2366,9 @@ static void CreateBitCountLocations( locations->SetOut(Location::RequiresRegister()); } -static void GenBitCount(X86Assembler* assembler, HInvoke* invoke, bool is_long) { +static void GenBitCount(X86Assembler* assembler, + CodeGeneratorX86* codegen, + HInvoke* invoke, bool is_long) { LocationSummary* locations = invoke->GetLocations(); Location src = locations->InAt(0); Register out = locations->Out().AsRegister<Register>(); @@ -2377,11 +2379,7 @@ static void GenBitCount(X86Assembler* assembler, HInvoke* invoke, bool is_long) value = is_long ? 
POPCOUNT(static_cast<uint64_t>(value)) : POPCOUNT(static_cast<uint32_t>(value)); - if (value == 0) { - __ xorl(out, out); - } else { - __ movl(out, Immediate(value)); - } + codegen->Load32BitValue(out, value); return; } @@ -2413,7 +2411,7 @@ void IntrinsicLocationsBuilderX86::VisitIntegerBitCount(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86::VisitIntegerBitCount(HInvoke* invoke) { - GenBitCount(GetAssembler(), invoke, /* is_long */ false); + GenBitCount(GetAssembler(), codegen_, invoke, /* is_long */ false); } void IntrinsicLocationsBuilderX86::VisitLongBitCount(HInvoke* invoke) { @@ -2421,7 +2419,7 @@ void IntrinsicLocationsBuilderX86::VisitLongBitCount(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86::VisitLongBitCount(HInvoke* invoke) { - GenBitCount(GetAssembler(), invoke, /* is_long */ true); + GenBitCount(GetAssembler(), codegen_, invoke, /* is_long */ true); } static void CreateLeadingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) { @@ -2436,7 +2434,9 @@ static void CreateLeadingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, b locations->SetOut(Location::RequiresRegister()); } -static void GenLeadingZeros(X86Assembler* assembler, HInvoke* invoke, bool is_long) { +static void GenLeadingZeros(X86Assembler* assembler, + CodeGeneratorX86* codegen, + HInvoke* invoke, bool is_long) { LocationSummary* locations = invoke->GetLocations(); Location src = locations->InAt(0); Register out = locations->Out().AsRegister<Register>(); @@ -2449,11 +2449,7 @@ static void GenLeadingZeros(X86Assembler* assembler, HInvoke* invoke, bool is_lo } else { value = is_long ? 
CLZ(static_cast<uint64_t>(value)) : CLZ(static_cast<uint32_t>(value)); } - if (value == 0) { - __ xorl(out, out); - } else { - __ movl(out, Immediate(value)); - } + codegen->Load32BitValue(out, value); return; } @@ -2520,8 +2516,7 @@ void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfLeadingZeros(HInvoke* inv } void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) { - X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler()); - GenLeadingZeros(assembler, invoke, /* is_long */ false); + GenLeadingZeros(GetAssembler(), codegen_, invoke, /* is_long */ false); } void IntrinsicLocationsBuilderX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) { @@ -2529,8 +2524,7 @@ void IntrinsicLocationsBuilderX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke } void IntrinsicCodeGeneratorX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) { - X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler()); - GenLeadingZeros(assembler, invoke, /* is_long */ true); + GenLeadingZeros(GetAssembler(), codegen_, invoke, /* is_long */ true); } static void CreateTrailingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) { @@ -2545,7 +2539,9 @@ static void CreateTrailingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, locations->SetOut(Location::RequiresRegister()); } -static void GenTrailingZeros(X86Assembler* assembler, HInvoke* invoke, bool is_long) { +static void GenTrailingZeros(X86Assembler* assembler, + CodeGeneratorX86* codegen, + HInvoke* invoke, bool is_long) { LocationSummary* locations = invoke->GetLocations(); Location src = locations->InAt(0); Register out = locations->Out().AsRegister<Register>(); @@ -2558,11 +2554,7 @@ static void GenTrailingZeros(X86Assembler* assembler, HInvoke* invoke, bool is_l } else { value = is_long ? 
CTZ(static_cast<uint64_t>(value)) : CTZ(static_cast<uint32_t>(value)); } - if (value == 0) { - __ xorl(out, out); - } else { - __ movl(out, Immediate(value)); - } + codegen->Load32BitValue(out, value); return; } @@ -2616,8 +2608,7 @@ void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfTrailingZeros(HInvoke* in } void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) { - X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler()); - GenTrailingZeros(assembler, invoke, /* is_long */ false); + GenTrailingZeros(GetAssembler(), codegen_, invoke, /* is_long */ false); } void IntrinsicLocationsBuilderX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) { @@ -2625,8 +2616,7 @@ void IntrinsicLocationsBuilderX86::VisitLongNumberOfTrailingZeros(HInvoke* invok } void IntrinsicCodeGeneratorX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) { - X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler()); - GenTrailingZeros(assembler, invoke, /* is_long */ true); + GenTrailingZeros(GetAssembler(), codegen_, invoke, /* is_long */ true); } // Unimplemented intrinsics. @@ -2646,20 +2636,20 @@ UNIMPLEMENTED_INTRINSIC(DoubleIsInfinite) UNIMPLEMENTED_INTRINSIC(FloatIsNaN) UNIMPLEMENTED_INTRINSIC(DoubleIsNaN) -UNIMPLEMENTED_INTRINSIC(IntegerCompare) -UNIMPLEMENTED_INTRINSIC(LongCompare) UNIMPLEMENTED_INTRINSIC(IntegerHighestOneBit) UNIMPLEMENTED_INTRINSIC(LongHighestOneBit) UNIMPLEMENTED_INTRINSIC(IntegerLowestOneBit) UNIMPLEMENTED_INTRINSIC(LongLowestOneBit) -UNIMPLEMENTED_INTRINSIC(IntegerSignum) -UNIMPLEMENTED_INTRINSIC(LongSignum) -// Rotate operations are handled as HRor instructions. +// Handled as HIR instructions. 
UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft) +UNIMPLEMENTED_INTRINSIC(LongRotateLeft) UNIMPLEMENTED_INTRINSIC(IntegerRotateRight) UNIMPLEMENTED_INTRINSIC(LongRotateRight) -UNIMPLEMENTED_INTRINSIC(LongRotateLeft) +UNIMPLEMENTED_INTRINSIC(IntegerCompare) +UNIMPLEMENTED_INTRINSIC(LongCompare) +UNIMPLEMENTED_INTRINSIC(IntegerSignum) +UNIMPLEMENTED_INTRINSIC(LongSignum) #undef UNIMPLEMENTED_INTRINSIC diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc index 51fa514cb6..c9a43442b3 100644 --- a/compiler/optimizing/intrinsics_x86_64.cc +++ b/compiler/optimizing/intrinsics_x86_64.cc @@ -2431,58 +2431,6 @@ void IntrinsicCodeGeneratorX86_64::VisitLongBitCount(HInvoke* invoke) { GenBitCount(GetAssembler(), codegen_, invoke, /* is_long */ true); } -static void CreateCompareLocations(ArenaAllocator* arena, HInvoke* invoke) { - LocationSummary* locations = new (arena) LocationSummary(invoke, - LocationSummary::kNoCall, - kIntrinsified); - locations->SetInAt(0, Location::RequiresRegister()); - locations->SetInAt(1, Location::RequiresRegister()); - locations->SetOut(Location::RequiresRegister()); -} - -static void GenCompare(X86_64Assembler* assembler, HInvoke* invoke, bool is_long) { - LocationSummary* locations = invoke->GetLocations(); - CpuRegister src1 = locations->InAt(0).AsRegister<CpuRegister>(); - CpuRegister src2 = locations->InAt(1).AsRegister<CpuRegister>(); - CpuRegister out = locations->Out().AsRegister<CpuRegister>(); - - NearLabel is_lt, done; - - __ xorl(out, out); - - if (is_long) { - __ cmpq(src1, src2); - } else { - __ cmpl(src1, src2); - } - __ j(kEqual, &done); - __ j(kLess, &is_lt); - - __ movl(out, Immediate(1)); - __ jmp(&done); - - __ Bind(&is_lt); - __ movl(out, Immediate(-1)); - - __ Bind(&done); -} - -void IntrinsicLocationsBuilderX86_64::VisitIntegerCompare(HInvoke* invoke) { - CreateCompareLocations(arena_, invoke); -} - -void IntrinsicCodeGeneratorX86_64::VisitIntegerCompare(HInvoke* invoke) { - 
GenCompare(GetAssembler(), invoke, /* is_long */ false); -} - -void IntrinsicLocationsBuilderX86_64::VisitLongCompare(HInvoke* invoke) { - CreateCompareLocations(arena_, invoke); -} - -void IntrinsicCodeGeneratorX86_64::VisitLongCompare(HInvoke* invoke) { - GenCompare(GetAssembler(), invoke, /* is_long */ true); -} - static void CreateOneBitLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_high) { LocationSummary* locations = new (arena) LocationSummary(invoke, LocationSummary::kNoCall, @@ -2757,74 +2705,6 @@ void IntrinsicCodeGeneratorX86_64::VisitLongNumberOfTrailingZeros(HInvoke* invok GenTrailingZeros(GetAssembler(), codegen_, invoke, /* is_long */ true); } -static void CreateSignLocations(ArenaAllocator* arena, HInvoke* invoke) { - LocationSummary* locations = new (arena) LocationSummary(invoke, - LocationSummary::kNoCall, - kIntrinsified); - locations->SetInAt(0, Location::Any()); - locations->SetOut(Location::RequiresRegister()); - locations->AddTemp(Location::RequiresRegister()); // Need a writeable register. -} - -static void GenSign(X86_64Assembler* assembler, - CodeGeneratorX86_64* codegen, - HInvoke* invoke, bool is_long) { - LocationSummary* locations = invoke->GetLocations(); - Location src = locations->InAt(0); - CpuRegister out = locations->Out().AsRegister<CpuRegister>(); - - if (invoke->InputAt(0)->IsConstant()) { - // Evaluate this at compile time. - int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant()); - codegen->Load32BitValue(out, value == 0 ? 0 : (value > 0 ? 1 : -1)); - return; - } - - // Copy input into temporary. 
- CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>(); - if (src.IsRegister()) { - if (is_long) { - __ movq(tmp, src.AsRegister<CpuRegister>()); - } else { - __ movl(tmp, src.AsRegister<CpuRegister>()); - } - } else if (is_long) { - DCHECK(src.IsDoubleStackSlot()); - __ movq(tmp, Address(CpuRegister(RSP), src.GetStackIndex())); - } else { - DCHECK(src.IsStackSlot()); - __ movl(tmp, Address(CpuRegister(RSP), src.GetStackIndex())); - } - - // Do the bit twiddling: basically tmp >> 63/31 | -tmp >>> 63/31 for long/int. - if (is_long) { - __ movq(out, tmp); - __ sarq(out, Immediate(63)); - __ negq(tmp); - __ shrq(tmp, Immediate(63)); - __ orq(out, tmp); - } else { - __ movl(out, tmp); - __ sarl(out, Immediate(31)); - __ negl(tmp); - __ shrl(tmp, Immediate(31)); - __ orl(out, tmp); - } -} - -void IntrinsicLocationsBuilderX86_64::VisitIntegerSignum(HInvoke* invoke) { - CreateSignLocations(arena_, invoke); -} -void IntrinsicCodeGeneratorX86_64::VisitIntegerSignum(HInvoke* invoke) { - GenSign(GetAssembler(), codegen_, invoke, /* is_long */ false); -} -void IntrinsicLocationsBuilderX86_64::VisitLongSignum(HInvoke* invoke) { - CreateSignLocations(arena_, invoke); -} -void IntrinsicCodeGeneratorX86_64::VisitLongSignum(HInvoke* invoke) { - GenSign(GetAssembler(), codegen_, invoke, /* is_long */ true); -} - // Unimplemented intrinsics. #define UNIMPLEMENTED_INTRINSIC(Name) \ @@ -2840,11 +2720,15 @@ UNIMPLEMENTED_INTRINSIC(DoubleIsInfinite) UNIMPLEMENTED_INTRINSIC(FloatIsNaN) UNIMPLEMENTED_INTRINSIC(DoubleIsNaN) -// Rotate operations are handled as HRor instructions. +// Handled as HIR instructions. 
UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft) -UNIMPLEMENTED_INTRINSIC(IntegerRotateRight) UNIMPLEMENTED_INTRINSIC(LongRotateLeft) +UNIMPLEMENTED_INTRINSIC(IntegerRotateRight) UNIMPLEMENTED_INTRINSIC(LongRotateRight) +UNIMPLEMENTED_INTRINSIC(IntegerCompare) +UNIMPLEMENTED_INTRINSIC(LongCompare) +UNIMPLEMENTED_INTRINSIC(IntegerSignum) +UNIMPLEMENTED_INTRINSIC(LongSignum) #undef UNIMPLEMENTED_INTRINSIC |