Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/boolean_simplifier.cc            31
-rw-r--r--  compiler/optimizing/code_generator_arm.cc           133
-rw-r--r--  compiler/optimizing/code_generator_arm64.cc          56
-rw-r--r--  compiler/optimizing/code_generator_mips64.cc        104
-rw-r--r--  compiler/optimizing/code_generator_x86.cc           232
-rw-r--r--  compiler/optimizing/code_generator_x86_64.cc         81
-rw-r--r--  compiler/optimizing/codegen_test.cc                 126
-rw-r--r--  compiler/optimizing/common_arm64.h                    4
-rw-r--r--  compiler/optimizing/constant_area_fixups_x86.cc     132
-rw-r--r--  compiler/optimizing/induction_var_analysis.cc         7
-rw-r--r--  compiler/optimizing/instruction_simplifier.cc         2
-rw-r--r--  compiler/optimizing/intrinsics_mips64.cc            358
-rw-r--r--  compiler/optimizing/nodes.h                         224
-rw-r--r--  compiler/optimizing/optimizing_compiler.cc            1
-rw-r--r--  compiler/optimizing/reference_type_propagation.cc     4
15 files changed, 1159 insertions(+), 336 deletions(-)
diff --git a/compiler/optimizing/boolean_simplifier.cc b/compiler/optimizing/boolean_simplifier.cc
index 5b346872b0..f985745e7a 100644
--- a/compiler/optimizing/boolean_simplifier.cc
+++ b/compiler/optimizing/boolean_simplifier.cc
@@ -69,19 +69,17 @@ static HInstruction* GetOppositeCondition(HInstruction* cond) {
if (cond->IsCondition()) {
HInstruction* lhs = cond->InputAt(0);
HInstruction* rhs = cond->InputAt(1);
- if (cond->IsEqual()) {
- return new (allocator) HNotEqual(lhs, rhs);
- } else if (cond->IsNotEqual()) {
- return new (allocator) HEqual(lhs, rhs);
- } else if (cond->IsLessThan()) {
- return new (allocator) HGreaterThanOrEqual(lhs, rhs);
- } else if (cond->IsLessThanOrEqual()) {
- return new (allocator) HGreaterThan(lhs, rhs);
- } else if (cond->IsGreaterThan()) {
- return new (allocator) HLessThanOrEqual(lhs, rhs);
- } else {
- DCHECK(cond->IsGreaterThanOrEqual());
- return new (allocator) HLessThan(lhs, rhs);
+ switch (cond->AsCondition()->GetOppositeCondition()) { // get *opposite*
+ case kCondEQ: return new (allocator) HEqual(lhs, rhs);
+ case kCondNE: return new (allocator) HNotEqual(lhs, rhs);
+ case kCondLT: return new (allocator) HLessThan(lhs, rhs);
+ case kCondLE: return new (allocator) HLessThanOrEqual(lhs, rhs);
+ case kCondGT: return new (allocator) HGreaterThan(lhs, rhs);
+ case kCondGE: return new (allocator) HGreaterThanOrEqual(lhs, rhs);
+ case kCondB: return new (allocator) HBelow(lhs, rhs);
+ case kCondBE: return new (allocator) HBelowOrEqual(lhs, rhs);
+ case kCondA: return new (allocator) HAbove(lhs, rhs);
+ case kCondAE: return new (allocator) HAboveOrEqual(lhs, rhs);
}
} else if (cond->IsIntConstant()) {
HIntConstant* int_const = cond->AsIntConstant();
@@ -91,11 +89,10 @@ static HInstruction* GetOppositeCondition(HInstruction* cond) {
DCHECK(int_const->IsOne());
return graph->GetIntConstant(0);
}
- } else {
- // General case when 'cond' is another instruction of type boolean,
- // as verified by SSAChecker.
- return new (allocator) HBooleanNot(cond);
}
+ // General case when 'cond' is another instruction of type boolean,
+ // as verified by SSAChecker.
+ return new (allocator) HBooleanNot(cond);
}
void HBooleanSimplifier::TryRemovingBooleanSelection(HBasicBlock* block) {
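The rewritten helper switches on GetOppositeCondition(), so each case constructs the node for the already-negated condition: an HEqual input reaches case kCondNE and yields HNotEqual. A minimal standalone sketch of that mapping, covering the four new unsigned conditions (names mirror the diff, but this is illustrative, not the ART API):

#include <cassert>

enum IfCondition { kCondEQ, kCondNE, kCondLT, kCondLE, kCondGT, kCondGE,
                   kCondB, kCondBE, kCondA, kCondAE };

IfCondition Opposite(IfCondition c) {
  switch (c) {
    case kCondEQ: return kCondNE;
    case kCondNE: return kCondEQ;
    case kCondLT: return kCondGE;
    case kCondLE: return kCondGT;
    case kCondGT: return kCondLE;
    case kCondGE: return kCondLT;
    case kCondB:  return kCondAE;  // !(a <u b)  ==  (a >=u b)
    case kCondBE: return kCondA;   // !(a <=u b) ==  (a >u b)
    case kCondA:  return kCondBE;
    case kCondAE: return kCondB;
  }
  return kCondEQ;  // unreachable; silences missing-return warnings
}

int main() {
  assert(Opposite(kCondB) == kCondAE);
  assert(Opposite(Opposite(kCondBE)) == kCondBE);  // opposites are involutions
  return 0;
}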
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 8c1820b6cd..3e6cad83fa 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -409,7 +409,7 @@ class ArraySetSlowPathARM : public SlowPathCode {
#undef __
#define __ down_cast<ArmAssembler*>(GetAssembler())->
-inline Condition ARMSignedOrFPCondition(IfCondition cond) {
+inline Condition ARMCondition(IfCondition cond) {
switch (cond) {
case kCondEQ: return EQ;
case kCondNE: return NE;
@@ -417,19 +417,30 @@ inline Condition ARMSignedOrFPCondition(IfCondition cond) {
case kCondLE: return LE;
case kCondGT: return GT;
case kCondGE: return GE;
+ case kCondB: return LO;
+ case kCondBE: return LS;
+ case kCondA: return HI;
+ case kCondAE: return HS;
}
LOG(FATAL) << "Unreachable";
UNREACHABLE();
}
+// Maps signed condition to unsigned condition.
inline Condition ARMUnsignedCondition(IfCondition cond) {
switch (cond) {
case kCondEQ: return EQ;
case kCondNE: return NE;
+ // Signed to unsigned.
case kCondLT: return LO;
case kCondLE: return LS;
case kCondGT: return HI;
case kCondGE: return HS;
+ // Unsigned conditions remain unchanged.
+ case kCondB: return LO;
+ case kCondBE: return LS;
+ case kCondA: return HI;
+ case kCondAE: return HS;
}
LOG(FATAL) << "Unreachable";
UNREACHABLE();
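For reference, the flag tests behind the four unsigned ARM condition codes used above. After CMP a, b the carry flag is set exactly when the subtraction does not borrow, i.e. when a >=u b:

  LO (a <u b):  C == 0              HS (a >=u b): C == 1
  LS (a <=u b): C == 0 || Z == 1    HI (a >u b):  C == 1 && Z == 0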
@@ -1130,8 +1141,7 @@ void LocationsBuilderARM::VisitExit(HExit* exit) {
exit->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
- UNUSED(exit);
+void InstructionCodeGeneratorARM::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
void InstructionCodeGeneratorARM::GenerateCompareWithImmediate(Register left, int32_t right) {
@@ -1149,12 +1159,13 @@ void InstructionCodeGeneratorARM::GenerateFPJumps(HCondition* cond,
Label* true_label,
Label* false_label) {
__ vmstat(); // transfer FP status register to ARM APSR.
+ // TODO: merge into a single branch (except "equal or unordered" and "not equal")
if (cond->IsFPConditionTrueIfNaN()) {
__ b(true_label, VS); // VS for unordered.
} else if (cond->IsFPConditionFalseIfNaN()) {
__ b(false_label, VS); // VS for unordered.
}
- __ b(true_label, ARMSignedOrFPCondition(cond->GetCondition()));
+ __ b(true_label, ARMCondition(cond->GetCondition()));
}
void InstructionCodeGeneratorARM::GenerateLongComparesAndJumps(HCondition* cond,
@@ -1169,10 +1180,11 @@ void InstructionCodeGeneratorARM::GenerateLongComparesAndJumps(HCondition* cond,
Register left_low = left.AsRegisterPairLow<Register>();
IfCondition true_high_cond = if_cond;
IfCondition false_high_cond = cond->GetOppositeCondition();
- Condition final_condition = ARMUnsignedCondition(if_cond);
+ Condition final_condition = ARMUnsignedCondition(if_cond); // unsigned on lower part
// Set the conditions for the test, remembering that == needs to be
// decided using the low words.
+ // TODO: consider avoiding jumps with temporary and CMP low+SBC high
switch (if_cond) {
case kCondEQ:
case kCondNE:
@@ -1190,6 +1202,18 @@ void InstructionCodeGeneratorARM::GenerateLongComparesAndJumps(HCondition* cond,
case kCondGE:
true_high_cond = kCondGT;
break;
+ case kCondB:
+ false_high_cond = kCondA;
+ break;
+ case kCondBE:
+ true_high_cond = kCondB;
+ break;
+ case kCondA:
+ false_high_cond = kCondB;
+ break;
+ case kCondAE:
+ true_high_cond = kCondA;
+ break;
}
if (right.IsConstant()) {
int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
@@ -1198,12 +1222,12 @@ void InstructionCodeGeneratorARM::GenerateLongComparesAndJumps(HCondition* cond,
GenerateCompareWithImmediate(left_high, val_high);
if (if_cond == kCondNE) {
- __ b(true_label, ARMSignedOrFPCondition(true_high_cond));
+ __ b(true_label, ARMCondition(true_high_cond));
} else if (if_cond == kCondEQ) {
- __ b(false_label, ARMSignedOrFPCondition(false_high_cond));
+ __ b(false_label, ARMCondition(false_high_cond));
} else {
- __ b(true_label, ARMSignedOrFPCondition(true_high_cond));
- __ b(false_label, ARMSignedOrFPCondition(false_high_cond));
+ __ b(true_label, ARMCondition(true_high_cond));
+ __ b(false_label, ARMCondition(false_high_cond));
}
// Must be equal high, so compare the lows.
GenerateCompareWithImmediate(left_low, val_low);
@@ -1213,17 +1237,18 @@ void InstructionCodeGeneratorARM::GenerateLongComparesAndJumps(HCondition* cond,
__ cmp(left_high, ShifterOperand(right_high));
if (if_cond == kCondNE) {
- __ b(true_label, ARMSignedOrFPCondition(true_high_cond));
+ __ b(true_label, ARMCondition(true_high_cond));
} else if (if_cond == kCondEQ) {
- __ b(false_label, ARMSignedOrFPCondition(false_high_cond));
+ __ b(false_label, ARMCondition(false_high_cond));
} else {
- __ b(true_label, ARMSignedOrFPCondition(true_high_cond));
- __ b(false_label, ARMSignedOrFPCondition(false_high_cond));
+ __ b(true_label, ARMCondition(true_high_cond));
+ __ b(false_label, ARMCondition(false_high_cond));
}
// Must be equal high, so compare the lows.
__ cmp(left_low, ShifterOperand(right_low));
}
// The last comparison might be unsigned.
+ // TODO: optimize cases where this is always true/false
__ b(true_label, final_condition);
}
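The pairwise decomposition implemented above can be checked in isolation: the high words decide with the original condition, and when they are equal the low words decide with an always-unsigned comparison, which is why final_condition comes from ARMUnsignedCondition(). A self-contained sketch of the identity for the signed less-than case (assumes arithmetic right shift for negative int64_t, as on ARM):

#include <cassert>
#include <cstdint>

static bool SignedLess64ViaPairs(int64_t a, int64_t b) {
  int32_t ah = static_cast<int32_t>(a >> 32);   // high words: signed compare
  int32_t bh = static_cast<int32_t>(b >> 32);
  uint32_t al = static_cast<uint32_t>(a);       // low words: unsigned compare
  uint32_t bl = static_cast<uint32_t>(b);
  if (ah != bh) return ah < bh;
  return al < bl;
}

int main() {
  const int64_t vals[] = {INT64_MIN, -3, -1, 0, 1, (1LL << 32) - 1,
                          1LL << 32, INT64_MAX};
  for (int64_t a : vals)
    for (int64_t b : vals)
      assert(SignedLess64ViaPairs(a, b) == (a < b));
  return 0;
}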
@@ -1315,7 +1340,7 @@ void InstructionCodeGeneratorARM::GenerateTestAndBranch(HInstruction* instructio
DCHECK(right.IsConstant());
GenerateCompareWithImmediate(left, CodeGenerator::GetInt32ValueOf(right.GetConstant()));
}
- __ b(true_target, ARMSignedOrFPCondition(cond->AsCondition()->GetCondition()));
+ __ b(true_target, ARMCondition(cond->AsCondition()->GetCondition()));
}
}
if (false_target != nullptr) {
@@ -1417,11 +1442,11 @@ void InstructionCodeGeneratorARM::VisitCondition(HCondition* cond) {
GenerateCompareWithImmediate(left.AsRegister<Register>(),
CodeGenerator::GetInt32ValueOf(right.GetConstant()));
}
- __ it(ARMSignedOrFPCondition(cond->GetCondition()), kItElse);
+ __ it(ARMCondition(cond->GetCondition()), kItElse);
__ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
- ARMSignedOrFPCondition(cond->GetCondition()));
+ ARMCondition(cond->GetCondition()));
__ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
- ARMSignedOrFPCondition(cond->GetOppositeCondition()));
+ ARMCondition(cond->GetOppositeCondition()));
return;
}
case Primitive::kPrimLong:
@@ -1500,6 +1525,38 @@ void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* c
VisitCondition(comp);
}
+void LocationsBuilderARM::VisitBelow(HBelow* comp) {
+ VisitCondition(comp);
+}
+
+void InstructionCodeGeneratorARM::VisitBelow(HBelow* comp) {
+ VisitCondition(comp);
+}
+
+void LocationsBuilderARM::VisitBelowOrEqual(HBelowOrEqual* comp) {
+ VisitCondition(comp);
+}
+
+void InstructionCodeGeneratorARM::VisitBelowOrEqual(HBelowOrEqual* comp) {
+ VisitCondition(comp);
+}
+
+void LocationsBuilderARM::VisitAbove(HAbove* comp) {
+ VisitCondition(comp);
+}
+
+void InstructionCodeGeneratorARM::VisitAbove(HAbove* comp) {
+ VisitCondition(comp);
+}
+
+void LocationsBuilderARM::VisitAboveOrEqual(HAboveOrEqual* comp) {
+ VisitCondition(comp);
+}
+
+void InstructionCodeGeneratorARM::VisitAboveOrEqual(HAboveOrEqual* comp) {
+ VisitCondition(comp);
+}
+
void LocationsBuilderARM::VisitLocal(HLocal* local) {
local->SetLocations(nullptr);
}
@@ -1512,9 +1569,8 @@ void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
load->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
+void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load ATTRIBUTE_UNUSED) {
// Nothing to do, this is driven by the code generator.
- UNUSED(load);
}
void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
@@ -1541,8 +1597,7 @@ void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
}
}
-void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
- UNUSED(store);
+void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store ATTRIBUTE_UNUSED) {
}
void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
@@ -1551,9 +1606,8 @@ void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
+void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderARM::VisitNullConstant(HNullConstant* constant) {
@@ -1562,9 +1616,8 @@ void LocationsBuilderARM::VisitNullConstant(HNullConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM::VisitNullConstant(HNullConstant* constant) {
+void InstructionCodeGeneratorARM::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
@@ -1573,9 +1626,8 @@ void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
+void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
@@ -1584,9 +1636,8 @@ void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
+void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
@@ -1595,9 +1646,8 @@ void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
+void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderARM::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
@@ -1612,8 +1662,7 @@ void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
ret->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
- UNUSED(ret);
+void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
codegen_->GenerateFrameExit();
}
@@ -1623,8 +1672,7 @@ void LocationsBuilderARM::VisitReturn(HReturn* ret) {
locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}
-void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
- UNUSED(ret);
+void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
codegen_->GenerateFrameExit();
}
@@ -3270,8 +3318,7 @@ void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
locations->SetOut(Location::Any());
}
-void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
- UNUSED(instruction);
+void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
LOG(FATAL) << "Unreachable";
}
@@ -4232,13 +4279,11 @@ void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
temp->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
+void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp ATTRIBUTE_UNUSED) {
// Nothing to do, this is driven by the code generator.
- UNUSED(temp);
}
-void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
- UNUSED(instruction);
+void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
LOG(FATAL) << "Unreachable";
}
@@ -5286,15 +5331,13 @@ Literal* CodeGeneratorARM::DeduplicateMethodCodeLiteral(MethodReference target_m
return DeduplicateMethodLiteral(target_method, &call_patches_);
}
-void LocationsBuilderARM::VisitBoundType(HBoundType* instruction) {
+void LocationsBuilderARM::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
// Nothing to do, this should be removed during prepare for register allocator.
- UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
-void InstructionCodeGeneratorARM::VisitBoundType(HBoundType* instruction) {
+void InstructionCodeGeneratorARM::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
// Nothing to do, this should be removed during prepare for register allocator.
- UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index c94da86d2c..ffb9b794fc 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -77,6 +77,10 @@ inline Condition ARM64Condition(IfCondition cond) {
case kCondLE: return le;
case kCondGT: return gt;
case kCondGE: return ge;
+ case kCondB: return lo;
+ case kCondBE: return ls;
+ case kCondA: return hi;
+ case kCondAE: return hs;
}
LOG(FATAL) << "Unreachable";
UNREACHABLE();
@@ -1326,8 +1330,7 @@ enum UnimplementedInstructionBreakCode {
};
#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
- void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) { \
- UNUSED(instr); \
+ void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) { \
__ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
} \
void LocationsBuilderARM64::Visit##name(H##name* instr) { \
@@ -1937,7 +1940,11 @@ void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
M(LessThan) \
M(LessThanOrEqual) \
M(GreaterThan) \
- M(GreaterThanOrEqual)
+ M(GreaterThanOrEqual) \
+ M(Below) \
+ M(BelowOrEqual) \
+ M(Above) \
+ M(AboveOrEqual)
#define DEFINE_CONDITION_VISITORS(Name) \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); } \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
@@ -2175,8 +2182,8 @@ void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM64::VisitDoubleConstant(HDoubleConstant* constant) {
- UNUSED(constant);
+void InstructionCodeGeneratorARM64::VisitDoubleConstant(
+ HDoubleConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
}
@@ -2184,8 +2191,7 @@ void LocationsBuilderARM64::VisitExit(HExit* exit) {
exit->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
- UNUSED(exit);
+void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
@@ -2194,8 +2200,7 @@ void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant) {
- UNUSED(constant);
+void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
}
@@ -2682,9 +2687,8 @@ void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
+void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
@@ -2692,9 +2696,8 @@ void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant) {
+void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
@@ -3085,9 +3088,8 @@ void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
load->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
+void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load ATTRIBUTE_UNUSED) {
// Nothing to do, this is driven by the code generator.
- UNUSED(load);
}
void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
@@ -3124,9 +3126,8 @@ void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
+void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
@@ -3393,8 +3394,7 @@ void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
locations->SetOut(Location::Any());
}
-void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
- UNUSED(instruction);
+void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
LOG(FATAL) << "Unreachable";
}
@@ -3464,8 +3464,7 @@ void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
locations->SetInAt(0, ARM64ReturnLocation(return_type));
}
-void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
- UNUSED(instruction);
+void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
codegen_->GenerateFrameExit();
}
@@ -3473,8 +3472,7 @@ void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
instruction->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
- UNUSED(instruction);
+void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
codegen_->GenerateFrameExit();
}
@@ -3518,8 +3516,7 @@ void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
}
}
-void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
- UNUSED(store);
+void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store ATTRIBUTE_UNUSED) {
}
void LocationsBuilderARM64::VisitSub(HSub* instruction) {
@@ -3636,9 +3633,8 @@ void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
temp->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
+void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp ATTRIBUTE_UNUSED) {
// Nothing to do, this is driven by the code generator.
- UNUSED(temp);
}
void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
@@ -3737,15 +3733,13 @@ void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
HandleBinaryOp(instruction);
}
-void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction) {
+void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
// Nothing to do, this should be removed during prepare for register allocator.
- UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
-void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction) {
+void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
// Nothing to do, this should be removed during prepare for register allocator.
- UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc
index 1a08503cf9..eb20291e20 100644
--- a/compiler/optimizing/code_generator_mips64.cc
+++ b/compiler/optimizing/code_generator_mips64.cc
@@ -1778,6 +1778,9 @@ void InstructionCodeGeneratorMIPS64::VisitCondition(HCondition* instruction) {
return;
}
+ // TODO: generalize to long
+ DCHECK_NE(instruction->InputAt(0)->GetType(), Primitive::kPrimLong);
+
LocationSummary* locations = instruction->GetLocations();
GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
@@ -1855,6 +1858,48 @@ void InstructionCodeGeneratorMIPS64::VisitCondition(HCondition* instruction) {
}
}
break;
+
+ case kCondB:
+ case kCondAE:
+ if (use_imm && 0 <= rhs_imm && rhs_imm <= 0x7fff) {
+ __ Sltiu(dst, lhs, rhs_imm);
+ } else {
+ if (use_imm) {
+ rhs_reg = TMP;
+ __ LoadConst32(rhs_reg, rhs_imm);
+ }
+ __ Sltu(dst, lhs, rhs_reg);
+ }
+ if (if_cond == kCondAE) {
+ // Simulate lhs >= rhs via !(lhs < rhs) since there's
+ // only the sltu instruction but no sgeu.
+ __ Xori(dst, dst, 1);
+ }
+ break;
+
+ case kCondBE:
+ case kCondA:
+ if (use_imm && 0 <= rhs_imm && rhs_imm <= 0x7ffe) {
+ // Simulate lhs <= rhs via lhs < rhs + 1.
+ __ Sltiu(dst, lhs, rhs_imm + 1);
+ if (if_cond == kCondA) {
+ // Simulate lhs > rhs via !(lhs <= rhs) since there's
+ // only the sltiu instruction but no sgtiu.
+ __ Xori(dst, dst, 1);
+ }
+ } else {
+ if (use_imm) {
+ rhs_reg = TMP;
+ __ LoadConst32(rhs_reg, rhs_imm);
+ }
+ __ Sltu(dst, rhs_reg, lhs);
+ if (if_cond == kCondBE) {
+ // Simulate lhs <= rhs via !(rhs < lhs) since there's
+ // only the sltu instruction but no sleu.
+ __ Xori(dst, dst, 1);
+ }
+ }
+ break;
}
}
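MIPS64 only has set-on-less-than (Sltu/Sltiu); the other unsigned relations above are derived from it. The identities used, checked as a standalone sketch (the immediate bound 0x7ffe exists because a <=u imm is rewritten as a <u imm + 1, which must not overflow the Sltiu immediate):

#include <cassert>
#include <cstdint>

int main() {
  for (uint32_t a = 0; a < 8; a++) {
    for (uint32_t b = 0; b < 8; b++) {
      assert((a >= b) == !(a < b));     // kCondAE: Sltu, then Xori dst, dst, 1
      assert((a > b) == (b < a));       // kCondA:  Sltu with operands swapped
      assert((a <= b) == !(b < a));     // kCondBE: swap, then Xori dst, dst, 1
      assert((a <= b) == (a < b + 1));  // kCondBE immediate form (no overflow here)
    }
  }
  return 0;
}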
@@ -2072,6 +2117,17 @@ void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruc
case kCondGT:
__ Bgtzc(lhs, true_target);
break;
+ case kCondB:
+ break; // always false
+ case kCondBE:
+ __ Beqzc(lhs, true_target); // x <=u 0 iff x == 0
+ break;
+ case kCondA:
+ __ Bnezc(lhs, true_target); // x >u 0 iff x != 0
+ break;
+ case kCondAE:
+ __ B(true_target); // always true
+ break;
}
} else {
if (use_imm) {
@@ -2086,12 +2142,16 @@ void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruc
case kCondEQ:
case kCondGE:
case kCondLE:
+ case kCondBE:
+ case kCondAE:
// if lhs == rhs for a positive condition, then it is a branch
__ B(true_target);
break;
case kCondNE:
case kCondLT:
case kCondGT:
+ case kCondB:
+ case kCondA:
// if lhs == rhs for a negative condition, then it is a NOP
break;
}
@@ -2115,6 +2175,18 @@ void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruc
case kCondGT:
__ Bltc(rhs_reg, lhs, true_target);
break;
+ case kCondB:
+ __ Bltuc(lhs, rhs_reg, true_target);
+ break;
+ case kCondAE:
+ __ Bgeuc(lhs, rhs_reg, true_target);
+ break;
+ case kCondBE:
+ __ Bgeuc(rhs_reg, lhs, true_target);
+ break;
+ case kCondA:
+ __ Bltuc(rhs_reg, lhs, true_target);
+ break;
}
}
}
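The zero-comparison cases earlier in GenerateTestAndBranch fold the unsigned branches away entirely: x <u 0 can never hold and x >=u 0 always does. A quick check of those folds (sketch):

#include <cassert>
#include <cstdint>
#include <initializer_list>

int main() {
  const uint32_t zero = 0;
  for (uint32_t x : {0u, 1u, 0x7fffffffu, 0xffffffffu}) {
    assert(!(x < zero));               // kCondB vs 0: no branch emitted
    assert(x >= zero);                 // kCondAE vs 0: unconditional B
    assert((x <= zero) == (x == 0));   // kCondBE vs 0: Beqzc
    assert((x > zero) == (x != 0));    // kCondA vs 0: Bnezc
  }
  return 0;
}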
@@ -3462,6 +3534,38 @@ void InstructionCodeGeneratorMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual
VisitCondition(comp);
}
+void LocationsBuilderMIPS64::VisitBelow(HBelow* comp) {
+ VisitCondition(comp);
+}
+
+void InstructionCodeGeneratorMIPS64::VisitBelow(HBelow* comp) {
+ VisitCondition(comp);
+}
+
+void LocationsBuilderMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
+ VisitCondition(comp);
+}
+
+void InstructionCodeGeneratorMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
+ VisitCondition(comp);
+}
+
+void LocationsBuilderMIPS64::VisitAbove(HAbove* comp) {
+ VisitCondition(comp);
+}
+
+void InstructionCodeGeneratorMIPS64::VisitAbove(HAbove* comp) {
+ VisitCondition(comp);
+}
+
+void LocationsBuilderMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
+ VisitCondition(comp);
+}
+
+void InstructionCodeGeneratorMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
+ VisitCondition(comp);
+}
+
void LocationsBuilderMIPS64::VisitFakeString(HFakeString* instruction) {
DCHECK(codegen_->IsBaseline());
LocationSummary* locations =
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index b60eebf1ba..2aea859b7d 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -428,7 +428,7 @@ class ArraySetSlowPathX86 : public SlowPathCode {
#undef __
#define __ down_cast<X86Assembler*>(GetAssembler())->
-inline Condition X86SignedCondition(IfCondition cond) {
+inline Condition X86Condition(IfCondition cond) {
switch (cond) {
case kCondEQ: return kEqual;
case kCondNE: return kNotEqual;
@@ -436,19 +436,30 @@ inline Condition X86SignedCondition(IfCondition cond) {
case kCondLE: return kLessEqual;
case kCondGT: return kGreater;
case kCondGE: return kGreaterEqual;
+ case kCondB: return kBelow;
+ case kCondBE: return kBelowEqual;
+ case kCondA: return kAbove;
+ case kCondAE: return kAboveEqual;
}
LOG(FATAL) << "Unreachable";
UNREACHABLE();
}
+// Maps signed condition to unsigned condition and FP condition to x86 name.
inline Condition X86UnsignedOrFPCondition(IfCondition cond) {
switch (cond) {
case kCondEQ: return kEqual;
case kCondNE: return kNotEqual;
+ // Signed to unsigned, and FP to x86 name.
case kCondLT: return kBelow;
case kCondLE: return kBelowEqual;
case kCondGT: return kAbove;
case kCondGE: return kAboveEqual;
+ // Unsigned conditions remain unchanged.
+ case kCondB: return kBelow;
+ case kCondBE: return kBelowEqual;
+ case kCondA: return kAbove;
+ case kCondAE: return kAboveEqual;
}
LOG(FATAL) << "Unreachable";
UNREACHABLE();
@@ -1041,8 +1052,7 @@ void LocationsBuilderX86::VisitExit(HExit* exit) {
exit->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86::VisitExit(HExit* exit) {
- UNUSED(exit);
+void InstructionCodeGeneratorX86::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
void InstructionCodeGeneratorX86::GenerateFPJumps(HCondition* cond,
@@ -1068,7 +1078,7 @@ void InstructionCodeGeneratorX86::GenerateLongComparesAndJumps(HCondition* cond,
Register left_low = left.AsRegisterPairLow<Register>();
IfCondition true_high_cond = if_cond;
IfCondition false_high_cond = cond->GetOppositeCondition();
- Condition final_condition = X86UnsignedOrFPCondition(if_cond);
+ Condition final_condition = X86UnsignedOrFPCondition(if_cond); // unsigned on lower part
// Set the conditions for the test, remembering that == needs to be
// decided using the low words.
@@ -1089,6 +1099,18 @@ void InstructionCodeGeneratorX86::GenerateLongComparesAndJumps(HCondition* cond,
case kCondGE:
true_high_cond = kCondGT;
break;
+ case kCondB:
+ false_high_cond = kCondA;
+ break;
+ case kCondBE:
+ true_high_cond = kCondB;
+ break;
+ case kCondA:
+ false_high_cond = kCondB;
+ break;
+ case kCondAE:
+ true_high_cond = kCondA;
+ break;
}
if (right.IsConstant()) {
@@ -1102,12 +1124,12 @@ void InstructionCodeGeneratorX86::GenerateLongComparesAndJumps(HCondition* cond,
__ cmpl(left_high, Immediate(val_high));
}
if (if_cond == kCondNE) {
- __ j(X86SignedCondition(true_high_cond), true_label);
+ __ j(X86Condition(true_high_cond), true_label);
} else if (if_cond == kCondEQ) {
- __ j(X86SignedCondition(false_high_cond), false_label);
+ __ j(X86Condition(false_high_cond), false_label);
} else {
- __ j(X86SignedCondition(true_high_cond), true_label);
- __ j(X86SignedCondition(false_high_cond), false_label);
+ __ j(X86Condition(true_high_cond), true_label);
+ __ j(X86Condition(false_high_cond), false_label);
}
// Must be equal high, so compare the lows.
if (val_low == 0) {
@@ -1121,12 +1143,12 @@ void InstructionCodeGeneratorX86::GenerateLongComparesAndJumps(HCondition* cond,
__ cmpl(left_high, right_high);
if (if_cond == kCondNE) {
- __ j(X86SignedCondition(true_high_cond), true_label);
+ __ j(X86Condition(true_high_cond), true_label);
} else if (if_cond == kCondEQ) {
- __ j(X86SignedCondition(false_high_cond), false_label);
+ __ j(X86Condition(false_high_cond), false_label);
} else {
- __ j(X86SignedCondition(true_high_cond), true_label);
- __ j(X86SignedCondition(false_high_cond), false_label);
+ __ j(X86Condition(true_high_cond), true_label);
+ __ j(X86Condition(false_high_cond), false_label);
}
// Must be equal high, so compare the lows.
__ cmpl(left_low, right_low);
@@ -1215,7 +1237,7 @@ void InstructionCodeGeneratorX86::GenerateTestAndBranch(HInstruction* instructio
}
__ j(kNotEqual, true_target);
} else {
- __ j(X86SignedCondition(cond->AsCondition()->GetCondition()), true_target);
+ __ j(X86Condition(cond->AsCondition()->GetCondition()), true_target);
}
} else {
// Condition has not been materialized, use its inputs as the
@@ -1248,7 +1270,7 @@ void InstructionCodeGeneratorX86::GenerateTestAndBranch(HInstruction* instructio
} else {
__ cmpl(lhs.AsRegister<Register>(), Address(ESP, rhs.GetStackIndex()));
}
- __ j(X86SignedCondition(cond->AsCondition()->GetCondition()), true_target);
+ __ j(X86Condition(cond->AsCondition()->GetCondition()), true_target);
}
}
if (false_target != nullptr) {
@@ -1310,9 +1332,8 @@ void LocationsBuilderX86::VisitLoadLocal(HLoadLocal* local) {
local->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86::VisitLoadLocal(HLoadLocal* load) {
+void InstructionCodeGeneratorX86::VisitLoadLocal(HLoadLocal* load ATTRIBUTE_UNUSED) {
// Nothing to do, this is driven by the code generator.
- UNUSED(load);
}
void LocationsBuilderX86::VisitStoreLocal(HStoreLocal* store) {
@@ -1339,8 +1360,7 @@ void LocationsBuilderX86::VisitStoreLocal(HStoreLocal* store) {
}
}
-void InstructionCodeGeneratorX86::VisitStoreLocal(HStoreLocal* store) {
- UNUSED(store);
+void InstructionCodeGeneratorX86::VisitStoreLocal(HStoreLocal* store ATTRIBUTE_UNUSED) {
}
void LocationsBuilderX86::VisitCondition(HCondition* cond) {
@@ -1406,7 +1426,7 @@ void InstructionCodeGeneratorX86::VisitCondition(HCondition* cond) {
} else {
__ cmpl(lhs.AsRegister<Register>(), Address(ESP, rhs.GetStackIndex()));
}
- __ setb(X86SignedCondition(cond->GetCondition()), reg);
+ __ setb(X86Condition(cond->GetCondition()), reg);
return;
}
case Primitive::kPrimLong:
@@ -1484,15 +1504,46 @@ void InstructionCodeGeneratorX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* c
VisitCondition(comp);
}
+void LocationsBuilderX86::VisitBelow(HBelow* comp) {
+ VisitCondition(comp);
+}
+
+void InstructionCodeGeneratorX86::VisitBelow(HBelow* comp) {
+ VisitCondition(comp);
+}
+
+void LocationsBuilderX86::VisitBelowOrEqual(HBelowOrEqual* comp) {
+ VisitCondition(comp);
+}
+
+void InstructionCodeGeneratorX86::VisitBelowOrEqual(HBelowOrEqual* comp) {
+ VisitCondition(comp);
+}
+
+void LocationsBuilderX86::VisitAbove(HAbove* comp) {
+ VisitCondition(comp);
+}
+
+void InstructionCodeGeneratorX86::VisitAbove(HAbove* comp) {
+ VisitCondition(comp);
+}
+
+void LocationsBuilderX86::VisitAboveOrEqual(HAboveOrEqual* comp) {
+ VisitCondition(comp);
+}
+
+void InstructionCodeGeneratorX86::VisitAboveOrEqual(HAboveOrEqual* comp) {
+ VisitCondition(comp);
+}
+
void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
LocationSummary* locations =
new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant) {
+void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86::VisitNullConstant(HNullConstant* constant) {
@@ -1501,9 +1552,8 @@ void LocationsBuilderX86::VisitNullConstant(HNullConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86::VisitNullConstant(HNullConstant* constant) {
+void InstructionCodeGeneratorX86::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
@@ -1512,9 +1562,8 @@ void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant) {
+void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86::VisitFloatConstant(HFloatConstant* constant) {
@@ -1523,9 +1572,8 @@ void LocationsBuilderX86::VisitFloatConstant(HFloatConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86::VisitFloatConstant(HFloatConstant* constant) {
+void InstructionCodeGeneratorX86::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86::VisitDoubleConstant(HDoubleConstant* constant) {
@@ -1534,9 +1582,8 @@ void LocationsBuilderX86::VisitDoubleConstant(HDoubleConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86::VisitDoubleConstant(HDoubleConstant* constant) {
+void InstructionCodeGeneratorX86::VisitDoubleConstant(HDoubleConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
@@ -1551,8 +1598,7 @@ void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
ret->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret) {
- UNUSED(ret);
+void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
codegen_->GenerateFrameExit();
}
@@ -3686,8 +3732,7 @@ void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
locations->SetOut(Location::Any());
}
-void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction) {
- UNUSED(instruction);
+void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
LOG(FATAL) << "Unreachable";
}
@@ -4685,13 +4730,11 @@ void LocationsBuilderX86::VisitTemporary(HTemporary* temp) {
temp->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86::VisitTemporary(HTemporary* temp) {
+void InstructionCodeGeneratorX86::VisitTemporary(HTemporary* temp ATTRIBUTE_UNUSED) {
// Nothing to do, this is driven by the code generator.
- UNUSED(temp);
}
-void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction) {
- UNUSED(instruction);
+void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
LOG(FATAL) << "Unreachable";
}
@@ -5614,15 +5657,13 @@ void InstructionCodeGeneratorX86::HandleBitwiseOperation(HBinaryOperation* instr
}
}
-void LocationsBuilderX86::VisitBoundType(HBoundType* instruction) {
+void LocationsBuilderX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
// Nothing to do, this should be removed during prepare for register allocator.
- UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
-void InstructionCodeGeneratorX86::VisitBoundType(HBoundType* instruction) {
+void InstructionCodeGeneratorX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
// Nothing to do, this should be removed during prepare for register allocator.
- UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
@@ -5922,115 +5963,6 @@ Address CodeGeneratorX86::LiteralCaseTable(HX86PackedSwitch* switch_instr,
return Address(reg, value, TIMES_4, kDummy32BitOffset, table_fixup);
}
-/**
- * Finds instructions that need the constant area base as an input.
- */
-class ConstantHandlerVisitor : public HGraphVisitor {
- public:
- explicit ConstantHandlerVisitor(HGraph* graph) : HGraphVisitor(graph), base_(nullptr) {}
-
- private:
- void VisitAdd(HAdd* add) OVERRIDE {
- BinaryFP(add);
- }
-
- void VisitSub(HSub* sub) OVERRIDE {
- BinaryFP(sub);
- }
-
- void VisitMul(HMul* mul) OVERRIDE {
- BinaryFP(mul);
- }
-
- void VisitDiv(HDiv* div) OVERRIDE {
- BinaryFP(div);
- }
-
- void VisitReturn(HReturn* ret) OVERRIDE {
- HConstant* value = ret->InputAt(0)->AsConstant();
- if ((value != nullptr && Primitive::IsFloatingPointType(value->GetType()))) {
- ReplaceInput(ret, value, 0, true);
- }
- }
-
- void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE {
- HandleInvoke(invoke);
- }
-
- void VisitInvokeVirtual(HInvokeVirtual* invoke) OVERRIDE {
- HandleInvoke(invoke);
- }
-
- void VisitInvokeInterface(HInvokeInterface* invoke) OVERRIDE {
- HandleInvoke(invoke);
- }
-
- void BinaryFP(HBinaryOperation* bin) {
- HConstant* rhs = bin->InputAt(1)->AsConstant();
- if (rhs != nullptr && Primitive::IsFloatingPointType(bin->GetResultType())) {
- ReplaceInput(bin, rhs, 1, false);
- }
- }
-
- void VisitPackedSwitch(HPackedSwitch* switch_insn) OVERRIDE {
- // We need to replace the HPackedSwitch with a HX86PackedSwitch in order to
- // address the constant area.
- InitializeConstantAreaPointer(switch_insn);
- HGraph* graph = GetGraph();
- HBasicBlock* block = switch_insn->GetBlock();
- HX86PackedSwitch* x86_switch = new (graph->GetArena()) HX86PackedSwitch(
- switch_insn->GetStartValue(),
- switch_insn->GetNumEntries(),
- switch_insn->InputAt(0),
- base_,
- switch_insn->GetDexPc());
- block->ReplaceAndRemoveInstructionWith(switch_insn, x86_switch);
- }
-
- void InitializeConstantAreaPointer(HInstruction* user) {
- // Ensure we only initialize the pointer once.
- if (base_ != nullptr) {
- return;
- }
-
- HGraph* graph = GetGraph();
- HBasicBlock* entry = graph->GetEntryBlock();
- base_ = new (graph->GetArena()) HX86ComputeBaseMethodAddress();
- HInstruction* insert_pos = (user->GetBlock() == entry) ? user : entry->GetLastInstruction();
- entry->InsertInstructionBefore(base_, insert_pos);
- DCHECK(base_ != nullptr);
- }
-
- void ReplaceInput(HInstruction* insn, HConstant* value, int input_index, bool materialize) {
- InitializeConstantAreaPointer(insn);
- HGraph* graph = GetGraph();
- HBasicBlock* block = insn->GetBlock();
- HX86LoadFromConstantTable* load_constant =
- new (graph->GetArena()) HX86LoadFromConstantTable(base_, value, materialize);
- block->InsertInstructionBefore(load_constant, insn);
- insn->ReplaceInput(load_constant, input_index);
- }
-
- void HandleInvoke(HInvoke* invoke) {
- // Ensure that we can load FP arguments from the constant area.
- for (size_t i = 0, e = invoke->InputCount(); i < e; i++) {
- HConstant* input = invoke->InputAt(i)->AsConstant();
- if (input != nullptr && Primitive::IsFloatingPointType(input->GetType())) {
- ReplaceInput(invoke, input, i, true);
- }
- }
- }
-
- // The generated HX86ComputeBaseMethodAddress in the entry block needed as an
- // input to the HX86LoadFromConstantTable instructions.
- HX86ComputeBaseMethodAddress* base_;
-};
-
-void ConstantAreaFixups::Run() {
- ConstantHandlerVisitor visitor(graph_);
- visitor.VisitInsertionOrder();
-}
-
// TODO: target as memory.
void CodeGeneratorX86::MoveFromReturnRegister(Location target, Primitive::Type type) {
if (!target.IsValid()) {
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index f0d9420f87..bf570f581b 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -449,11 +449,16 @@ inline Condition X86_64IntegerCondition(IfCondition cond) {
case kCondLE: return kLessEqual;
case kCondGT: return kGreater;
case kCondGE: return kGreaterEqual;
+ case kCondB: return kBelow;
+ case kCondBE: return kBelowEqual;
+ case kCondA: return kAbove;
+ case kCondAE: return kAboveEqual;
}
LOG(FATAL) << "Unreachable";
UNREACHABLE();
}
+// Maps FP condition to x86_64 name.
inline Condition X86_64FPCondition(IfCondition cond) {
switch (cond) {
case kCondEQ: return kEqual;
@@ -462,6 +467,7 @@ inline Condition X86_64FPCondition(IfCondition cond) {
case kCondLE: return kBelowEqual;
case kCondGT: return kAbove;
case kCondGE: return kAboveEqual;
+ default: break; // should not happen
};
LOG(FATAL) << "Unreachable";
UNREACHABLE();
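The FP mapping sends LT/LE/GT/GE to the below/above family because UCOMISS/UCOMISD report their result only through ZF, PF and CF (OF and SF are cleared), so only the unsigned-style jcc/setcc variants are meaningful after an FP compare:

  a < b      : CF == 1             -> jb / setb
  a <= b     : CF == 1 || ZF == 1  -> jbe
  unordered  : ZF == PF == CF == 1 (why NaN needs the extra PF handling elsewhere)

The new default branch covers kCondB/BE/A/AE, which, per its "should not happen" comment, are never expected to reach the FP path.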
@@ -1044,8 +1050,7 @@ void LocationsBuilderX86_64::VisitExit(HExit* exit) {
exit->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit) {
- UNUSED(exit);
+void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
@@ -1273,9 +1278,8 @@ void LocationsBuilderX86_64::VisitLoadLocal(HLoadLocal* local) {
local->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86_64::VisitLoadLocal(HLoadLocal* load) {
+void InstructionCodeGeneratorX86_64::VisitLoadLocal(HLoadLocal* load ATTRIBUTE_UNUSED) {
// Nothing to do, this is driven by the code generator.
- UNUSED(load);
}
void LocationsBuilderX86_64::VisitStoreLocal(HStoreLocal* store) {
@@ -1302,8 +1306,7 @@ void LocationsBuilderX86_64::VisitStoreLocal(HStoreLocal* store) {
}
}
-void InstructionCodeGeneratorX86_64::VisitStoreLocal(HStoreLocal* store) {
- UNUSED(store);
+void InstructionCodeGeneratorX86_64::VisitStoreLocal(HStoreLocal* store ATTRIBUTE_UNUSED) {
}
void LocationsBuilderX86_64::VisitCondition(HCondition* cond) {
@@ -1475,6 +1478,38 @@ void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual
VisitCondition(comp);
}
+void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
+ VisitCondition(comp);
+}
+
+void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
+ VisitCondition(comp);
+}
+
+void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
+ VisitCondition(comp);
+}
+
+void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
+ VisitCondition(comp);
+}
+
+void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
+ VisitCondition(comp);
+}
+
+void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
+ VisitCondition(comp);
+}
+
+void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
+ VisitCondition(comp);
+}
+
+void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
+ VisitCondition(comp);
+}
+
void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
LocationSummary* locations =
new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
@@ -1576,9 +1611,8 @@ void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant) {
+void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
@@ -1587,9 +1621,8 @@ void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant) {
+void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
@@ -1598,9 +1631,8 @@ void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant) {
+void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
@@ -1609,9 +1641,8 @@ void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant) {
+void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
@@ -1620,9 +1651,9 @@ void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
+void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
+ HDoubleConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
@@ -1637,8 +1668,7 @@ void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
ret->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret) {
- UNUSED(ret);
+void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
codegen_->GenerateFrameExit();
}
@@ -3592,8 +3622,7 @@ void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
locations->SetOut(Location::Any());
}
-void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction) {
- UNUSED(instruction);
+void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
LOG(FATAL) << "Unimplemented";
}
@@ -4413,13 +4442,11 @@ void LocationsBuilderX86_64::VisitTemporary(HTemporary* temp) {
temp->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86_64::VisitTemporary(HTemporary* temp) {
+void InstructionCodeGeneratorX86_64::VisitTemporary(HTemporary* temp ATTRIBUTE_UNUSED) {
// Nothing to do, this is driven by the code generator.
- UNUSED(temp);
}
-void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction) {
- UNUSED(instruction);
+void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
LOG(FATAL) << "Unimplemented";
}
@@ -5294,15 +5321,13 @@ void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* in
}
}
-void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction) {
+void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
// Nothing to do, this should be removed during prepare for register allocator.
- UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
-void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction) {
+void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
// Nothing to do, this should be removed during prepare for register allocator.
- UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
diff --git a/compiler/optimizing/codegen_test.cc b/compiler/optimizing/codegen_test.cc
index 22f227c56a..fe5af2fc5e 100644
--- a/compiler/optimizing/codegen_test.cc
+++ b/compiler/optimizing/codegen_test.cc
@@ -761,4 +761,130 @@ TEST(CodegenTest, ReturnDivInt2Addr) {
TestCode(data, true, 2);
}
+// Helper method: builds a graph that returns (i condition j), runs it, and checks the result.
+static void TestComparison(IfCondition condition, int64_t i, int64_t j, Primitive::Type type) {
+ ArenaPool pool;
+ ArenaAllocator allocator(&pool);
+ HGraph* graph = CreateGraph(&allocator);
+
+ HBasicBlock* entry_block = new (&allocator) HBasicBlock(graph);
+ graph->AddBlock(entry_block);
+ graph->SetEntryBlock(entry_block);
+ entry_block->AddInstruction(new (&allocator) HGoto());
+
+ HBasicBlock* block = new (&allocator) HBasicBlock(graph);
+ graph->AddBlock(block);
+
+ HBasicBlock* exit_block = new (&allocator) HBasicBlock(graph);
+ graph->AddBlock(exit_block);
+ graph->SetExitBlock(exit_block);
+ exit_block->AddInstruction(new (&allocator) HExit());
+
+ entry_block->AddSuccessor(block);
+ block->AddSuccessor(exit_block);
+
+ HInstruction* op1;
+ HInstruction* op2;
+ if (type == Primitive::kPrimInt) {
+ op1 = graph->GetIntConstant(i);
+ op2 = graph->GetIntConstant(j);
+ } else {
+ DCHECK_EQ(type, Primitive::kPrimLong);
+ op1 = graph->GetLongConstant(i);
+ op2 = graph->GetLongConstant(j);
+ }
+
+ HInstruction* comparison = nullptr;
+ bool expected_result = false;
+ const uint64_t x = i;
+ const uint64_t y = j;
+ switch (condition) {
+ case kCondEQ:
+ comparison = new (&allocator) HEqual(op1, op2);
+ expected_result = (i == j);
+ break;
+ case kCondNE:
+ comparison = new (&allocator) HNotEqual(op1, op2);
+ expected_result = (i != j);
+ break;
+ case kCondLT:
+ comparison = new (&allocator) HLessThan(op1, op2);
+ expected_result = (i < j);
+ break;
+ case kCondLE:
+ comparison = new (&allocator) HLessThanOrEqual(op1, op2);
+ expected_result = (i <= j);
+ break;
+ case kCondGT:
+ comparison = new (&allocator) HGreaterThan(op1, op2);
+ expected_result = (i > j);
+ break;
+ case kCondGE:
+ comparison = new (&allocator) HGreaterThanOrEqual(op1, op2);
+ expected_result = (i >= j);
+ break;
+ case kCondB:
+ comparison = new (&allocator) HBelow(op1, op2);
+ expected_result = (x < y);
+ break;
+ case kCondBE:
+ comparison = new (&allocator) HBelowOrEqual(op1, op2);
+ expected_result = (x <= y);
+ break;
+ case kCondA:
+ comparison = new (&allocator) HAbove(op1, op2);
+ expected_result = (x > y);
+ break;
+ case kCondAE:
+ comparison = new (&allocator) HAboveOrEqual(op1, op2);
+ expected_result = (x >= y);
+ break;
+ }
+ block->AddInstruction(comparison);
+ block->AddInstruction(new (&allocator) HReturn(comparison));
+
+ auto hook_before_codegen = [](HGraph*) {
+ };
+ RunCodeOptimized(graph, hook_before_codegen, true, expected_result);
+}
+
+TEST(CodegenTest, ComparisonsInt) {
+ for (int64_t i = -1; i <= 1; i++) {
+ for (int64_t j = -1; j <= 1; j++) {
+ TestComparison(kCondEQ, i, j, Primitive::kPrimInt);
+ TestComparison(kCondNE, i, j, Primitive::kPrimInt);
+ TestComparison(kCondLT, i, j, Primitive::kPrimInt);
+ TestComparison(kCondLE, i, j, Primitive::kPrimInt);
+ TestComparison(kCondGT, i, j, Primitive::kPrimInt);
+ TestComparison(kCondGE, i, j, Primitive::kPrimInt);
+ TestComparison(kCondB, i, j, Primitive::kPrimInt);
+ TestComparison(kCondBE, i, j, Primitive::kPrimInt);
+ TestComparison(kCondA, i, j, Primitive::kPrimInt);
+ TestComparison(kCondAE, i, j, Primitive::kPrimInt);
+ }
+ }
+}
+
+TEST(CodegenTest, ComparisonsLong) {
+ // TODO: make MIPS work for long
+ if (kRuntimeISA == kMips || kRuntimeISA == kMips64) {
+ return;
+ }
+
+ for (int64_t i = -1; i <= 1; i++) {
+ for (int64_t j = -1; j <= 1; j++) {
+ TestComparison(kCondEQ, i, j, Primitive::kPrimLong);
+ TestComparison(kCondNE, i, j, Primitive::kPrimLong);
+ TestComparison(kCondLT, i, j, Primitive::kPrimLong);
+ TestComparison(kCondLE, i, j, Primitive::kPrimLong);
+ TestComparison(kCondGT, i, j, Primitive::kPrimLong);
+ TestComparison(kCondGE, i, j, Primitive::kPrimLong);
+ TestComparison(kCondB, i, j, Primitive::kPrimLong);
+ TestComparison(kCondBE, i, j, Primitive::kPrimLong);
+ TestComparison(kCondA, i, j, Primitive::kPrimLong);
+ TestComparison(kCondAE, i, j, Primitive::kPrimLong);
+ }
+ }
+}
+
} // namespace art
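One subtlety in the helper above: expected_result for the unsigned conditions is computed on uint64_t copies of i and j even for the kPrimInt tests. That still matches 32-bit unsigned semantics, because sign extension preserves unsigned order; a standalone check (sketch):

#include <cassert>
#include <cstdint>

int main() {
  const int32_t vals[] = {-2, -1, 0, 1, 2};
  for (int32_t i : vals) {
    for (int32_t j : vals) {
      bool u32 = static_cast<uint32_t>(i) < static_cast<uint32_t>(j);
      bool u64 = static_cast<uint64_t>(static_cast<int64_t>(i)) <
                 static_cast<uint64_t>(static_cast<int64_t>(j));
      assert(u32 == u64);  // sign extension preserves unsigned order
    }
  }
  return 0;
}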
diff --git a/compiler/optimizing/common_arm64.h b/compiler/optimizing/common_arm64.h
index f54547534f..4abe5e953c 100644
--- a/compiler/optimizing/common_arm64.h
+++ b/compiler/optimizing/common_arm64.h
@@ -206,7 +206,9 @@ static bool CanEncodeConstantAsImmediate(HConstant* constant, HInstruction* inst
if (instr->IsAdd() || instr->IsSub() || instr->IsCondition() ||
instr->IsCompare() || instr->IsBoundsCheck()) {
// Uses aliases of ADD/SUB instructions.
- return vixl::Assembler::IsImmAddSub(value);
+ // If `value` does not fit but `-value` does, VIXL will automatically use
+ // the 'opposite' instruction.
+ return vixl::Assembler::IsImmAddSub(value) || vixl::Assembler::IsImmAddSub(-value);
} else if (instr->IsAnd() || instr->IsOr() || instr->IsXor()) {
// Uses logical operations.
return vixl::Assembler::IsImmLogical(value, vixl::kXRegSize);
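The widened check is safe because an add/sub immediate that only encodes after negation can be emitted as the opposite instruction, e.g. add x0, x1, #-8 becomes sub x0, x1, #8. A sketch of the predicate being tested twice, assuming the usual AArch64 rule of a 12-bit unsigned immediate optionally shifted left by 12 (mirrors what vixl::Assembler::IsImmAddSub checks, but is not the VIXL API):

#include <cstdint>

static bool IsImmAddSubSketch(int64_t v) {
  return v >= 0 &&
         ((v & ~int64_t{0xfff}) == 0 ||               // uimm12
          ((v & 0xfff) == 0 && (v >> 12) <= 0xfff));  // uimm12 << 12
}

int main() {
  // -8 fails directly but passes negated; the macro assembler then
  // flips ADD <-> SUB when it materializes the operation.
  return (!IsImmAddSubSketch(-8) && IsImmAddSubSketch(8)) ? 0 : 1;
}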
diff --git a/compiler/optimizing/constant_area_fixups_x86.cc b/compiler/optimizing/constant_area_fixups_x86.cc
new file mode 100644
index 0000000000..c3470002c5
--- /dev/null
+++ b/compiler/optimizing/constant_area_fixups_x86.cc
@@ -0,0 +1,132 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "constant_area_fixups_x86.h"
+
+namespace art {
+namespace x86 {
+
+/**
+ * Finds instructions that need the constant area base as an input.
+ */
+class ConstantHandlerVisitor : public HGraphVisitor {
+ public:
+ explicit ConstantHandlerVisitor(HGraph* graph) : HGraphVisitor(graph), base_(nullptr) {}
+
+ private:
+ void VisitAdd(HAdd* add) OVERRIDE {
+ BinaryFP(add);
+ }
+
+ void VisitSub(HSub* sub) OVERRIDE {
+ BinaryFP(sub);
+ }
+
+ void VisitMul(HMul* mul) OVERRIDE {
+ BinaryFP(mul);
+ }
+
+ void VisitDiv(HDiv* div) OVERRIDE {
+ BinaryFP(div);
+ }
+
+ void VisitReturn(HReturn* ret) OVERRIDE {
+ HConstant* value = ret->InputAt(0)->AsConstant();
+ if ((value != nullptr && Primitive::IsFloatingPointType(value->GetType()))) {
+ ReplaceInput(ret, value, 0, true);
+ }
+ }
+
+ void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE {
+ HandleInvoke(invoke);
+ }
+
+ void VisitInvokeVirtual(HInvokeVirtual* invoke) OVERRIDE {
+ HandleInvoke(invoke);
+ }
+
+ void VisitInvokeInterface(HInvokeInterface* invoke) OVERRIDE {
+ HandleInvoke(invoke);
+ }
+
+ void BinaryFP(HBinaryOperation* bin) {
+ HConstant* rhs = bin->InputAt(1)->AsConstant();
+ if (rhs != nullptr && Primitive::IsFloatingPointType(bin->GetResultType())) {
+ ReplaceInput(bin, rhs, 1, false);
+ }
+ }
+
+ void VisitPackedSwitch(HPackedSwitch* switch_insn) OVERRIDE {
+ // We need to replace the HPackedSwitch with a HX86PackedSwitch in order to
+ // address the constant area.
+ InitializeConstantAreaPointer(switch_insn);
+ HGraph* graph = GetGraph();
+ HBasicBlock* block = switch_insn->GetBlock();
+ HX86PackedSwitch* x86_switch = new (graph->GetArena()) HX86PackedSwitch(
+ switch_insn->GetStartValue(),
+ switch_insn->GetNumEntries(),
+ switch_insn->InputAt(0),
+ base_,
+ switch_insn->GetDexPc());
+ block->ReplaceAndRemoveInstructionWith(switch_insn, x86_switch);
+ }
+
+ void InitializeConstantAreaPointer(HInstruction* user) {
+ // Ensure we only initialize the pointer once.
+ if (base_ != nullptr) {
+ return;
+ }
+
+ HGraph* graph = GetGraph();
+ HBasicBlock* entry = graph->GetEntryBlock();
+ base_ = new (graph->GetArena()) HX86ComputeBaseMethodAddress();
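+ // If the user is itself in the entry block, compute the base right before
+ // it; otherwise place it at the end of the entry block, where it dominates
+ // all later blocks.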
+ HInstruction* insert_pos = (user->GetBlock() == entry) ? user : entry->GetLastInstruction();
+ entry->InsertInstructionBefore(base_, insert_pos);
+ DCHECK(base_ != nullptr);
+ }
+
+ void ReplaceInput(HInstruction* insn, HConstant* value, int input_index, bool materialize) {
+ InitializeConstantAreaPointer(insn);
+ HGraph* graph = GetGraph();
+ HBasicBlock* block = insn->GetBlock();
+ HX86LoadFromConstantTable* load_constant =
+ new (graph->GetArena()) HX86LoadFromConstantTable(base_, value, materialize);
+ block->InsertInstructionBefore(load_constant, insn);
+ insn->ReplaceInput(load_constant, input_index);
+ }
+
+ void HandleInvoke(HInvoke* invoke) {
+ // Ensure that we can load FP arguments from the constant area.
+ for (size_t i = 0, e = invoke->InputCount(); i < e; i++) {
+ HConstant* input = invoke->InputAt(i)->AsConstant();
+ if (input != nullptr && Primitive::IsFloatingPointType(input->GetType())) {
+ ReplaceInput(invoke, input, i, true);
+ }
+ }
+ }
+
+ // The HX86ComputeBaseMethodAddress generated in the entry block, needed as
+ // an input to the HX86LoadFromConstantTable instructions.
+ HX86ComputeBaseMethodAddress* base_;
+};
+
+void ConstantAreaFixups::Run() {
+ ConstantHandlerVisitor visitor(graph_);
+ visitor.VisitInsertionOrder();
+}
+
+} // namespace x86
+} // namespace art
diff --git a/compiler/optimizing/induction_var_analysis.cc b/compiler/optimizing/induction_var_analysis.cc
index cf0f3493fd..8968a44da8 100644
--- a/compiler/optimizing/induction_var_analysis.cc
+++ b/compiler/optimizing/induction_var_analysis.cc
@@ -650,8 +650,7 @@ bool HInductionVarAnalysis::IsTaken(InductionInfo* lower_expr,
case kCondLE: return lower_value <= upper_value;
case kCondGT: return lower_value > upper_value;
case kCondGE: return lower_value >= upper_value;
- case kCondEQ:
- case kCondNE: LOG(FATAL) << "CONDITION UNREACHABLE";
+ default: LOG(FATAL) << "CONDITION UNREACHABLE";
}
}
return false; // not certain, may be untaken
@@ -680,8 +679,8 @@ bool HInductionVarAnalysis::IsFinite(InductionInfo* upper_expr,
(IsIntAndGet(upper_expr, &value) && value >= (min - stride_value - 1));
case kCondGE:
return (IsIntAndGet(upper_expr, &value) && value >= (min - stride_value));
- case kCondEQ:
- case kCondNE: LOG(FATAL) << "CONDITION UNREACHABLE";
+ default:
+ LOG(FATAL) << "CONDITION UNREACHABLE";
}
return false; // not certain, may be infinite
}
diff --git a/compiler/optimizing/instruction_simplifier.cc b/compiler/optimizing/instruction_simplifier.cc
index 839cf44632..d468540091 100644
--- a/compiler/optimizing/instruction_simplifier.cc
+++ b/compiler/optimizing/instruction_simplifier.cc
@@ -618,6 +618,8 @@ void InstructionSimplifierVisitor::VisitLessThanOrEqual(HLessThanOrEqual* condit
VisitCondition(condition);
}
+// TODO: unsigned comparisons too?
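+// (presumably: also route the new HBelow/HBelowOrEqual/HAbove/HAboveOrEqual here)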
+
void InstructionSimplifierVisitor::VisitCondition(HCondition* condition) {
// Try to fold an HCompare into this HCondition.
diff --git a/compiler/optimizing/intrinsics_mips64.cc b/compiler/optimizing/intrinsics_mips64.cc
index 764a11475f..b8598f3e48 100644
--- a/compiler/optimizing/intrinsics_mips64.cc
+++ b/compiler/optimizing/intrinsics_mips64.cc
@@ -43,6 +43,93 @@ ArenaAllocator* IntrinsicCodeGeneratorMIPS64::GetAllocator() {
return codegen_->GetGraph()->GetArena();
}
+#define __ codegen->GetAssembler()->
+
+static void MoveFromReturnRegister(Location trg,
+ Primitive::Type type,
+ CodeGeneratorMIPS64* codegen) {
+ if (!trg.IsValid()) {
+ DCHECK_EQ(type, Primitive::kPrimVoid);
+ return;
+ }
+
+ DCHECK_NE(type, Primitive::kPrimVoid);
+
+ if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
+ GpuRegister trg_reg = trg.AsRegister<GpuRegister>();
+ if (trg_reg != V0) {
+ __ Move(trg_reg, V0);
+ }
+ } else {
+ FpuRegister trg_reg = trg.AsFpuRegister<FpuRegister>();
+ if (trg_reg != F0) {
+ if (type == Primitive::kPrimFloat) {
+ __ MovS(trg_reg, F0);
+ } else {
+ __ MovD(trg_reg, F0);
+ }
+ }
+ }
+}
+
+static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
+ InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
+ IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
+}
+
+// Slow-path for fallback (calling the managed code to handle the
+// intrinsic) in an intrinsified call. This will copy the arguments
+// into the positions for a regular call.
+//
+// Note: The actual parameters are required to be in the locations
+// given by the invoke's location summary. If an intrinsic
+// modifies those locations before a slow-path call, they must be
+// restored!
+class IntrinsicSlowPathMIPS64 : public SlowPathCodeMIPS64 {
+ public:
+ explicit IntrinsicSlowPathMIPS64(HInvoke* invoke) : invoke_(invoke) { }
+
+ void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
+ CodeGeneratorMIPS64* codegen = down_cast<CodeGeneratorMIPS64*>(codegen_in);
+
+ __ Bind(GetEntryLabel());
+
+ SaveLiveRegisters(codegen, invoke_->GetLocations());
+
+ MoveArguments(invoke_, codegen);
+
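+ // A0 receives the ArtMethod* of the callee, as required by the managed
+ // calling convention on MIPS64.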
+ if (invoke_->IsInvokeStaticOrDirect()) {
+ codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
+ Location::RegisterLocation(A0));
+ codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);
+ } else {
+ UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
+ UNREACHABLE();
+ }
+
+ // Copy the result back to the expected output.
+ Location out = invoke_->GetLocations()->Out();
+ if (out.IsValid()) {
+ DCHECK(out.IsRegister()); // TODO: Replace this when we support output in memory.
+ DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
+ MoveFromReturnRegister(out, invoke_->GetType(), codegen);
+ }
+
+ RestoreLiveRegisters(codegen, invoke_->GetLocations());
+ __ B(GetExitLabel());
+ }
+
+ const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS64"; }
+
+ private:
+ // The instruction where this slow path is happening.
+ HInvoke* const invoke_;
+
+ DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS64);
+};
+
+#undef __
+
bool IntrinsicLocationsBuilderMIPS64::TryDispatch(HInvoke* invoke) {
Dispatch(invoke);
LocationSummary* res = invoke->GetLocations();
@@ -765,6 +852,270 @@ void IntrinsicCodeGeneratorMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
Thread::PeerOffset<kMips64PointerSize>().Int32Value());
}
+// char java.lang.String.charAt(int index)
+void IntrinsicLocationsBuilderMIPS64::VisitStringCharAt(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kCallOnSlowPath,
+ kIntrinsified);
+ locations->SetInAt(0, Location::RequiresRegister());
+ locations->SetInAt(1, Location::RequiresRegister());
+ locations->SetOut(Location::SameAsFirstInput());
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitStringCharAt(HInvoke* invoke) {
+ LocationSummary* locations = invoke->GetLocations();
+ Mips64Assembler* assembler = GetAssembler();
+
+ // Location of reference to data array
+ const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
+ // Location of count
+ const int32_t count_offset = mirror::String::CountOffset().Int32Value();
+
+ GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
+ GpuRegister idx = locations->InAt(1).AsRegister<GpuRegister>();
+ GpuRegister out = locations->Out().AsRegister<GpuRegister>();
+
+ // TODO: Maybe we can support range check elimination. Overall,
+ // though, I think it's not worth the cost.
+ // TODO: For simplicity, the index parameter is requested in a
+ // register, so unlike Quick we do not optimize the code for
+ // constants (which would save a register).
+
+ SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
+ codegen_->AddSlowPath(slow_path);
+
+ // Load the string length.
+ __ Lw(TMP, obj, count_offset);
+ codegen_->MaybeRecordImplicitNullCheck(invoke);
+ // Revert to the slow path if idx is too large or negative; the unsigned
+ // compare below treats a negative idx as a very large value.
+ __ Bgeuc(idx, TMP, slow_path->GetEntryLabel());
+
+ // out = obj[2*idx].
+ __ Sll(TMP, idx, 1); // idx * 2
+ __ Daddu(TMP, TMP, obj); // Address of char at location idx
+ __ Lhu(out, TMP, value_offset); // Load char at location idx
+
+ __ Bind(slow_path->GetExitLabel());
+}
+
+// int java.lang.String.compareTo(String anotherString)
+void IntrinsicLocationsBuilderMIPS64::VisitStringCompareTo(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kCall,
+ kIntrinsified);
+ InvokeRuntimeCallingConvention calling_convention;
+ locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+ Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
+ locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitStringCompareTo(HInvoke* invoke) {
+ Mips64Assembler* assembler = GetAssembler();
+ LocationSummary* locations = invoke->GetLocations();
+
+ // Note that the null check must have been done earlier.
+ DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
+
+ GpuRegister argument = locations->InAt(1).AsRegister<GpuRegister>();
+ SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
+ codegen_->AddSlowPath(slow_path);
+ __ Beqzc(argument, slow_path->GetEntryLabel());
+
+ __ LoadFromOffset(kLoadDoubleword,
+ TMP,
+ TR,
+ QUICK_ENTRYPOINT_OFFSET(kMips64WordSize,
+ pStringCompareTo).Int32Value());
+ __ Jalr(TMP);
+ __ Nop();
+ __ Bind(slow_path->GetExitLabel());
+}
+
+static void GenerateStringIndexOf(HInvoke* invoke,
+ Mips64Assembler* assembler,
+ CodeGeneratorMIPS64* codegen,
+ ArenaAllocator* allocator,
+ bool start_at_zero) {
+ LocationSummary* locations = invoke->GetLocations();
+ GpuRegister tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<GpuRegister>() : TMP;
+
+ // Note that the null check must have been done earlier.
+ DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
+
+ // Check for code points > 0xFFFF. Either a slow-path check when we
+ // don't know statically, or directly dispatch if we have a constant.
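+ // (Code points above 0xFFFF are encoded as surrogate pairs, which this stub
+ // does not search for; the runtime call handles them.)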
+ SlowPathCodeMIPS64* slow_path = nullptr;
+ if (invoke->InputAt(1)->IsIntConstant()) {
+ if (!IsUint<16>(invoke->InputAt(1)->AsIntConstant()->GetValue())) {
+ // Always needs the slow-path. We could directly dispatch to it,
+ // but this case should be rare, so for simplicity just put the
+ // full slow-path down and branch unconditionally.
+ slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
+ codegen->AddSlowPath(slow_path);
+ __ B(slow_path->GetEntryLabel());
+ __ Bind(slow_path->GetExitLabel());
+ return;
+ }
+ } else {
+ GpuRegister char_reg = locations->InAt(1).AsRegister<GpuRegister>();
+ __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
+ slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
+ codegen->AddSlowPath(slow_path);
+ __ Bltuc(tmp_reg, char_reg, slow_path->GetEntryLabel()); // UTF-16 required
+ }
+
+ if (start_at_zero) {
+ DCHECK_EQ(tmp_reg, A2);
+ // Start-index = 0.
+ __ Clear(tmp_reg);
+ } else {
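+ // Clamp a negative start index to zero: Seleqz writes
+ // A2 = (TMP == 0) ? A2 : 0.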
+ __ Slt(TMP, A2, ZERO); // if fromIndex < 0
+ __ Seleqz(A2, A2, TMP); // fromIndex = 0
+ }
+
+ __ LoadFromOffset(kLoadDoubleword,
+ TMP,
+ TR,
+ QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pIndexOf).Int32Value());
+ __ Jalr(TMP);
+ __ Nop();
+
+ if (slow_path != nullptr) {
+ __ Bind(slow_path->GetExitLabel());
+ }
+}
+
+// int java.lang.String.indexOf(int ch)
+void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOf(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kCall,
+ kIntrinsified);
+ // We have a hand-crafted assembly stub that follows the runtime
+ // calling convention. So it's best to align the inputs accordingly.
+ InvokeRuntimeCallingConvention calling_convention;
+ locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+ Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
+ locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
+
+ // Need a temp for the slow-path code point compare, and to pass start-index=0.
+ locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOf(HInvoke* invoke) {
+ GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true);
+}
+
+// int java.lang.String.indexOf(int ch, int fromIndex)
+void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kCall,
+ kIntrinsified);
+ // We have a hand-crafted assembly stub that follows the runtime
+ // calling convention. So it's best to align the inputs accordingly.
+ InvokeRuntimeCallingConvention calling_convention;
+ locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+ locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
+ Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
+ locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
+ GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false);
+}
+
+// java.lang.String.String(byte[] bytes)
+void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kCall,
+ kIntrinsified);
+ InvokeRuntimeCallingConvention calling_convention;
+ locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+ locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
+ locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
+ Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
+ locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
+ Mips64Assembler* assembler = GetAssembler();
+ LocationSummary* locations = invoke->GetLocations();
+
+ GpuRegister byte_array = locations->InAt(0).AsRegister<GpuRegister>();
+ SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
+ codegen_->AddSlowPath(slow_path);
+ __ Beqzc(byte_array, slow_path->GetEntryLabel());
+
+ __ LoadFromOffset(kLoadDoubleword,
+ TMP,
+ TR,
+ QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pAllocStringFromBytes).Int32Value());
+ codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
+ __ Jalr(TMP);
+ __ Nop();
+ __ Bind(slow_path->GetExitLabel());
+}
+
+// java.lang.String.String(char[] value)
+void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kCall,
+ kIntrinsified);
+ InvokeRuntimeCallingConvention calling_convention;
+ locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+ locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
+ Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
+ locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
+ Mips64Assembler* assembler = GetAssembler();
+
+ __ LoadFromOffset(kLoadDoubleword,
+ TMP,
+ TR,
+ QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pAllocStringFromChars).Int32Value());
+ codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
+ __ Jalr(TMP);
+ __ Nop();
+}
+
+// java.lang.String.String(String original)
+void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kCall,
+ kIntrinsified);
+ InvokeRuntimeCallingConvention calling_convention;
+ locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+ locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
+ Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
+ locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
+ Mips64Assembler* assembler = GetAssembler();
+ LocationSummary* locations = invoke->GetLocations();
+
+ GpuRegister string_to_copy = locations->InAt(0).AsRegister<GpuRegister>();
+ SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
+ codegen_->AddSlowPath(slow_path);
+ __ Beqzc(string_to_copy, slow_path->GetEntryLabel());
+
+ __ LoadFromOffset(kLoadDoubleword,
+ TMP,
+ TR,
+ QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pAllocStringFromString).Int32Value());
+ codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
+ __ Jalr(TMP);
+ __ Nop();
+ __ Bind(slow_path->GetExitLabel());
+}
+
// Unimplemented intrinsics.
#define UNIMPLEMENTED_INTRINSIC(Name) \
@@ -794,14 +1145,7 @@ UNIMPLEMENTED_INTRINSIC(UnsafePutLongVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafeCASInt)
UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)
UNIMPLEMENTED_INTRINSIC(UnsafeCASObject)
-UNIMPLEMENTED_INTRINSIC(StringCharAt)
-UNIMPLEMENTED_INTRINSIC(StringCompareTo)
UNIMPLEMENTED_INTRINSIC(StringEquals)
-UNIMPLEMENTED_INTRINSIC(StringIndexOf)
-UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter)
-UNIMPLEMENTED_INTRINSIC(StringNewStringFromBytes)
-UNIMPLEMENTED_INTRINSIC(StringNewStringFromChars)
-UNIMPLEMENTED_INTRINSIC(StringNewStringFromString)
UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
UNIMPLEMENTED_INTRINSIC(LongRotateRight)
UNIMPLEMENTED_INTRINSIC(LongNumberOfTrailingZeros)
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 0d668e8cf7..939e62c6dd 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -81,12 +81,19 @@ static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);
static constexpr uint32_t kNoDexPc = -1;
enum IfCondition {
- kCondEQ,
- kCondNE,
- kCondLT,
- kCondLE,
- kCondGT,
- kCondGE,
+ // All types.
+ kCondEQ, // ==
+ kCondNE, // !=
+ // Signed integers and floating-point numbers.
+ kCondLT, // <
+ kCondLE, // <=
+ kCondGT, // >
+ kCondGE, // >=
+ // Unsigned integers.
+ kCondB, // <
+ kCondBE, // <=
+ kCondA, // >
+ kCondAE, // >=
};
class HInstructionList : public ValueObject {
@@ -988,11 +995,15 @@ class HLoopInformationOutwardIterator : public ValueObject {
};
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \
+ M(Above, Condition) \
+ M(AboveOrEqual, Condition) \
M(Add, BinaryOperation) \
M(And, BinaryOperation) \
M(ArrayGet, Instruction) \
M(ArrayLength, Instruction) \
M(ArraySet, Instruction) \
+ M(Below, Condition) \
+ M(BelowOrEqual, Condition) \
M(BooleanNot, UnaryOperation) \
M(BoundsCheck, Instruction) \
M(BoundType, Instruction) \
@@ -1788,8 +1799,7 @@ class HInstruction : public ArenaObject<kArenaAllocInstruction> {
return true;
}
- virtual bool CanDoImplicitNullCheckOn(HInstruction* obj) const {
- UNUSED(obj);
+ virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
return false;
}
@@ -1906,16 +1916,14 @@ class HInstruction : public ArenaObject<kArenaAllocInstruction> {
virtual bool CanBeMoved() const { return false; }
// Returns whether the two instructions are of the same kind.
- virtual bool InstructionTypeEquals(HInstruction* other) const {
- UNUSED(other);
+ virtual bool InstructionTypeEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
return false;
}
// Returns whether any data encoded in the two instructions is equal.
// This method does not look at the inputs. Both instructions must be
// of the same type, otherwise the method has undefined behavior.
- virtual bool InstructionDataEquals(HInstruction* other) const {
- UNUSED(other);
+ virtual bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
return false;
}
@@ -2478,8 +2486,7 @@ class HUnaryOperation : public HExpression<1> {
Primitive::Type GetResultType() const { return GetType(); }
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
@@ -2549,8 +2556,7 @@ class HBinaryOperation : public HExpression<2> {
}
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
@@ -2654,8 +2660,6 @@ class HEqual : public HCondition {
bool IsCommutative() const OVERRIDE { return true; }
- template <typename T> bool Compute(T x, T y) const { return x == y; }
-
HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
return GetBlock()->GetGraph()->GetIntConstant(
Compute(x->GetValue(), y->GetValue()), GetDexPc());
@@ -2676,6 +2680,8 @@ class HEqual : public HCondition {
}
private:
+ template <typename T> bool Compute(T x, T y) const { return x == y; }
+
DISALLOW_COPY_AND_ASSIGN(HEqual);
};
@@ -2686,8 +2692,6 @@ class HNotEqual : public HCondition {
bool IsCommutative() const OVERRIDE { return true; }
- template <typename T> bool Compute(T x, T y) const { return x != y; }
-
HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
return GetBlock()->GetGraph()->GetIntConstant(
Compute(x->GetValue(), y->GetValue()), GetDexPc());
@@ -2708,6 +2712,8 @@ class HNotEqual : public HCondition {
}
private:
+ template <typename T> bool Compute(T x, T y) const { return x != y; }
+
DISALLOW_COPY_AND_ASSIGN(HNotEqual);
};
@@ -2716,8 +2722,6 @@ class HLessThan : public HCondition {
HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
: HCondition(first, second, dex_pc) {}
- template <typename T> bool Compute(T x, T y) const { return x < y; }
-
HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
return GetBlock()->GetGraph()->GetIntConstant(
Compute(x->GetValue(), y->GetValue()), GetDexPc());
@@ -2738,6 +2742,8 @@ class HLessThan : public HCondition {
}
private:
+ template <typename T> bool Compute(T x, T y) const { return x < y; }
+
DISALLOW_COPY_AND_ASSIGN(HLessThan);
};
@@ -2746,8 +2752,6 @@ class HLessThanOrEqual : public HCondition {
HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
: HCondition(first, second, dex_pc) {}
- template <typename T> bool Compute(T x, T y) const { return x <= y; }
-
HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
return GetBlock()->GetGraph()->GetIntConstant(
Compute(x->GetValue(), y->GetValue()), GetDexPc());
@@ -2768,6 +2772,8 @@ class HLessThanOrEqual : public HCondition {
}
private:
+ template <typename T> bool Compute(T x, T y) const { return x <= y; }
+
DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
};
@@ -2776,8 +2782,6 @@ class HGreaterThan : public HCondition {
HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
: HCondition(first, second, dex_pc) {}
- template <typename T> bool Compute(T x, T y) const { return x > y; }
-
HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
return GetBlock()->GetGraph()->GetIntConstant(
Compute(x->GetValue(), y->GetValue()), GetDexPc());
@@ -2798,6 +2802,8 @@ class HGreaterThan : public HCondition {
}
private:
+ template <typename T> bool Compute(T x, T y) const { return x > y; }
+
DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
};
@@ -2806,8 +2812,6 @@ class HGreaterThanOrEqual : public HCondition {
HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
: HCondition(first, second, dex_pc) {}
- template <typename T> bool Compute(T x, T y) const { return x >= y; }
-
HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
return GetBlock()->GetGraph()->GetIntConstant(
Compute(x->GetValue(), y->GetValue()), GetDexPc());
@@ -2828,9 +2832,138 @@ class HGreaterThanOrEqual : public HCondition {
}
private:
+ template <typename T> bool Compute(T x, T y) const { return x >= y; }
+
DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
};
+class HBelow : public HCondition {
+ public:
+ HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
+ : HCondition(first, second, dex_pc) {}
+
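+ // Inputs are compared as unsigned values, here and in the other unsigned
+ // conditions below; hence the explicit casts in Evaluate.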
+ HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
+ return GetBlock()->GetGraph()->GetIntConstant(
+ Compute(static_cast<uint32_t>(x->GetValue()),
+ static_cast<uint32_t>(y->GetValue())), GetDexPc());
+ }
+ HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
+ return GetBlock()->GetGraph()->GetIntConstant(
+ Compute(static_cast<uint64_t>(x->GetValue()),
+ static_cast<uint64_t>(y->GetValue())), GetDexPc());
+ }
+
+ DECLARE_INSTRUCTION(Below);
+
+ IfCondition GetCondition() const OVERRIDE {
+ return kCondB;
+ }
+
+ IfCondition GetOppositeCondition() const OVERRIDE {
+ return kCondAE;
+ }
+
+ private:
+ template <typename T> bool Compute(T x, T y) const { return x < y; }
+
+ DISALLOW_COPY_AND_ASSIGN(HBelow);
+};
+
+class HBelowOrEqual : public HCondition {
+ public:
+ HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
+ : HCondition(first, second, dex_pc) {}
+
+ HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
+ return GetBlock()->GetGraph()->GetIntConstant(
+ Compute(static_cast<uint32_t>(x->GetValue()),
+ static_cast<uint32_t>(y->GetValue())), GetDexPc());
+ }
+ HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
+ return GetBlock()->GetGraph()->GetIntConstant(
+ Compute(static_cast<uint64_t>(x->GetValue()),
+ static_cast<uint64_t>(y->GetValue())), GetDexPc());
+ }
+
+ DECLARE_INSTRUCTION(BelowOrEqual);
+
+ IfCondition GetCondition() const OVERRIDE {
+ return kCondBE;
+ }
+
+ IfCondition GetOppositeCondition() const OVERRIDE {
+ return kCondA;
+ }
+
+ private:
+ template <typename T> bool Compute(T x, T y) const { return x <= y; }
+
+ DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual);
+};
+
+class HAbove : public HCondition {
+ public:
+ HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
+ : HCondition(first, second, dex_pc) {}
+
+ HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
+ return GetBlock()->GetGraph()->GetIntConstant(
+ Compute(static_cast<uint32_t>(x->GetValue()),
+ static_cast<uint32_t>(y->GetValue())), GetDexPc());
+ }
+ HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
+ return GetBlock()->GetGraph()->GetIntConstant(
+ Compute(static_cast<uint64_t>(x->GetValue()),
+ static_cast<uint64_t>(y->GetValue())), GetDexPc());
+ }
+
+ DECLARE_INSTRUCTION(Above);
+
+ IfCondition GetCondition() const OVERRIDE {
+ return kCondA;
+ }
+
+ IfCondition GetOppositeCondition() const OVERRIDE {
+ return kCondBE;
+ }
+
+ private:
+ template <typename T> bool Compute(T x, T y) const { return x > y; }
+
+ DISALLOW_COPY_AND_ASSIGN(HAbove);
+};
+
+class HAboveOrEqual : public HCondition {
+ public:
+ HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
+ : HCondition(first, second, dex_pc) {}
+
+ HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
+ return GetBlock()->GetGraph()->GetIntConstant(
+ Compute(static_cast<uint32_t>(x->GetValue()),
+ static_cast<uint32_t>(y->GetValue())), GetDexPc());
+ }
+ HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
+ return GetBlock()->GetGraph()->GetIntConstant(
+ Compute(static_cast<uint64_t>(x->GetValue()),
+ static_cast<uint64_t>(y->GetValue())), GetDexPc());
+ }
+
+ DECLARE_INSTRUCTION(AboveOrEqual);
+
+ IfCondition GetCondition() const OVERRIDE {
+ return kCondAE;
+ }
+
+ IfCondition GetOppositeCondition() const OVERRIDE {
+ return kCondB;
+ }
+
+ private:
+ template <typename T> bool Compute(T x, T y) const { return x >= y; }
+
+ DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual);
+};
// Instruction to check how two inputs compare to each other.
// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
@@ -3254,8 +3387,7 @@ class HInvokeStaticOrDirect : public HInvoke {
target_method_(target_method),
dispatch_info_(dispatch_info) {}
- bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
- UNUSED(obj);
+ bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
// We access the method via the dex cache so we can't do an implicit null check.
// TODO: for intrinsics we can generate implicit null checks.
return false;
@@ -3693,8 +3825,7 @@ class HDivZeroCheck : public HExpression<1> {
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
@@ -3964,8 +4095,7 @@ class HNot : public HUnaryOperation {
: HUnaryOperation(result_type, input, dex_pc) {}
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
@@ -3990,8 +4120,7 @@ class HBooleanNot : public HUnaryOperation {
: HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
@@ -4157,8 +4286,7 @@ class HNullCheck : public HExpression<1> {
}
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
@@ -4303,12 +4431,10 @@ class HArrayGet : public HExpression<2> {
}
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
- bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
- UNUSED(obj);
+ bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
// TODO: We can be smarter here.
// Currently, the array access is always preceded by an ArrayLength or a NullCheck
// which generates the implicit null check. There are cases when these can be removed
@@ -4356,8 +4482,7 @@ class HArraySet : public HTemplateInstruction<3> {
// Can throw ArrayStoreException.
bool CanThrow() const OVERRIDE { return needs_type_check_; }
- bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
- UNUSED(obj);
+ bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
// TODO: Same as for ArrayGet.
return false;
}
@@ -4420,8 +4545,7 @@ class HArrayLength : public HExpression<1> {
}
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
@@ -4444,8 +4568,7 @@ class HBoundsCheck : public HExpression<2> {
}
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
@@ -4659,8 +4782,7 @@ class HClinitCheck : public HExpression<1> {
}
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
@@ -5278,7 +5400,7 @@ class HGraphVisitor : public ValueObject {
explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
virtual ~HGraphVisitor() {}
- virtual void VisitInstruction(HInstruction* instruction) { UNUSED(instruction); }
+ virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
virtual void VisitBasicBlock(HBasicBlock* block);
// Visit the graph following basic block insertion order.
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index c7f08066d4..17a4743290 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -666,7 +666,6 @@ CompiledMethod* OptimizingCompiler::TryCompile(const DexFile::CodeItem* code_ite
jobject class_loader,
const DexFile& dex_file,
Handle<mirror::DexCache> dex_cache) const {
- UNUSED(invoke_type);
std::string method_name = PrettyMethod(method_idx, dex_file);
MaybeRecordStat(MethodCompilationStat::kAttemptCompilation);
CompilerDriver* compiler_driver = GetCompilerDriver();
diff --git a/compiler/optimizing/reference_type_propagation.cc b/compiler/optimizing/reference_type_propagation.cc
index f7a7e420bb..a1feaf77bd 100644
--- a/compiler/optimizing/reference_type_propagation.cc
+++ b/compiler/optimizing/reference_type_propagation.cc
@@ -756,7 +756,9 @@ void ReferenceTypePropagation::ProcessWorklist() {
while (!worklist_.empty()) {
HInstruction* instruction = worklist_.back();
worklist_.pop_back();
- if (UpdateNullability(instruction) || UpdateReferenceTypeInfo(instruction)) {
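+ // Call both update methods unconditionally: a short-circuiting || would
+ // skip UpdateReferenceTypeInfo() whenever the nullability changed.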
+ bool updated_nullability = UpdateNullability(instruction);
+ bool updated_reference_type = UpdateReferenceTypeInfo(instruction);
+ if (updated_nullability || updated_reference_type) {
AddDependentInstructionsToWorklist(instruction);
}
}