Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/code_generator_arm.cc            45
-rw-r--r--  compiler/optimizing/code_generator_arm64.cc          46
-rw-r--r--  compiler/optimizing/code_generator_x86.cc            42
-rw-r--r--  compiler/optimizing/code_generator_x86_64.cc         43
-rw-r--r--  compiler/optimizing/common_arm64.h                    4
-rw-r--r--  compiler/optimizing/intrinsics_mips64.cc            714
-rw-r--r--  compiler/optimizing/nodes.h                           50
-rw-r--r--  compiler/optimizing/optimizing_compiler.cc             1
-rw-r--r--  compiler/optimizing/reference_type_propagation.cc      4
9 files changed, 768 insertions, 181 deletions
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 358a35c483..92a5878476 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -1141,8 +1141,7 @@ void LocationsBuilderARM::VisitExit(HExit* exit) {
exit->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
- UNUSED(exit);
+void InstructionCodeGeneratorARM::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
void InstructionCodeGeneratorARM::GenerateCompareWithImmediate(Register left, int32_t right) {
@@ -1569,9 +1568,8 @@ void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
load->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
+void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load ATTRIBUTE_UNUSED) {
// Nothing to do, this is driven by the code generator.
- UNUSED(load);
}
void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
@@ -1598,8 +1596,7 @@ void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
}
}
-void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
- UNUSED(store);
+void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store ATTRIBUTE_UNUSED) {
}
void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
@@ -1608,9 +1605,8 @@ void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
+void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderARM::VisitNullConstant(HNullConstant* constant) {
@@ -1619,9 +1615,8 @@ void LocationsBuilderARM::VisitNullConstant(HNullConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM::VisitNullConstant(HNullConstant* constant) {
+void InstructionCodeGeneratorARM::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
@@ -1630,9 +1625,8 @@ void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
+void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
@@ -1641,9 +1635,8 @@ void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
+void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
@@ -1652,9 +1645,8 @@ void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
+void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderARM::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
@@ -1669,8 +1661,7 @@ void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
ret->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
- UNUSED(ret);
+void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
codegen_->GenerateFrameExit();
}
@@ -1680,8 +1671,7 @@ void LocationsBuilderARM::VisitReturn(HReturn* ret) {
locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}
-void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
- UNUSED(ret);
+void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
codegen_->GenerateFrameExit();
}
@@ -3327,8 +3317,7 @@ void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
locations->SetOut(Location::Any());
}
-void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
- UNUSED(instruction);
+void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
LOG(FATAL) << "Unreachable";
}
@@ -4289,13 +4278,11 @@ void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
temp->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
+void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp ATTRIBUTE_UNUSED) {
// Nothing to do, this is driven by the code generator.
- UNUSED(temp);
}
-void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
- UNUSED(instruction);
+void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
LOG(FATAL) << "Unreachable";
}
@@ -5343,15 +5330,13 @@ Literal* CodeGeneratorARM::DeduplicateMethodCodeLiteral(MethodReference target_m
return DeduplicateMethodLiteral(target_method, &call_patches_);
}
-void LocationsBuilderARM::VisitBoundType(HBoundType* instruction) {
+void LocationsBuilderARM::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
// Nothing to do, this should be removed during prepare for register allocator.
- UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
-void InstructionCodeGeneratorARM::VisitBoundType(HBoundType* instruction) {
+void InstructionCodeGeneratorARM::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
// Nothing to do, this should be removed during prepare for register allocator.
- UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
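
The same substitution is repeated across the ARM, ARM64, x86, and x86_64 code generators: a macro call in the function body is traded for an attribute on the parameter itself. A minimal standalone sketch of the two idioms, with illustrative stand-ins for the macros from ART's base/macros.h:

#include <iostream>

#define UNUSED(x) ((void)(x))                     // old idiom: void the parameter in the body
#define ATTRIBUTE_UNUSED __attribute__((unused))  // new idiom: annotate the parameter itself

// Before: the parameter must be named in the body just to silence
// -Wunused-parameter.
void VisitExitOld(int* exit) {
  UNUSED(exit);
}

// After: the attribute documents the intent in the signature and the body
// stays genuinely empty.
void VisitExitNew(int* exit ATTRIBUTE_UNUSED) {
}

int main() {
  VisitExitOld(nullptr);
  VisitExitNew(nullptr);
  std::cout << "both compile cleanly under -Wall -Wextra\n";
  return 0;
}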
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 37f10a5cc5..f68b11b504 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -1330,8 +1330,7 @@ enum UnimplementedInstructionBreakCode {
};
#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
- void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) { \
- UNUSED(instr); \
+ void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) { \
__ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
} \
void LocationsBuilderARM64::Visit##name(H##name* instr) { \
@@ -2183,8 +2182,8 @@ void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM64::VisitDoubleConstant(HDoubleConstant* constant) {
- UNUSED(constant);
+void InstructionCodeGeneratorARM64::VisitDoubleConstant(
+ HDoubleConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
}
@@ -2192,8 +2191,7 @@ void LocationsBuilderARM64::VisitExit(HExit* exit) {
exit->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
- UNUSED(exit);
+void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
@@ -2202,8 +2200,7 @@ void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant) {
- UNUSED(constant);
+void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
}
@@ -2689,9 +2686,8 @@ void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
+void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
@@ -2699,9 +2695,8 @@ void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant) {
+void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
@@ -3092,9 +3087,8 @@ void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
load->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
+void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load ATTRIBUTE_UNUSED) {
// Nothing to do, this is driven by the code generator.
- UNUSED(load);
}
void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
@@ -3131,9 +3125,8 @@ void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
+void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
@@ -3400,8 +3393,7 @@ void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
locations->SetOut(Location::Any());
}
-void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
- UNUSED(instruction);
+void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
LOG(FATAL) << "Unreachable";
}
@@ -3471,8 +3463,7 @@ void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
locations->SetInAt(0, ARM64ReturnLocation(return_type));
}
-void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
- UNUSED(instruction);
+void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
codegen_->GenerateFrameExit();
}
@@ -3480,8 +3471,7 @@ void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
instruction->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
- UNUSED(instruction);
+void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
codegen_->GenerateFrameExit();
}
@@ -3525,8 +3515,7 @@ void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
}
}
-void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
- UNUSED(store);
+void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store ATTRIBUTE_UNUSED) {
}
void LocationsBuilderARM64::VisitSub(HSub* instruction) {
@@ -3643,9 +3632,8 @@ void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
temp->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
+void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp ATTRIBUTE_UNUSED) {
// Nothing to do, this is driven by the code generator.
- UNUSED(temp);
}
void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
@@ -3744,15 +3732,13 @@ void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
HandleBinaryOp(instruction);
}
-void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction) {
+void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
// Nothing to do, this should be removed during prepare for register allocator.
- UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
-void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction) {
+void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
// Nothing to do, this should be removed during prepare for register allocator.
- UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 8b33f56295..963eec2529 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -1052,8 +1052,7 @@ void LocationsBuilderX86::VisitExit(HExit* exit) {
exit->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86::VisitExit(HExit* exit) {
- UNUSED(exit);
+void InstructionCodeGeneratorX86::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
void InstructionCodeGeneratorX86::GenerateFPJumps(HCondition* cond,
@@ -1332,9 +1331,8 @@ void LocationsBuilderX86::VisitLoadLocal(HLoadLocal* local) {
local->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86::VisitLoadLocal(HLoadLocal* load) {
+void InstructionCodeGeneratorX86::VisitLoadLocal(HLoadLocal* load ATTRIBUTE_UNUSED) {
// Nothing to do, this is driven by the code generator.
- UNUSED(load);
}
void LocationsBuilderX86::VisitStoreLocal(HStoreLocal* store) {
@@ -1361,8 +1359,7 @@ void LocationsBuilderX86::VisitStoreLocal(HStoreLocal* store) {
}
}
-void InstructionCodeGeneratorX86::VisitStoreLocal(HStoreLocal* store) {
- UNUSED(store);
+void InstructionCodeGeneratorX86::VisitStoreLocal(HStoreLocal* store ATTRIBUTE_UNUSED) {
}
void LocationsBuilderX86::VisitCondition(HCondition* cond) {
@@ -1544,9 +1541,8 @@ void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant) {
+void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86::VisitNullConstant(HNullConstant* constant) {
@@ -1555,9 +1551,8 @@ void LocationsBuilderX86::VisitNullConstant(HNullConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86::VisitNullConstant(HNullConstant* constant) {
+void InstructionCodeGeneratorX86::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
@@ -1566,9 +1561,8 @@ void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant) {
+void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86::VisitFloatConstant(HFloatConstant* constant) {
@@ -1577,9 +1571,8 @@ void LocationsBuilderX86::VisitFloatConstant(HFloatConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86::VisitFloatConstant(HFloatConstant* constant) {
+void InstructionCodeGeneratorX86::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86::VisitDoubleConstant(HDoubleConstant* constant) {
@@ -1588,9 +1581,8 @@ void LocationsBuilderX86::VisitDoubleConstant(HDoubleConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86::VisitDoubleConstant(HDoubleConstant* constant) {
+void InstructionCodeGeneratorX86::VisitDoubleConstant(HDoubleConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
@@ -1605,8 +1597,7 @@ void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
ret->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret) {
- UNUSED(ret);
+void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
codegen_->GenerateFrameExit();
}
@@ -3740,8 +3731,7 @@ void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
locations->SetOut(Location::Any());
}
-void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction) {
- UNUSED(instruction);
+void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
LOG(FATAL) << "Unreachable";
}
@@ -4739,13 +4729,11 @@ void LocationsBuilderX86::VisitTemporary(HTemporary* temp) {
temp->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86::VisitTemporary(HTemporary* temp) {
+void InstructionCodeGeneratorX86::VisitTemporary(HTemporary* temp ATTRIBUTE_UNUSED) {
// Nothing to do, this is driven by the code generator.
- UNUSED(temp);
}
-void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction) {
- UNUSED(instruction);
+void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
LOG(FATAL) << "Unreachable";
}
@@ -5668,15 +5656,13 @@ void InstructionCodeGeneratorX86::HandleBitwiseOperation(HBinaryOperation* instr
}
}
-void LocationsBuilderX86::VisitBoundType(HBoundType* instruction) {
+void LocationsBuilderX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
// Nothing to do, this should be removed during prepare for register allocator.
- UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
-void InstructionCodeGeneratorX86::VisitBoundType(HBoundType* instruction) {
+void InstructionCodeGeneratorX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
// Nothing to do, this should be removed during prepare for register allocator.
- UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index a2af4ba1f9..ed2e4ca87c 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -1050,8 +1050,7 @@ void LocationsBuilderX86_64::VisitExit(HExit* exit) {
exit->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit) {
- UNUSED(exit);
+void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
@@ -1278,9 +1277,8 @@ void LocationsBuilderX86_64::VisitLoadLocal(HLoadLocal* local) {
local->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86_64::VisitLoadLocal(HLoadLocal* load) {
+void InstructionCodeGeneratorX86_64::VisitLoadLocal(HLoadLocal* load ATTRIBUTE_UNUSED) {
// Nothing to do, this is driven by the code generator.
- UNUSED(load);
}
void LocationsBuilderX86_64::VisitStoreLocal(HStoreLocal* store) {
@@ -1307,8 +1305,7 @@ void LocationsBuilderX86_64::VisitStoreLocal(HStoreLocal* store) {
}
}
-void InstructionCodeGeneratorX86_64::VisitStoreLocal(HStoreLocal* store) {
- UNUSED(store);
+void InstructionCodeGeneratorX86_64::VisitStoreLocal(HStoreLocal* store ATTRIBUTE_UNUSED) {
}
void LocationsBuilderX86_64::VisitCondition(HCondition* cond) {
@@ -1613,9 +1610,8 @@ void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant) {
+void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
@@ -1624,9 +1620,8 @@ void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant) {
+void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
@@ -1635,9 +1630,8 @@ void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant) {
+void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
@@ -1646,9 +1640,8 @@ void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant) {
+void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
@@ -1657,9 +1650,9 @@ void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
+void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
+ HDoubleConstant* constant ATTRIBUTE_UNUSED) {
// Will be generated at use site.
- UNUSED(constant);
}
void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
@@ -1674,8 +1667,7 @@ void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
ret->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret) {
- UNUSED(ret);
+void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
codegen_->GenerateFrameExit();
}
@@ -3629,8 +3621,7 @@ void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
locations->SetOut(Location::Any());
}
-void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction) {
- UNUSED(instruction);
+void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
LOG(FATAL) << "Unimplemented";
}
@@ -4450,13 +4441,11 @@ void LocationsBuilderX86_64::VisitTemporary(HTemporary* temp) {
temp->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86_64::VisitTemporary(HTemporary* temp) {
+void InstructionCodeGeneratorX86_64::VisitTemporary(HTemporary* temp ATTRIBUTE_UNUSED) {
// Nothing to do, this is driven by the code generator.
- UNUSED(temp);
}
-void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction) {
- UNUSED(instruction);
+void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
LOG(FATAL) << "Unimplemented";
}
@@ -5331,15 +5320,13 @@ void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* in
}
}
-void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction) {
+void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
// Nothing to do, this should be removed during prepare for register allocator.
- UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
-void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction) {
+void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
// Nothing to do, this should be removed during prepare for register allocator.
- UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
diff --git a/compiler/optimizing/common_arm64.h b/compiler/optimizing/common_arm64.h
index f54547534f..4abe5e953c 100644
--- a/compiler/optimizing/common_arm64.h
+++ b/compiler/optimizing/common_arm64.h
@@ -206,7 +206,9 @@ static bool CanEncodeConstantAsImmediate(HConstant* constant, HInstruction* inst
if (instr->IsAdd() || instr->IsSub() || instr->IsCondition() ||
instr->IsCompare() || instr->IsBoundsCheck()) {
// Uses aliases of ADD/SUB instructions.
- return vixl::Assembler::IsImmAddSub(value);
+ // If `value` does not fit but `-value` does, VIXL will automatically use
+ // the 'opposite' instruction.
+ return vixl::Assembler::IsImmAddSub(value) || vixl::Assembler::IsImmAddSub(-value);
} else if (instr->IsAnd() || instr->IsOr() || instr->IsXor()) {
// Uses logical operations.
return vixl::Assembler::IsImmLogical(value, vixl::kXRegSize);
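
The common_arm64.h hunk exploits an ISA symmetry: AArch64 ADD and SUB (immediate) accept the same unsigned 12-bit immediate, optionally shifted left by 12, so a constant that fails to encode directly may still encode after negation, with VIXL emitting the opposite instruction. A hedged sketch of the predicate; the IsImmAddSub body below is an illustrative reimplementation standing in for vixl::Assembler::IsImmAddSub, not VIXL's actual code:

#include <cstdint>
#include <cstdio>

// Illustrative stand-in: an ADD/SUB immediate is a 12-bit unsigned value,
// optionally shifted left by 12 bits.
bool IsImmAddSub(int64_t v) {
  return v >= 0 && ((v & ~int64_t{0xfff}) == 0 ||
                    (v & ~(int64_t{0xfff} << 12)) == 0);
}

// The patched check: accept `value` if either it or its negation encodes;
// the assembler can then flip ADD <-> SUB as needed.
bool CanEncodeConstantAsImmediate(int64_t value) {
  return IsImmAddSub(value) || IsImmAddSub(-value);
}

int main() {
  // -1 does not encode directly, but 1 does, so `add x0, x1, #-1`
  // can be materialized as `sub x0, x1, #1`.
  std::printf("direct: %d  with negation: %d\n",
              static_cast<int>(IsImmAddSub(-1)),
              static_cast<int>(CanEncodeConstantAsImmediate(-1)));
  return 0;
}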
diff --git a/compiler/optimizing/intrinsics_mips64.cc b/compiler/optimizing/intrinsics_mips64.cc
index 764a11475f..fe16d00b72 100644
--- a/compiler/optimizing/intrinsics_mips64.cc
+++ b/compiler/optimizing/intrinsics_mips64.cc
@@ -43,6 +43,93 @@ ArenaAllocator* IntrinsicCodeGeneratorMIPS64::GetAllocator() {
return codegen_->GetGraph()->GetArena();
}
+#define __ codegen->GetAssembler()->
+
+static void MoveFromReturnRegister(Location trg,
+ Primitive::Type type,
+ CodeGeneratorMIPS64* codegen) {
+ if (!trg.IsValid()) {
+ DCHECK_EQ(type, Primitive::kPrimVoid);
+ return;
+ }
+
+ DCHECK_NE(type, Primitive::kPrimVoid);
+
+ if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
+ GpuRegister trg_reg = trg.AsRegister<GpuRegister>();
+ if (trg_reg != V0) {
+ __ Move(V0, trg_reg);
+ }
+ } else {
+ FpuRegister trg_reg = trg.AsFpuRegister<FpuRegister>();
+ if (trg_reg != F0) {
+ if (type == Primitive::kPrimFloat) {
+ __ MovS(F0, trg_reg);
+ } else {
+ __ MovD(F0, trg_reg);
+ }
+ }
+ }
+}
+
+static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
+ InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
+ IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
+}
+
+// Slow-path for fallback (calling the managed code to handle the
+// intrinsic) in an intrinsified call. This will copy the arguments
+// into the positions for a regular call.
+//
+// Note: The actual parameters are required to be in the locations
+// given by the invoke's location summary. If an intrinsic
+// modifies those locations before a slowpath call, they must be
+// restored!
+class IntrinsicSlowPathMIPS64 : public SlowPathCodeMIPS64 {
+ public:
+ explicit IntrinsicSlowPathMIPS64(HInvoke* invoke) : invoke_(invoke) { }
+
+ void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
+ CodeGeneratorMIPS64* codegen = down_cast<CodeGeneratorMIPS64*>(codegen_in);
+
+ __ Bind(GetEntryLabel());
+
+ SaveLiveRegisters(codegen, invoke_->GetLocations());
+
+ MoveArguments(invoke_, codegen);
+
+ if (invoke_->IsInvokeStaticOrDirect()) {
+ codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
+ Location::RegisterLocation(A0));
+ codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);
+ } else {
+ UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
+ UNREACHABLE();
+ }
+
+ // Copy the result back to the expected output.
+ Location out = invoke_->GetLocations()->Out();
+ if (out.IsValid()) {
+ DCHECK(out.IsRegister()); // TODO: Replace this when we support output in memory.
+ DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
+ MoveFromReturnRegister(out, invoke_->GetType(), codegen);
+ }
+
+ RestoreLiveRegisters(codegen, invoke_->GetLocations());
+ __ B(GetExitLabel());
+ }
+
+ const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS64"; }
+
+ private:
+ // The instruction where this slow path is happening.
+ HInvoke* const invoke_;
+
+ DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS64);
+};
+
+#undef __
+
bool IntrinsicLocationsBuilderMIPS64::TryDispatch(HInvoke* invoke) {
Dispatch(invoke);
LocationSummary* res = invoke->GetLocations();
@@ -185,7 +272,7 @@ void IntrinsicCodeGeneratorMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}
-static void GenCountZeroes(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
+static void GenNumberOfLeadingZeroes(LocationSummary* locations,
+                                     bool is64bit,
+                                     Mips64Assembler* assembler) {
GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
GpuRegister out = locations->Out().AsRegister<GpuRegister>();
@@ -202,7 +289,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke*
}
void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
- GenCountZeroes(invoke->GetLocations(), false, GetAssembler());
+ GenNumberOfLeadingZeroes(invoke->GetLocations(), false, GetAssembler());
}
// int java.lang.Long.numberOfLeadingZeros(long i)
@@ -211,7 +298,103 @@ void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* inv
}
void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
- GenCountZeroes(invoke->GetLocations(), true, GetAssembler());
+ GenNumberOfLeadingZeroes(invoke->GetLocations(), true, GetAssembler());
+}
+
+static void GenNumberOfTrailingZeroes(LocationSummary* locations,
+                                      bool is64bit,
+                                      Mips64Assembler* assembler) {
+ Location in = locations->InAt(0);
+ Location out = locations->Out();
+
+ if (is64bit) {
+ __ Dsbh(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>());
+ __ Dshd(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
+ __ Dbitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
+ __ Dclz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
+ } else {
+ __ Rotr(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>(), 16);
+ __ Wsbh(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
+ __ Bitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
+ __ Clz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
+ }
+}
+
+// int java.lang.Integer.numberOfTrailingZeros(int i)
+void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
+ CreateIntToIntLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
+ GenNumberOfTrailingZeroes(invoke->GetLocations(), false, GetAssembler());
+}
+
+// int java.lang.Long.numberOfTrailingZeros(long i)
+void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
+ CreateIntToIntLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
+ GenNumberOfTrailingZeroes(invoke->GetLocations(), true, GetAssembler());
+}
+
+static void GenRotateRight(HInvoke* invoke,
+ Primitive::Type type,
+ Mips64Assembler* assembler) {
+ DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
+
+ LocationSummary* locations = invoke->GetLocations();
+ GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
+ GpuRegister out = locations->Out().AsRegister<GpuRegister>();
+
+ if (invoke->InputAt(1)->IsIntConstant()) {
+ uint32_t shift = static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue());
+ if (type == Primitive::kPrimInt) {
+ shift &= 0x1f;
+ __ Rotr(out, in, shift);
+ } else {
+ shift &= 0x3f;
+ if (shift < 32) {
+ __ Drotr(out, in, shift);
+ } else {
+ shift &= 0x1f;
+ __ Drotr32(out, in, shift);
+ }
+ }
+ } else {
+ GpuRegister shamt = locations->InAt(1).AsRegister<GpuRegister>();
+ if (type == Primitive::kPrimInt) {
+ __ Rotrv(out, in, shamt);
+ } else {
+ __ Drotrv(out, in, shamt);
+ }
+ }
+}
+
+// int java.lang.Integer.rotateRight(int i, int distance)
+void IntrinsicLocationsBuilderMIPS64::VisitIntegerRotateRight(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kNoCall,
+ kIntrinsified);
+ locations->SetInAt(0, Location::RequiresRegister());
+ locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
+ locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitIntegerRotateRight(HInvoke* invoke) {
+ GenRotateRight(invoke, Primitive::kPrimInt, GetAssembler());
+}
+
+// int java.lang.Long.rotateRight(long i, int distance)
+void IntrinsicLocationsBuilderMIPS64::VisitLongRotateRight(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kNoCall,
+ kIntrinsified);
+ locations->SetInAt(0, Location::RequiresRegister());
+ locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
+ locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitLongRotateRight(HInvoke* invoke) {
+ GenRotateRight(invoke, Primitive::kPrimLong, GetAssembler());
}
static void GenReverse(LocationSummary* locations,
@@ -765,6 +948,505 @@ void IntrinsicCodeGeneratorMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
Thread::PeerOffset<kMips64PointerSize>().Int32Value());
}
+static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
+ LocationSummary* locations = new (arena) LocationSummary(invoke,
+ LocationSummary::kNoCall,
+ kIntrinsified);
+ locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
+ locations->SetInAt(1, Location::RequiresRegister());
+ locations->SetInAt(2, Location::RequiresRegister());
+ locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
+}
+
+static void GenUnsafeGet(HInvoke* invoke,
+ Primitive::Type type,
+ bool is_volatile,
+ CodeGeneratorMIPS64* codegen) {
+ LocationSummary* locations = invoke->GetLocations();
+ DCHECK((type == Primitive::kPrimInt) ||
+ (type == Primitive::kPrimLong) ||
+ (type == Primitive::kPrimNot));
+ Mips64Assembler* assembler = codegen->GetAssembler();
+ // Object pointer.
+ GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
+ // Long offset.
+ GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
+ GpuRegister trg = locations->Out().AsRegister<GpuRegister>();
+
+ __ Daddu(TMP, base, offset);
+ if (is_volatile) {
+ __ Sync(0);
+ }
+ switch (type) {
+ case Primitive::kPrimInt:
+ __ Lw(trg, TMP, 0);
+ break;
+
+ case Primitive::kPrimNot:
+ __ Lwu(trg, TMP, 0);
+ break;
+
+ case Primitive::kPrimLong:
+ __ Ld(trg, TMP, 0);
+ break;
+
+ default:
+ LOG(FATAL) << "Unsupported op size " << type;
+ UNREACHABLE();
+ }
+}
+
+// int sun.misc.Unsafe.getInt(Object o, long offset)
+void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGet(HInvoke* invoke) {
+ CreateIntIntIntToIntLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGet(HInvoke* invoke) {
+ GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
+}
+
+// int sun.misc.Unsafe.getIntVolatile(Object o, long offset)
+void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
+ CreateIntIntIntToIntLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
+ GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
+}
+
+// long sun.misc.Unsafe.getLong(Object o, long offset)
+void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
+ CreateIntIntIntToIntLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
+ GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
+}
+
+// long sun.misc.Unsafe.getLongVolatile(Object o, long offset)
+void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
+ CreateIntIntIntToIntLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
+ GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
+}
+
+// Object sun.misc.Unsafe.getObject(Object o, long offset)
+void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
+ CreateIntIntIntToIntLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
+ GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
+}
+
+// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset)
+void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
+ CreateIntIntIntToIntLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
+ GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
+}
+
+static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
+ LocationSummary* locations = new (arena) LocationSummary(invoke,
+ LocationSummary::kNoCall,
+ kIntrinsified);
+ locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
+ locations->SetInAt(1, Location::RequiresRegister());
+ locations->SetInAt(2, Location::RequiresRegister());
+ locations->SetInAt(3, Location::RequiresRegister());
+}
+
+static void GenUnsafePut(LocationSummary* locations,
+ Primitive::Type type,
+ bool is_volatile,
+ bool is_ordered,
+ CodeGeneratorMIPS64* codegen) {
+ DCHECK((type == Primitive::kPrimInt) ||
+ (type == Primitive::kPrimLong) ||
+ (type == Primitive::kPrimNot));
+ Mips64Assembler* assembler = codegen->GetAssembler();
+ // Object pointer.
+ GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
+ // Long offset.
+ GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
+ GpuRegister value = locations->InAt(3).AsRegister<GpuRegister>();
+
+ __ Daddu(TMP, base, offset);
+ if (is_volatile || is_ordered) {
+ __ Sync(0);
+ }
+ switch (type) {
+ case Primitive::kPrimInt:
+ case Primitive::kPrimNot:
+ __ Sw(value, TMP, 0);
+ break;
+
+ case Primitive::kPrimLong:
+ __ Sd(value, TMP, 0);
+ break;
+
+ default:
+ LOG(FATAL) << "Unsupported op size " << type;
+ UNREACHABLE();
+ }
+ if (is_volatile) {
+ __ Sync(0);
+ }
+
+ if (type == Primitive::kPrimNot) {
+ codegen->MarkGCCard(base, value);
+ }
+}
+
+// void sun.misc.Unsafe.putInt(Object o, long offset, int x)
+void IntrinsicLocationsBuilderMIPS64::VisitUnsafePut(HInvoke* invoke) {
+ CreateIntIntIntIntToVoid(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitUnsafePut(HInvoke* invoke) {
+ GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
+}
+
+// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x)
+void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
+ CreateIntIntIntIntToVoid(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
+ GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
+}
+
+// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x)
+void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
+ CreateIntIntIntIntToVoid(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
+ GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
+}
+
+// void sun.misc.Unsafe.putObject(Object o, long offset, Object x)
+void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
+ CreateIntIntIntIntToVoid(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
+ GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
+}
+
+// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x)
+void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
+ CreateIntIntIntIntToVoid(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
+ GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
+}
+
+// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x)
+void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
+ CreateIntIntIntIntToVoid(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
+ GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
+}
+
+// void sun.misc.Unsafe.putLong(Object o, long offset, long x)
+void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
+ CreateIntIntIntIntToVoid(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
+ GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
+}
+
+// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x)
+void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
+ CreateIntIntIntIntToVoid(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
+ GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
+}
+
+// void sun.misc.Unsafe.putLongVolatile(Object o, long offset, long x)
+void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
+ CreateIntIntIntIntToVoid(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
+ GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
+}
+
+// char java.lang.String.charAt(int index)
+void IntrinsicLocationsBuilderMIPS64::VisitStringCharAt(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kCallOnSlowPath,
+ kIntrinsified);
+ locations->SetInAt(0, Location::RequiresRegister());
+ locations->SetInAt(1, Location::RequiresRegister());
+ locations->SetOut(Location::SameAsFirstInput());
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitStringCharAt(HInvoke* invoke) {
+ LocationSummary* locations = invoke->GetLocations();
+ Mips64Assembler* assembler = GetAssembler();
+
+ // Location of reference to data array
+ const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
+ // Location of count
+ const int32_t count_offset = mirror::String::CountOffset().Int32Value();
+
+ GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
+ GpuRegister idx = locations->InAt(1).AsRegister<GpuRegister>();
+ GpuRegister out = locations->Out().AsRegister<GpuRegister>();
+
+ // TODO: Maybe we can support range check elimination. Overall,
+ // though, I think it's not worth the cost.
+ // TODO: For simplicity, the index parameter is requested in a
+ // register, so different from Quick we will not optimize the
+ // code for constants (which would save a register).
+
+ SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
+ codegen_->AddSlowPath(slow_path);
+
+ // Load the string size
+ __ Lw(TMP, obj, count_offset);
+ codegen_->MaybeRecordImplicitNullCheck(invoke);
+ // Revert to slow path if idx is too large, or negative
+ __ Bgeuc(idx, TMP, slow_path->GetEntryLabel());
+
+ // out = obj[2*idx].
+ __ Sll(TMP, idx, 1); // idx * 2
+ __ Daddu(TMP, TMP, obj); // Address of char at location idx
+ __ Lhu(out, TMP, value_offset); // Load char at location idx
+
+ __ Bind(slow_path->GetExitLabel());
+}
+
+// int java.lang.String.compareTo(String anotherString)
+void IntrinsicLocationsBuilderMIPS64::VisitStringCompareTo(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kCall,
+ kIntrinsified);
+ InvokeRuntimeCallingConvention calling_convention;
+ locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+ Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
+ locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitStringCompareTo(HInvoke* invoke) {
+ Mips64Assembler* assembler = GetAssembler();
+ LocationSummary* locations = invoke->GetLocations();
+
+ // Note that the null check must have been done earlier.
+ DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
+
+ GpuRegister argument = locations->InAt(1).AsRegister<GpuRegister>();
+ SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
+ codegen_->AddSlowPath(slow_path);
+ __ Beqzc(argument, slow_path->GetEntryLabel());
+
+ __ LoadFromOffset(kLoadDoubleword,
+ TMP,
+ TR,
+ QUICK_ENTRYPOINT_OFFSET(kMips64WordSize,
+ pStringCompareTo).Int32Value());
+ __ Jalr(TMP);
+ __ Nop();
+ __ Bind(slow_path->GetExitLabel());
+}
+
+static void GenerateStringIndexOf(HInvoke* invoke,
+ Mips64Assembler* assembler,
+ CodeGeneratorMIPS64* codegen,
+ ArenaAllocator* allocator,
+ bool start_at_zero) {
+ LocationSummary* locations = invoke->GetLocations();
+ GpuRegister tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<GpuRegister>() : TMP;
+
+ // Note that the null check must have been done earlier.
+ DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
+
+ // Check for code points > 0xFFFF. Either a slow-path check when we
+ // don't know statically, or directly dispatch if we have a constant.
+ SlowPathCodeMIPS64* slow_path = nullptr;
+ if (invoke->InputAt(1)->IsIntConstant()) {
+ if (!IsUint<16>(invoke->InputAt(1)->AsIntConstant()->GetValue())) {
+ // Always needs the slow-path. We could directly dispatch to it,
+ // but this case should be rare, so for simplicity just put the
+ // full slow-path down and branch unconditionally.
+ slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
+ codegen->AddSlowPath(slow_path);
+ __ B(slow_path->GetEntryLabel());
+ __ Bind(slow_path->GetExitLabel());
+ return;
+ }
+ } else {
+ GpuRegister char_reg = locations->InAt(1).AsRegister<GpuRegister>();
+ __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
+ slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
+ codegen->AddSlowPath(slow_path);
+ __ Bltuc(tmp_reg, char_reg, slow_path->GetEntryLabel()); // UTF-16 required
+ }
+
+ if (start_at_zero) {
+ DCHECK_EQ(tmp_reg, A2);
+ // Start-index = 0.
+ __ Clear(tmp_reg);
+ } else {
+ __ Slt(TMP, A2, ZERO); // if fromIndex < 0
+ __ Seleqz(A2, A2, TMP); // fromIndex = 0
+ }
+
+ __ LoadFromOffset(kLoadDoubleword,
+ TMP,
+ TR,
+ QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pIndexOf).Int32Value());
+ __ Jalr(TMP);
+ __ Nop();
+
+ if (slow_path != nullptr) {
+ __ Bind(slow_path->GetExitLabel());
+ }
+}
+
+// int java.lang.String.indexOf(int ch)
+void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOf(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kCall,
+ kIntrinsified);
+ // We have a hand-crafted assembly stub that follows the runtime
+ // calling convention. So it's best to align the inputs accordingly.
+ InvokeRuntimeCallingConvention calling_convention;
+ locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+ Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
+ locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
+
+ // Need a temp for slow-path codepoint compare, and need to send start-index=0.
+ locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOf(HInvoke* invoke) {
+ GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true);
+}
+
+// int java.lang.String.indexOf(int ch, int fromIndex)
+void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kCall,
+ kIntrinsified);
+ // We have a hand-crafted assembly stub that follows the runtime
+ // calling convention. So it's best to align the inputs accordingly.
+ InvokeRuntimeCallingConvention calling_convention;
+ locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+ locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
+ Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
+ locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
+ GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false);
+}
+
+// java.lang.String.String(byte[] bytes)
+void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kCall,
+ kIntrinsified);
+ InvokeRuntimeCallingConvention calling_convention;
+ locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+ locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
+ locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
+ Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
+ locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
+ Mips64Assembler* assembler = GetAssembler();
+ LocationSummary* locations = invoke->GetLocations();
+
+ GpuRegister byte_array = locations->InAt(0).AsRegister<GpuRegister>();
+ SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
+ codegen_->AddSlowPath(slow_path);
+ __ Beqzc(byte_array, slow_path->GetEntryLabel());
+
+ __ LoadFromOffset(kLoadDoubleword,
+ TMP,
+ TR,
+ QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pAllocStringFromBytes).Int32Value());
+ codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
+ __ Jalr(TMP);
+ __ Nop();
+ __ Bind(slow_path->GetExitLabel());
+}
+
+// java.lang.String.String(char[] value)
+void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kCall,
+ kIntrinsified);
+ InvokeRuntimeCallingConvention calling_convention;
+ locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+ locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
+ Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
+ locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
+ Mips64Assembler* assembler = GetAssembler();
+
+ __ LoadFromOffset(kLoadDoubleword,
+ TMP,
+ TR,
+ QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pAllocStringFromChars).Int32Value());
+ codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
+ __ Jalr(TMP);
+ __ Nop();
+}
+
+// java.lang.String.String(String original)
+void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kCall,
+ kIntrinsified);
+ InvokeRuntimeCallingConvention calling_convention;
+ locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+ locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+ locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
+ Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
+ locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
+ Mips64Assembler* assembler = GetAssembler();
+ LocationSummary* locations = invoke->GetLocations();
+
+ GpuRegister string_to_copy = locations->InAt(0).AsRegister<GpuRegister>();
+ SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
+ codegen_->AddSlowPath(slow_path);
+ __ Beqzc(string_to_copy, slow_path->GetEntryLabel());
+
+ __ LoadFromOffset(kLoadDoubleword,
+ TMP,
+ TR,
+ QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pAllocStringFromString).Int32Value());
+ codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
+ __ Jalr(TMP);
+ __ Nop();
+ __ Bind(slow_path->GetExitLabel());
+}
+
// Unimplemented intrinsics.
#define UNIMPLEMENTED_INTRINSIC(Name) \
@@ -776,38 +1458,12 @@ void IntrinsicCodeGeneratorMIPS64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED
UNIMPLEMENTED_INTRINSIC(MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(MathRoundFloat)
-UNIMPLEMENTED_INTRINSIC(UnsafeGet)
-UNIMPLEMENTED_INTRINSIC(UnsafeGetVolatile)
-UNIMPLEMENTED_INTRINSIC(UnsafeGetLong)
-UNIMPLEMENTED_INTRINSIC(UnsafeGetLongVolatile)
-UNIMPLEMENTED_INTRINSIC(UnsafeGetObject)
-UNIMPLEMENTED_INTRINSIC(UnsafeGetObjectVolatile)
-UNIMPLEMENTED_INTRINSIC(UnsafePut)
-UNIMPLEMENTED_INTRINSIC(UnsafePutOrdered)
-UNIMPLEMENTED_INTRINSIC(UnsafePutVolatile)
-UNIMPLEMENTED_INTRINSIC(UnsafePutObject)
-UNIMPLEMENTED_INTRINSIC(UnsafePutObjectOrdered)
-UNIMPLEMENTED_INTRINSIC(UnsafePutObjectVolatile)
-UNIMPLEMENTED_INTRINSIC(UnsafePutLong)
-UNIMPLEMENTED_INTRINSIC(UnsafePutLongOrdered)
-UNIMPLEMENTED_INTRINSIC(UnsafePutLongVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafeCASInt)
UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)
UNIMPLEMENTED_INTRINSIC(UnsafeCASObject)
-UNIMPLEMENTED_INTRINSIC(StringCharAt)
-UNIMPLEMENTED_INTRINSIC(StringCompareTo)
UNIMPLEMENTED_INTRINSIC(StringEquals)
-UNIMPLEMENTED_INTRINSIC(StringIndexOf)
-UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter)
-UNIMPLEMENTED_INTRINSIC(StringNewStringFromBytes)
-UNIMPLEMENTED_INTRINSIC(StringNewStringFromChars)
-UNIMPLEMENTED_INTRINSIC(StringNewStringFromString)
UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
-UNIMPLEMENTED_INTRINSIC(LongRotateRight)
-UNIMPLEMENTED_INTRINSIC(LongNumberOfTrailingZeros)
UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft)
-UNIMPLEMENTED_INTRINSIC(IntegerRotateRight)
-UNIMPLEMENTED_INTRINSIC(IntegerNumberOfTrailingZeros)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
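
Among the new MIPS64 intrinsics above, the trailing-zeros pair is the least obvious: MIPS64R6 has no count-trailing-zeros instruction, so GenNumberOfTrailingZeroes bit-reverses the operand (DSBH+DSHD, or ROTR+WSBH in the 32-bit path, to reverse bytes, then DBITSWAP/BITSWAP to reverse bits within each byte) and counts leading zeros with DCLZ/CLZ, using the identity ntz(x) = clz(reverse(x)). A host-side sketch of that identity, with plain loops standing in for the MIPS instructions:

#include <cassert>
#include <cstdint>

// Stand-in for DSBH + DSHD + DBITSWAP combined: full 64-bit bit reversal.
uint64_t BitReverse64(uint64_t x) {
  uint64_t r = 0;
  for (int i = 0; i < 64; ++i) {
    r = (r << 1) | ((x >> i) & 1);
  }
  return r;
}

// Stand-in for DCLZ: count leading zero bits, 64 for a zero input.
int CountLeadingZeros64(uint64_t x) {
  int n = 0;
  for (uint64_t mask = uint64_t{1} << 63; mask != 0 && (x & mask) == 0;
       mask >>= 1) {
    ++n;
  }
  return n;
}

int NumberOfTrailingZeros64(uint64_t x) {
  // The identity the intrinsic relies on: trailing zeros of x are the
  // leading zeros of the bit-reversed x.
  return CountLeadingZeros64(BitReverse64(x));
}

int main() {
  assert(NumberOfTrailingZeros64(0x8) == 3);
  assert(NumberOfTrailingZeros64(1) == 0);
  assert(NumberOfTrailingZeros64(0) == 64);  // matches Long.numberOfTrailingZeros(0)
  return 0;
}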
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 2426f8b08d..939e62c6dd 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -1799,8 +1799,7 @@ class HInstruction : public ArenaObject<kArenaAllocInstruction> {
return true;
}
- virtual bool CanDoImplicitNullCheckOn(HInstruction* obj) const {
- UNUSED(obj);
+ virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
return false;
}
@@ -1917,16 +1916,14 @@ class HInstruction : public ArenaObject<kArenaAllocInstruction> {
virtual bool CanBeMoved() const { return false; }
// Returns whether the two instructions are of the same kind.
- virtual bool InstructionTypeEquals(HInstruction* other) const {
- UNUSED(other);
+ virtual bool InstructionTypeEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
return false;
}
// Returns whether any data encoded in the two instructions is equal.
// This method does not look at the inputs. Both instructions must be
// of the same type, otherwise the method has undefined behavior.
- virtual bool InstructionDataEquals(HInstruction* other) const {
- UNUSED(other);
+ virtual bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
return false;
}
@@ -2489,8 +2486,7 @@ class HUnaryOperation : public HExpression<1> {
Primitive::Type GetResultType() const { return GetType(); }
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
@@ -2560,8 +2556,7 @@ class HBinaryOperation : public HExpression<2> {
}
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
@@ -3392,8 +3387,7 @@ class HInvokeStaticOrDirect : public HInvoke {
target_method_(target_method),
dispatch_info_(dispatch_info) {}
- bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
- UNUSED(obj);
+ bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
// We access the method via the dex cache so we can't do an implicit null check.
// TODO: for intrinsics we can generate implicit null checks.
return false;
@@ -3831,8 +3825,7 @@ class HDivZeroCheck : public HExpression<1> {
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
@@ -4102,8 +4095,7 @@ class HNot : public HUnaryOperation {
: HUnaryOperation(result_type, input, dex_pc) {}
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
@@ -4128,8 +4120,7 @@ class HBooleanNot : public HUnaryOperation {
: HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
@@ -4295,8 +4286,7 @@ class HNullCheck : public HExpression<1> {
}
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
@@ -4441,12 +4431,10 @@ class HArrayGet : public HExpression<2> {
}
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
- bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
- UNUSED(obj);
+ bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
// TODO: We can be smarter here.
// Currently, the array access is always preceded by an ArrayLength or a NullCheck
// which generates the implicit null check. There are cases when these can be removed
@@ -4494,8 +4482,7 @@ class HArraySet : public HTemplateInstruction<3> {
// Can throw ArrayStoreException.
bool CanThrow() const OVERRIDE { return needs_type_check_; }
- bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
- UNUSED(obj);
+ bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
// TODO: Same as for ArrayGet.
return false;
}
@@ -4558,8 +4545,7 @@ class HArrayLength : public HExpression<1> {
}
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
@@ -4582,8 +4568,7 @@ class HBoundsCheck : public HExpression<2> {
}
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
@@ -4797,8 +4782,7 @@ class HClinitCheck : public HExpression<1> {
}
bool CanBeMoved() const OVERRIDE { return true; }
- bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
- UNUSED(other);
+ bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
return true;
}
@@ -5416,7 +5400,7 @@ class HGraphVisitor : public ValueObject {
explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
virtual ~HGraphVisitor() {}
- virtual void VisitInstruction(HInstruction* instruction) { UNUSED(instruction); }
+ virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
virtual void VisitBasicBlock(HBasicBlock* block);
// Visit the graph following basic block insertion order.
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index c7f08066d4..17a4743290 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -666,7 +666,6 @@ CompiledMethod* OptimizingCompiler::TryCompile(const DexFile::CodeItem* code_ite
jobject class_loader,
const DexFile& dex_file,
Handle<mirror::DexCache> dex_cache) const {
- UNUSED(invoke_type);
std::string method_name = PrettyMethod(method_idx, dex_file);
MaybeRecordStat(MethodCompilationStat::kAttemptCompilation);
CompilerDriver* compiler_driver = GetCompilerDriver();
diff --git a/compiler/optimizing/reference_type_propagation.cc b/compiler/optimizing/reference_type_propagation.cc
index f7a7e420bb..a1feaf77bd 100644
--- a/compiler/optimizing/reference_type_propagation.cc
+++ b/compiler/optimizing/reference_type_propagation.cc
@@ -756,7 +756,9 @@ void ReferenceTypePropagation::ProcessWorklist() {
while (!worklist_.empty()) {
HInstruction* instruction = worklist_.back();
worklist_.pop_back();
- if (UpdateNullability(instruction) || UpdateReferenceTypeInfo(instruction)) {
+ bool updated_nullability = UpdateNullability(instruction);
+ bool updated_reference_type = UpdateReferenceTypeInfo(instruction);
+ if (updated_nullability || updated_reference_type) {
AddDependentInstructionsToWorklist(instruction);
}
}
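
The reference_type_propagation.cc hunk is a behavioral fix rather than a cleanup: `||` short-circuits in C++, so in the old form UpdateReferenceTypeInfo() was silently skipped whenever UpdateNullability() returned true, even though both updates must run on every worklist iteration. A minimal sketch of the hazard, with hypothetical updaters standing in for the real ones:

#include <iostream>

// Hypothetical updaters: each has a side effect that must always happen,
// and each reports whether anything changed.
bool UpdateNullability() {
  std::cout << "nullability pass ran\n";
  return true;  // a change was made
}

bool UpdateReferenceTypeInfo() {
  std::cout << "reference-type pass ran\n";
  return false;
}

int main() {
  // Buggy pattern: the right-hand side never runs once the left is true.
  if (UpdateNullability() || UpdateReferenceTypeInfo()) {
    // requeue dependent instructions
  }

  // Fixed pattern from the patch: evaluate both, then combine the results.
  bool updated_nullability = UpdateNullability();
  bool updated_reference_type = UpdateReferenceTypeInfo();
  if (updated_nullability || updated_reference_type) {
    // requeue dependent instructions
  }
  return 0;
}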