Remove -Wno-unused-parameter and -Wno-sign-promo from base cflags.
Fix associated errors about unused parameters and implicit sign conversions.
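For illustration only, a minimal standalone sketch of how consuming a parameter
silences -Wunused-parameter when an override ignores it on purpose; the names
SuppressUnused and CodeGeneratorSketch are hypothetical stand-ins for the UNUSED
macro and the code generator interfaces touched below:

    #include <cstddef>
    #include <cstdint>

    // Hypothetical stand-in for the UNUSED macro: taking the arguments by
    // reference counts as a "use", so the caller's parameters no longer
    // trigger -Wunused-parameter.
    template <typename... T>
    inline void SuppressUnused(const T&... /* args */) {}

    struct CodeGeneratorSketch {
      // The parameters are required by the interface but intentionally unused.
      virtual size_t SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
        SuppressUnused(stack_index, reg_id);
        return 0u;
      }
      virtual ~CodeGeneratorSketch() {}
    };

    int main() {
      CodeGeneratorSketch gen;
      return static_cast<int>(gen.SaveFloatingPointRegister(0u, 1u));
    }
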
The sign-conversion errors were largely in the area of enums, so add ostream
operators for the affected enums and fix tools/generate-operator-out.py.
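For illustration only, a standalone sketch of the enum ostream-operator pattern;
SampleKind is a hypothetical enum, and the shape mirrors the hand-written
operator<< added for HInstruction::InstructionKind in nodes.cc below:

    #include <iostream>
    #include <ostream>

    // Illustrative enum only; not one of the enums touched by this change.
    enum SampleKind { kAdd, kMul, kReturn };

    // With an exact-match operator<<, streaming the enum no longer relies on
    // integral promotion to int, which is what the sign-promotion warnings flagged.
    std::ostream& operator<<(std::ostream& os, const SampleKind& rhs) {
      switch (rhs) {
        case kAdd:    os << "Add";    break;
        case kMul:    os << "Mul";    break;
        case kReturn: os << "Return"; break;
        default:      os << "Unknown kind " << static_cast<int>(rhs); break;
      }
      return os;
    }

    int main() {
      std::cout << kMul << std::endl;  // Prints "Mul".
      return 0;
    }
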
Tidy the arena allocation code and arena-allocated data types rather than
fixing up new and delete operators on each class.
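For illustration only, a standalone sketch of the templated arena-object idea,
assuming the allocation kind is a template parameter of the base class; Arena,
AllocKind and ArenaObjectSketch are hypothetical stand-ins for ART's
ArenaAllocator, ArenaAllocKind and ArenaObject:

    #include <cstddef>
    #include <cstdlib>
    #include <vector>

    // Hypothetical stand-ins for ART's ArenaAllocKind / ArenaAllocator.
    enum AllocKind { kAllocMisc, kAllocSlowPaths, kNumAllocKinds };

    class Arena {
     public:
      Arena() : bytes_{} {}
      ~Arena() {
        for (void* p : blocks_) std::free(p);  // Everything is released in bulk.
      }
      void* Alloc(size_t size, AllocKind kind) {
        bytes_[kind] += size;         // Per-kind accounting enabled by the template tag.
        void* p = std::malloc(size);  // A real arena would bump-allocate from large blocks.
        blocks_.push_back(p);
        return p;
      }
     private:
      size_t bytes_[kNumAllocKinds];
      std::vector<void*> blocks_;
    };

    // Baking the kind into the base class gives every subclass an arena-aware
    // operator new without writing a new/delete pair per class.
    template <AllocKind kKind>
    class ArenaObjectSketch {
     public:
      void* operator new(size_t size, Arena* arena) {
        return arena->Alloc(size, kKind);
      }
      void operator delete(void*, size_t) {}  // Objects are never freed individually.
    };

    class SlowPathSketch : public ArenaObjectSketch<kAllocSlowPaths> {};

    int main() {
      Arena arena;
      SlowPathSketch* path = new (&arena) SlowPathSketch();  // Routed through Arena::Alloc.
      (void)path;  // Reclaimed when `arena` goes out of scope.
      return 0;
    }
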
Remove dead code.
Change-Id: I5b433e722d2f75baacfacae4d32aef4a828bfe1b
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index 71f0b1b..01c5cc9 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -51,7 +51,7 @@
uintptr_t native_pc;
};
-class SlowPathCode : public ArenaObject {
+class SlowPathCode : public ArenaObject<kArenaAllocSlowPaths> {
public:
SlowPathCode() {}
virtual ~SlowPathCode() {}
@@ -62,7 +62,7 @@
DISALLOW_COPY_AND_ASSIGN(SlowPathCode);
};
-class CodeGenerator : public ArenaObject {
+class CodeGenerator : public ArenaObject<kArenaAllocMisc> {
public:
// Compiles the graph to executable instructions. Returns whether the compilation
// succeeded.
@@ -115,12 +115,14 @@
// Restores the register from the stack. Returns the size taken on stack.
virtual size_t RestoreCoreRegister(size_t stack_index, uint32_t reg_id) = 0;
virtual size_t SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
- LOG(FATAL) << "Unimplemented";
- return 0u;
+ UNUSED(stack_index, reg_id);
+ UNIMPLEMENTED(FATAL);
+ UNREACHABLE();
}
virtual size_t RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
- LOG(FATAL) << "Unimplemented";
- return 0u;
+ UNUSED(stack_index, reg_id);
+ UNIMPLEMENTED(FATAL);
+ UNREACHABLE();
}
void RecordPcInfo(HInstruction* instruction, uint32_t dex_pc);
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index c812f6b..0cec4b4 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -794,6 +794,7 @@
}
void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
+ UNUSED(exit);
if (kIsDebugBuild) {
__ Comment("Unreachable");
__ bkpt(0);
@@ -959,6 +960,7 @@
void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
// Nothing to do, this is driven by the code generator.
+ UNUSED(load);
}
void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
@@ -986,6 +988,7 @@
}
void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
+ UNUSED(store);
}
void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
@@ -996,6 +999,7 @@
void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
// Will be generated at use site.
+ UNUSED(constant);
}
void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
@@ -1006,6 +1010,7 @@
void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
// Will be generated at use site.
+ UNUSED(constant);
}
void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
@@ -1016,6 +1021,7 @@
void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
// Will be generated at use site.
+ UNUSED(constant);
}
void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
@@ -1026,6 +1032,7 @@
void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
// Will be generated at use site.
+ UNUSED(constant);
}
void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
@@ -1033,6 +1040,7 @@
}
void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
+ UNUSED(ret);
codegen_->GenerateFrameExit();
}
@@ -1043,6 +1051,7 @@
}
void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
+ UNUSED(ret);
codegen_->GenerateFrameExit();
}
@@ -1508,6 +1517,7 @@
void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
// Nothing to do, the parameter is already at its location.
+ UNUSED(instruction);
}
void LocationsBuilderARM::VisitNot(HNot* not_) {
@@ -1596,6 +1606,7 @@
}
void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
+ UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
@@ -1998,9 +2009,11 @@
void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
// Nothing to do, this is driven by the code generator.
+ UNUSED(temp);
}
void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
+ UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index ec9af73..6ac7a31 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -556,6 +556,7 @@
#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) { \
+ UNUSED(instr); \
__ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
} \
void LocationsBuilderARM64::Visit##name(H##name* instr) { \
@@ -711,6 +712,7 @@
}
void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
+ UNUSED(exit);
if (kIsDebugBuild) {
down_cast<Arm64Assembler*>(GetAssembler())->Comment("Unreachable");
__ Brk(0); // TODO: Introduce special markers for such code locations.
@@ -877,6 +879,7 @@
void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
// Will be generated at use site.
+ UNUSED(constant);
}
void LocationsBuilderARM64::VisitInvokeStatic(HInvokeStatic* invoke) {
@@ -967,6 +970,7 @@
void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
// Nothing to do, this is driven by the code generator.
+ UNUSED(load);
}
void LocationsBuilderARM64::VisitLocal(HLocal* local) {
@@ -984,6 +988,7 @@
void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
// Will be generated at use site.
+ UNUSED(constant);
}
void LocationsBuilderARM64::VisitMul(HMul* mul) {
@@ -1109,6 +1114,7 @@
void InstructionCodeGeneratorARM64::VisitParameterValue(HParameterValue* instruction) {
// Nothing to do, the parameter is already at its location.
+ UNUSED(instruction);
}
void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
@@ -1120,6 +1126,7 @@
}
void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
+ UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
@@ -1164,6 +1171,7 @@
}
void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
+ UNUSED(instruction);
codegen_->GenerateFrameExit();
__ Br(lr);
}
@@ -1191,6 +1199,7 @@
}
void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
+ UNUSED(store);
}
void LocationsBuilderARM64::VisitSub(HSub* instruction) {
@@ -1242,6 +1251,7 @@
void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
// Nothing to do, this is driven by the code generator.
+ UNUSED(temp);
}
} // namespace arm64
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index a4003ff..5530f46 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -180,11 +180,15 @@
virtual Location GetStackLocation(HLoadLocal* load) const OVERRIDE;
virtual size_t SaveCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE {
+ UNUSED(stack_index);
+ UNUSED(reg_id);
UNIMPLEMENTED(INFO) << "TODO: SaveCoreRegister";
return 0;
}
virtual size_t RestoreCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE {
+ UNUSED(stack_index);
+ UNUSED(reg_id);
UNIMPLEMENTED(INFO) << "TODO: RestoreCoreRegister";
return 0;
}
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index d41d5a0..ac328c3 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -645,6 +645,7 @@
}
void InstructionCodeGeneratorX86::VisitExit(HExit* exit) {
+ UNUSED(exit);
if (kIsDebugBuild) {
__ Comment("Unreachable");
__ int3();
@@ -734,6 +735,7 @@
void InstructionCodeGeneratorX86::VisitLoadLocal(HLoadLocal* load) {
// Nothing to do, this is driven by the code generator.
+ UNUSED(load);
}
void LocationsBuilderX86::VisitStoreLocal(HStoreLocal* store) {
@@ -762,6 +764,7 @@
}
void InstructionCodeGeneratorX86::VisitStoreLocal(HStoreLocal* store) {
+ UNUSED(store);
}
void LocationsBuilderX86::VisitCondition(HCondition* comp) {
@@ -851,6 +854,7 @@
void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant) {
// Will be generated at use site.
+ UNUSED(constant);
}
void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
@@ -861,6 +865,7 @@
void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant) {
// Will be generated at use site.
+ UNUSED(constant);
}
void LocationsBuilderX86::VisitFloatConstant(HFloatConstant* constant) {
@@ -871,6 +876,7 @@
void InstructionCodeGeneratorX86::VisitFloatConstant(HFloatConstant* constant) {
// Will be generated at use site.
+ UNUSED(constant);
}
void LocationsBuilderX86::VisitDoubleConstant(HDoubleConstant* constant) {
@@ -881,6 +887,7 @@
void InstructionCodeGeneratorX86::VisitDoubleConstant(HDoubleConstant* constant) {
// Will be generated at use site.
+ UNUSED(constant);
}
void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
@@ -888,6 +895,7 @@
}
void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret) {
+ UNUSED(ret);
codegen_->GenerateFrameExit();
__ ret();
}
@@ -1456,6 +1464,7 @@
}
void InstructionCodeGeneratorX86::VisitParameterValue(HParameterValue* instruction) {
+ UNUSED(instruction);
}
void LocationsBuilderX86::VisitNot(HNot* not_) {
@@ -1550,6 +1559,7 @@
}
void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction) {
+ UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
@@ -2026,9 +2036,11 @@
void InstructionCodeGeneratorX86::VisitTemporary(HTemporary* temp) {
// Nothing to do, this is driven by the code generator.
+ UNUSED(temp);
}
void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction) {
+ UNUSED(instruction);
LOG(FATAL) << "Unreachable";
}
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index bda3520..0bc2bad 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -556,6 +556,7 @@
}
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit) {
+ UNUSED(exit);
if (kIsDebugBuild) {
__ Comment("Unreachable");
__ int3();
@@ -644,6 +645,7 @@
void InstructionCodeGeneratorX86_64::VisitLoadLocal(HLoadLocal* load) {
// Nothing to do, this is driven by the code generator.
+ UNUSED(load);
}
void LocationsBuilderX86_64::VisitStoreLocal(HStoreLocal* store) {
@@ -671,6 +673,7 @@
}
void InstructionCodeGeneratorX86_64::VisitStoreLocal(HStoreLocal* store) {
+ UNUSED(store);
}
void LocationsBuilderX86_64::VisitCondition(HCondition* comp) {
@@ -793,6 +796,7 @@
void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant) {
// Will be generated at use site.
+ UNUSED(constant);
}
void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
@@ -803,6 +807,7 @@
void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant) {
// Will be generated at use site.
+ UNUSED(constant);
}
void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
@@ -813,6 +818,7 @@
void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant) {
// Will be generated at use site.
+ UNUSED(constant);
}
void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
@@ -823,6 +829,7 @@
void InstructionCodeGeneratorX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
// Will be generated at use site.
+ UNUSED(constant);
}
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
@@ -830,6 +837,7 @@
}
void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret) {
+ UNUSED(ret);
codegen_->GenerateFrameExit();
__ ret();
}
@@ -1381,6 +1389,7 @@
void InstructionCodeGeneratorX86_64::VisitParameterValue(HParameterValue* instruction) {
// Nothing to do, the parameter is already at its location.
+ UNUSED(instruction);
}
void LocationsBuilderX86_64::VisitNot(HNot* not_) {
@@ -1423,6 +1432,7 @@
}
void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction) {
+ UNUSED(instruction);
LOG(FATAL) << "Unimplemented";
}
@@ -1902,9 +1912,11 @@
void InstructionCodeGeneratorX86_64::VisitTemporary(HTemporary* temp) {
// Nothing to do, this is driven by the code generator.
+ UNUSED(temp);
}
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction) {
+ UNUSED(instruction);
LOG(FATAL) << "Unimplemented";
}
diff --git a/compiler/optimizing/gvn.h b/compiler/optimizing/gvn.h
index a98d714..8d2c774 100644
--- a/compiler/optimizing/gvn.h
+++ b/compiler/optimizing/gvn.h
@@ -25,7 +25,7 @@
* A node in the collision list of a ValueSet. Encodes the instruction,
* the hash code, and the next node in the collision list.
*/
-class ValueSetNode : public ArenaObject {
+class ValueSetNode : public ArenaObject<kArenaAllocMisc> {
public:
ValueSetNode(HInstruction* instruction, size_t hash_code, ValueSetNode* next)
: instruction_(instruction), hash_code_(hash_code), next_(next) {}
@@ -52,7 +52,7 @@
* if there is one in the set. In GVN, we would say those instructions have the
* same "number".
*/
-class ValueSet : public ArenaObject {
+class ValueSet : public ArenaObject<kArenaAllocMisc> {
public:
explicit ValueSet(ArenaAllocator* allocator)
: allocator_(allocator), number_of_entries_(0), collisions_(nullptr) {
diff --git a/compiler/optimizing/locations.h b/compiler/optimizing/locations.h
index d7295aa..914a0c4 100644
--- a/compiler/optimizing/locations.h
+++ b/compiler/optimizing/locations.h
@@ -351,6 +351,8 @@
// way that none of them can be interpreted as a kConstant tag.
uintptr_t value_;
};
+std::ostream& operator<<(std::ostream& os, const Location::Kind& rhs);
+std::ostream& operator<<(std::ostream& os, const Location::Policy& rhs);
class RegisterSet : public ValueObject {
public:
@@ -401,7 +403,7 @@
* The intent is to have the code for generating the instruction independent of
* register allocation. A register allocator just has to provide a LocationSummary.
*/
-class LocationSummary : public ArenaObject {
+class LocationSummary : public ArenaObject<kArenaAllocMisc> {
public:
enum CallKind {
kNoCall,
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index d624ad5..8cb2ef6 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -647,4 +647,16 @@
return true;
}
+std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs) {
+#define DECLARE_CASE(type, super) case HInstruction::k##type: os << #type; break;
+ switch (rhs) {
+ FOR_EACH_INSTRUCTION(DECLARE_CASE)
+ default:
+ os << "Unknown instruction kind " << static_cast<int>(rhs);
+ break;
+ }
+#undef DECLARE_CASE
+ return os;
+}
+
} // namespace art
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 33bfe19..7549ebf 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -79,12 +79,14 @@
};
// Control-flow graph of a method. Contains a list of basic blocks.
-class HGraph : public ArenaObject {
+class HGraph : public ArenaObject<kArenaAllocMisc> {
public:
explicit HGraph(ArenaAllocator* arena)
: arena_(arena),
blocks_(arena, kDefaultNumberOfBlocks),
reverse_post_order_(arena, kDefaultNumberOfBlocks),
+ entry_block_(nullptr),
+ exit_block_(nullptr),
maximum_number_of_out_vregs_(0),
number_of_vregs_(0),
number_of_in_vregs_(0),
@@ -199,7 +201,7 @@
DISALLOW_COPY_AND_ASSIGN(HGraph);
};
-class HLoopInformation : public ArenaObject {
+class HLoopInformation : public ArenaObject<kArenaAllocMisc> {
public:
HLoopInformation(HBasicBlock* header, HGraph* graph)
: header_(header),
@@ -278,7 +280,7 @@
// as a double linked list. Each block knows its predecessors and
// successors.
-class HBasicBlock : public ArenaObject {
+class HBasicBlock : public ArenaObject<kArenaAllocMisc> {
public:
explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
: graph_(graph),
@@ -537,7 +539,7 @@
virtual void Accept(HGraphVisitor* visitor)
template <typename T>
-class HUseListNode : public ArenaObject {
+class HUseListNode : public ArenaObject<kArenaAllocMisc> {
public:
HUseListNode(T* user, size_t index, HUseListNode* tail)
: user_(user), index_(index), tail_(tail) {}
@@ -619,7 +621,7 @@
size_t flags_;
};
-class HInstruction : public ArenaObject {
+class HInstruction : public ArenaObject<kArenaAllocMisc> {
public:
explicit HInstruction(SideEffects side_effects)
: previous_(nullptr),
@@ -738,12 +740,18 @@
virtual bool CanBeMoved() const { return false; }
// Returns whether the two instructions are of the same kind.
- virtual bool InstructionTypeEquals(HInstruction* other) const { return false; }
+ virtual bool InstructionTypeEquals(HInstruction* other) const {
+ UNUSED(other);
+ return false;
+ }
// Returns whether any data encoded in the two instructions is equal.
// This method does not look at the inputs. Both instructions must be
// of the same type, otherwise the method has undefined behavior.
- virtual bool InstructionDataEquals(HInstruction* other) const { return false; }
+ virtual bool InstructionDataEquals(HInstruction* other) const {
+ UNUSED(other);
+ return false;
+ }
// Returns whether two instructions are equal, that is:
// 1) They have the same type and contain the same data,
@@ -808,6 +816,7 @@
DISALLOW_COPY_AND_ASSIGN(HInstruction);
};
+std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
template<typename T>
class HUseIterator : public ValueObject {
@@ -833,7 +842,7 @@
};
// A HEnvironment object contains the values of virtual registers at a given location.
-class HEnvironment : public ArenaObject {
+class HEnvironment : public ArenaObject<kArenaAllocMisc> {
public:
HEnvironment(ArenaAllocator* arena, size_t number_of_vregs) : vregs_(arena, number_of_vregs) {
vregs_.SetSize(number_of_vregs);
@@ -965,14 +974,14 @@
public:
intptr_t length() const { return 0; }
const T& operator[](intptr_t i) const {
+ UNUSED(i);
LOG(FATAL) << "Unreachable";
- static T sentinel = 0;
- return sentinel;
+ UNREACHABLE();
}
T& operator[](intptr_t i) {
+ UNUSED(i);
LOG(FATAL) << "Unreachable";
- static T sentinel = 0;
- return sentinel;
+ UNREACHABLE();
}
};
@@ -1110,7 +1119,10 @@
Primitive::Type GetResultType() const { return GetType(); }
virtual bool CanBeMoved() const { return true; }
- virtual bool InstructionDataEquals(HInstruction* other) const { return true; }
+ virtual bool InstructionDataEquals(HInstruction* other) const {
+ UNUSED(other);
+ return true;
+ }
// Try to statically evaluate `operation` and return a HConstant
// containing the result of this evaluation. If `operation` cannot
@@ -1143,7 +1155,10 @@
virtual bool IsCommutative() { return false; }
virtual bool CanBeMoved() const { return true; }
- virtual bool InstructionDataEquals(HInstruction* other) const { return true; }
+ virtual bool InstructionDataEquals(HInstruction* other) const {
+ UNUSED(other);
+ return true;
+ }
// Try to statically evaluate `operation` and return a HConstant
// containing the result of this evaluation. If `operation` cannot
@@ -1732,7 +1747,10 @@
: HUnaryOperation(result_type, input) {}
virtual bool CanBeMoved() const { return true; }
- virtual bool InstructionDataEquals(HInstruction* other) const { return true; }
+ virtual bool InstructionDataEquals(HInstruction* other) const {
+ UNUSED(other);
+ return true;
+ }
virtual int32_t Evaluate(int32_t x) const OVERRIDE { return ~x; }
virtual int64_t Evaluate(int64_t x) const OVERRIDE { return ~x; }
@@ -1792,7 +1810,10 @@
}
virtual bool CanBeMoved() const { return true; }
- virtual bool InstructionDataEquals(HInstruction* other) const { return true; }
+ virtual bool InstructionDataEquals(HInstruction* other) const {
+ UNUSED(other);
+ return true;
+ }
virtual bool NeedsEnvironment() const { return true; }
@@ -1884,7 +1905,10 @@
}
virtual bool CanBeMoved() const { return true; }
- virtual bool InstructionDataEquals(HInstruction* other) const { return true; }
+ virtual bool InstructionDataEquals(HInstruction* other) const {
+ UNUSED(other);
+ return true;
+ }
void SetType(Primitive::Type type) { type_ = type; }
DECLARE_INSTRUCTION(ArrayGet);
@@ -1948,7 +1972,10 @@
}
virtual bool CanBeMoved() const { return true; }
- virtual bool InstructionDataEquals(HInstruction* other) const { return true; }
+ virtual bool InstructionDataEquals(HInstruction* other) const {
+ UNUSED(other);
+ return true;
+ }
DECLARE_INSTRUCTION(ArrayLength);
@@ -1966,7 +1993,10 @@
}
virtual bool CanBeMoved() const { return true; }
- virtual bool InstructionDataEquals(HInstruction* other) const { return true; }
+ virtual bool InstructionDataEquals(HInstruction* other) const {
+ UNUSED(other);
+ return true;
+ }
virtual bool NeedsEnvironment() const { return true; }
@@ -2177,7 +2207,7 @@
DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
};
-class MoveOperands : public ArenaObject {
+class MoveOperands : public ArenaObject<kArenaAllocMisc> {
public:
MoveOperands(Location source, Location destination, HInstruction* instruction)
: source_(source), destination_(destination), instruction_(instruction) {}
@@ -2278,7 +2308,7 @@
explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
virtual ~HGraphVisitor() {}
- virtual void VisitInstruction(HInstruction* instruction) {}
+ virtual void VisitInstruction(HInstruction* instruction) { UNUSED(instruction); }
virtual void VisitBasicBlock(HBasicBlock* block);
// Visit the graph following basic block insertion order.
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index d3fe1c4..08b74c7 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -213,6 +213,7 @@
uint32_t method_idx,
jobject class_loader,
const DexFile& dex_file) const {
+ UNUSED(invoke_type);
total_compiled_methods_++;
InstructionSet instruction_set = GetCompilerDriver()->GetInstructionSet();
// Always use the thumb2 assembler: some runtime functionality (like implicit stack
diff --git a/compiler/optimizing/parallel_move_test.cc b/compiler/optimizing/parallel_move_test.cc
index 2bdcc61..62629bc 100644
--- a/compiler/optimizing/parallel_move_test.cc
+++ b/compiler/optimizing/parallel_move_test.cc
@@ -50,8 +50,8 @@
<< ")";
}
- virtual void SpillScratch(int reg) {}
- virtual void RestoreScratch(int reg) {}
+ virtual void SpillScratch(int reg ATTRIBUTE_UNUSED) {}
+ virtual void RestoreScratch(int reg ATTRIBUTE_UNUSED) {}
std::string GetMessage() const {
return message_.str();
diff --git a/compiler/optimizing/ssa_liveness_analysis.h b/compiler/optimizing/ssa_liveness_analysis.h
index 8811ac8..ca08d5b 100644
--- a/compiler/optimizing/ssa_liveness_analysis.h
+++ b/compiler/optimizing/ssa_liveness_analysis.h
@@ -25,7 +25,7 @@
static constexpr int kNoRegister = -1;
-class BlockInfo : public ArenaObject {
+class BlockInfo : public ArenaObject<kArenaAllocMisc> {
public:
BlockInfo(ArenaAllocator* allocator, const HBasicBlock& block, size_t number_of_ssa_values)
: block_(block),
@@ -53,7 +53,7 @@
* A live range contains the start and end of a range where an instruction or a temporary
* is live.
*/
-class LiveRange : public ArenaObject {
+class LiveRange FINAL : public ArenaObject<kArenaAllocMisc> {
public:
LiveRange(size_t start, size_t end, LiveRange* next) : start_(start), end_(end), next_(next) {
DCHECK_LT(start, end);
@@ -64,16 +64,16 @@
size_t GetEnd() const { return end_; }
LiveRange* GetNext() const { return next_; }
- bool IntersectsWith(const LiveRange& other) {
+ bool IntersectsWith(const LiveRange& other) const {
return (start_ >= other.start_ && start_ < other.end_)
|| (other.start_ >= start_ && other.start_ < end_);
}
- bool IsBefore(const LiveRange& other) {
+ bool IsBefore(const LiveRange& other) const {
return end_ <= other.start_;
}
- void Dump(std::ostream& stream) {
+ void Dump(std::ostream& stream) const {
stream << "[" << start_ << ", " << end_ << ")";
}
@@ -90,7 +90,7 @@
/**
* A use position represents a live interval use at a given position.
*/
-class UsePosition : public ArenaObject {
+class UsePosition : public ArenaObject<kArenaAllocMisc> {
public:
UsePosition(HInstruction* user,
size_t input_index,
@@ -137,7 +137,7 @@
* An interval is a list of disjoint live ranges where an instruction is live.
* Each instruction that has uses gets an interval.
*/
-class LiveInterval : public ArenaObject {
+class LiveInterval : public ArenaObject<kArenaAllocMisc> {
public:
static LiveInterval* MakeInterval(ArenaAllocator* allocator,
Primitive::Type type,