Rewrite HInstruction::Is/As<type>().
Make Is<type>() and As<type>() non-virtual for concrete
instruction types, relying on GetKind(), and mark GetKind()
as PURE to improve optimization opportunities. This reduces
the number of relocations in libart-compiler.so's .rel.dyn
section by ~4K, or ~44%, and in .data.rel.ro by ~18K, or
~65%. The resulting libart-compiler.so is 96KiB smaller for
Nexus 5, including an 8KiB reduction of the .text section.
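Roughly, the new shape is as in the following sketch (a hypothetical,
simplified hierarchy with only HAdd/HSub; the real code generates the
helpers via the DECLARE_INSTRUCTION and INSTRUCTION_TYPE_CHECK macros
in the diff below):

  // Minimal sketch, not the actual ART macros: one virtual kind query,
  // with non-virtual Is/As helpers built on top of it instead of a
  // virtual As<type>() per concrete instruction type.
  #include <cassert>

  class HAdd;
  class HSub;

  class HInstruction {
   public:
    enum InstructionKind { kAdd, kSub };
    virtual ~HInstruction() {}

    // The only virtual hook; every concrete type overrides it once.
    virtual InstructionKind GetKind() const = 0;

    // Non-virtual type checks: no vtable slot and no relocation per
    // type; they inline to a simple kind comparison.
    bool IsAdd() const { return GetKind() == kAdd; }
    bool IsSub() const { return GetKind() == kSub; }
    const HAdd* AsAdd() const;
    const HSub* AsSub() const;
  };

  class HAdd : public HInstruction {
   public:
    InstructionKind GetKind() const override { return kAdd; }
  };

  class HSub : public HInstruction {
   public:
    InstructionKind GetKind() const override { return kSub; }
  };

  inline const HAdd* HInstruction::AsAdd() const {
    return IsAdd() ? static_cast<const HAdd*>(this) : nullptr;
  }
  inline const HSub* HInstruction::AsSub() const {
    return IsSub() ? static_cast<const HSub*>(this) : nullptr;
  }

  int main() {
    HAdd add;
    const HInstruction* insn = &add;
    assert(insn->IsAdd() && !insn->IsSub());
    assert(insn->AsAdd() == &add && insn->AsSub() == nullptr);
    return 0;
  }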
Unfortunately, the g++/clang++ __attribute__((pure)) is not
strong enough to avoid duplicated virtual calls, so we would
need the C++ [[pure]] attribute proposed in n3744 instead.
To work around this deficiency, we introduce an extra
non-virtual indirection for GetKind(), so that the compiler
can optimize common expressions such as
instruction->IsAdd() || instruction->IsSub()
or
instruction->IsAdd() && instruction->AsAdd()->...
which contain two virtual calls to GetKind() after inlining.
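The indirection itself looks roughly like the sketch below (simplified
and illustrative; PURE stands in for ART's attribute macro, and the
GetKind() definition is assumed to live in a separate .cc file so that
call sites only see the pure declaration and can merge repeated calls):

  #define PURE __attribute__((pure))

  class HInstruction {
   public:
    enum InstructionKind { kAdd, kSub };
    virtual ~HInstruction() {}

    // Non-virtual and declared pure: repeated calls on the same object
    // with no intervening writes may be merged by the compiler.
    InstructionKind GetKind() const PURE;

    bool IsAdd() const { return GetKind() == kAdd; }
    bool IsSub() const { return GetKind() == kSub; }

   protected:
    // The virtual call is hidden behind the non-virtual GetKind().
    virtual InstructionKind GetKindInternal() const = 0;
  };

  // In the real code this definition would sit in a .cc file, so that
  // callers see only the pure declaration and cannot inline the
  // virtual call.
  HInstruction::InstructionKind HInstruction::GetKind() const {
    return GetKindInternal();
  }

  class HAdd : public HInstruction {
   protected:
    InstructionKind GetKindInternal() const override { return kAdd; }
  };

  // After inlining IsAdd()/IsSub(), both checks read one GetKind()
  // result, i.e. a single virtual call followed by two comparisons.
  bool IsAddOrSub(const HInstruction* insn) {
    return insn->IsAdd() || insn->IsSub();
  }

  int main() {
    HAdd add;
    return IsAddOrSub(&add) ? 0 : 1;
  }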
Change-Id: I83787de0671a5cb9f5b0a5f4a536cef239d5b401
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 1f8ef47..00820a6 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -1034,7 +1034,6 @@
M(ClearException, Instruction) \
M(ClinitCheck, Instruction) \
M(Compare, BinaryOperation) \
- M(Condition, BinaryOperation) \
M(CurrentMethod, Instruction) \
M(Deoptimize, Instruction) \
M(Div, BinaryOperation) \
@@ -1141,27 +1140,34 @@
FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \
FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
-#define FOR_EACH_INSTRUCTION(M) \
- FOR_EACH_CONCRETE_INSTRUCTION(M) \
+#define FOR_EACH_ABSTRACT_INSTRUCTION(M) \
+ M(Condition, BinaryOperation) \
M(Constant, Instruction) \
M(UnaryOperation, Instruction) \
M(BinaryOperation, Instruction) \
M(Invoke, Instruction)
+#define FOR_EACH_INSTRUCTION(M) \
+ FOR_EACH_CONCRETE_INSTRUCTION(M) \
+ FOR_EACH_ABSTRACT_INSTRUCTION(M)
+
#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION
#define DECLARE_INSTRUCTION(type) \
- InstructionKind GetKind() const OVERRIDE { return k##type; } \
+ InstructionKind GetKindInternal() const OVERRIDE { return k##type; } \
const char* DebugName() const OVERRIDE { return #type; } \
- const H##type* As##type() const OVERRIDE { return this; } \
- H##type* As##type() OVERRIDE { return this; } \
bool InstructionTypeEquals(HInstruction* other) const OVERRIDE { \
return other->Is##type(); \
} \
void Accept(HGraphVisitor* visitor) OVERRIDE
+#define DECLARE_ABSTRACT_INSTRUCTION(type) \
+ bool Is##type() const { return As##type() != nullptr; } \
+ const H##type* As##type() const { return this; } \
+ H##type* As##type() { return this; }
+
template <typename T> class HUseList;
template <typename T>
@@ -1972,11 +1978,18 @@
void MoveBeforeFirstUserAndOutOfLoops();
#define INSTRUCTION_TYPE_CHECK(type, super) \
+ bool Is##type() const; \
+ const H##type* As##type() const; \
+ H##type* As##type();
+
+ FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
+#undef INSTRUCTION_TYPE_CHECK
+
+#define INSTRUCTION_TYPE_CHECK(type, super) \
bool Is##type() const { return (As##type() != nullptr); } \
virtual const H##type* As##type() const { return nullptr; } \
virtual H##type* As##type() { return nullptr; }
-
- FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
+ FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK
// Returns whether the instruction can be moved within the graph.
@@ -1999,7 +2012,12 @@
// 2) Their inputs are identical.
bool Equals(HInstruction* other) const;
- virtual InstructionKind GetKind() const = 0;
+ // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
+ // is adopted and implemented by our C++ compiler(s). For now, we need to hide
+ // the virtual function because __attribute__((__pure__)) does not provide a
+ // strong enough guarantee for virtual functions, preventing the optimization.
+ InstructionKind GetKind() const PURE;
+ virtual InstructionKind GetKindInternal() const = 0;
virtual size_t ComputeHashCode() const {
size_t result = GetKind();
@@ -2297,7 +2315,7 @@
virtual uint64_t GetValueAsUint64() const = 0;
- DECLARE_INSTRUCTION(Constant);
+ DECLARE_ABSTRACT_INSTRUCTION(Constant);
private:
DISALLOW_COPY_AND_ASSIGN(HConstant);
@@ -2558,7 +2576,7 @@
virtual HConstant* Evaluate(HIntConstant* x) const = 0;
virtual HConstant* Evaluate(HLongConstant* x) const = 0;
- DECLARE_INSTRUCTION(UnaryOperation);
+ DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);
private:
DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
@@ -2651,7 +2669,7 @@
// one. Otherwise it returns null.
HInstruction* GetLeastConstantLeft() const;
- DECLARE_INSTRUCTION(BinaryOperation);
+ DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);
private:
DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
@@ -2679,7 +2697,7 @@
// `instruction`, and disregard moves in between.
bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;
- DECLARE_INSTRUCTION(Condition);
+ DECLARE_ABSTRACT_INSTRUCTION(Condition);
virtual IfCondition GetCondition() const = 0;
@@ -3288,7 +3306,7 @@
bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }
- DECLARE_INSTRUCTION(Invoke);
+ DECLARE_ABSTRACT_INSTRUCTION(Invoke);
protected:
HInvoke(ArenaAllocator* arena,
@@ -5869,6 +5887,18 @@
return &lhs == &rhs;
}
+#define INSTRUCTION_TYPE_CHECK(type, super) \
+ inline bool HInstruction::Is##type() const { return GetKind() == k##type; } \
+ inline const H##type* HInstruction::As##type() const { \
+ return Is##type() ? down_cast<const H##type*>(this) : nullptr; \
+ } \
+ inline H##type* HInstruction::As##type() { \
+ return Is##type() ? static_cast<H##type*>(this) : nullptr; \
+ }
+
+ FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
+#undef INSTRUCTION_TYPE_CHECK
+
} // namespace art
#endif // ART_COMPILER_OPTIMIZING_NODES_H_