Reimplement HInstruction::Is##type().
Use a table-based implementation for abstract kinds. This
implementation can be fully inlined and even eliminated if
the range of possible kinds is known.
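For illustration, the abstract-kind check boils down to something
like the following minimal standalone sketch (HBinaryOperation,
HAdd, HSub and HGoto stand in for the real instruction list; this
is not the actual macro expansion in nodes.h):

  #include <cstddef>
  #include <type_traits>

  struct HInstruction {};
  struct HBinaryOperation : HInstruction {};  // abstract kind
  struct HAdd : HBinaryOperation {};          // concrete kinds
  struct HSub : HBinaryOperation {};
  struct HGoto : HInstruction {};

  enum InstructionKind : size_t { kAdd, kSub, kGoto, kLastInstructionKind };

  // One compile-time bool per concrete kind: "does it derive from
  // HBinaryOperation?". The lookup is a plain array index, so the
  // compiler can inline it and fold it away when the kind is known.
  inline bool IsBinaryOperation(InstructionKind kind) {
    static constexpr bool results[] = {
        std::is_base_of<HBinaryOperation, HAdd>::value,   // kAdd
        std::is_base_of<HBinaryOperation, HSub>::value,   // kSub
        std::is_base_of<HBinaryOperation, HGoto>::value,  // kGoto
    };
    return results[static_cast<size_t>(kind)];
  }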
There is a small (<0.5%) improvement in dex2oat compile time
for a big app (sum of the "Compile Dex File Quick" timings
dumped with --dump-timings, with the timing of compiler passes
hacked away) on aosp_taimen-userdebug, little cores, -j4.
The sizes of libart-compiler.so have changed:
- lib/libart-compiler.so: 2178608 -> 2169136 (-9472)
.rodata: 0xd36c -> 0xd55c (+496)
.text: 0x175914 -> 0x175f44 (+1584)
.data.rel.ro: 0xa858 -> 0x84e8 (-9072)
- lib64/libart-compiler.so: 3091432 -> 3064248 (-27184)
.rodata: 0x15aa4 -> 0x15c94 (+496)
.text: 0x203304 -> 0x202294 (-4208)
.data.rel.ro: 0x151d8 -> 0x10a68 (-18288)
Note that .data.rel.ro is always dirty memory (the dynamic linker
writes relocations into it at load time) while .rodata and .text
are always clean; the .data.rel.ro savings presumably come from
smaller vtables once the virtual As##type() overrides are gone.
Test: Rely on TreeHugger
Change-Id: I95e6040ecd23cad83f024970c3bf785d32169deb
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 1d913c0..ae1e606 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -1534,10 +1534,7 @@
#define DECLARE_ABSTRACT_INSTRUCTION(type) \
private: \
H##type& operator=(const H##type&) = delete; \
- public: \
- bool Is##type() const { return As##type() != nullptr; } \
- const H##type* As##type() const { return this; } \
- H##type* As##type() { return this; }
+ public:
#define DEFAULT_COPY_CONSTRUCTOR(type) \
explicit H##type(const H##type& other) = default;
@@ -2231,19 +2228,17 @@
void MoveBeforeFirstUserAndOutOfLoops();
#define INSTRUCTION_TYPE_CHECK(type, super) \
- bool Is##type() const; \
+ bool Is##type() const;
+
+ FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
+#undef INSTRUCTION_TYPE_CHECK
+
+#define INSTRUCTION_TYPE_CAST(type, super) \
const H##type* As##type() const; \
H##type* As##type();
- FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
-#undef INSTRUCTION_TYPE_CHECK
-
-#define INSTRUCTION_TYPE_CHECK(type, super) \
- bool Is##type() const { return (As##type() != nullptr); } \
- virtual const H##type* As##type() const { return nullptr; } \
- virtual H##type* As##type() { return nullptr; }
- FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
-#undef INSTRUCTION_TYPE_CHECK
+ FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CAST)
+#undef INSTRUCTION_TYPE_CAST
// Return a clone of the instruction if it is clonable (shallow copy by default, custom copy
// if a custom copy-constructor is provided for a particular type). If IsClonable() is false for
@@ -7753,7 +7748,27 @@
}
#define INSTRUCTION_TYPE_CHECK(type, super) \
- inline bool HInstruction::Is##type() const { return GetKind() == k##type; } \
+ inline bool HInstruction::Is##type() const { return GetKind() == k##type; }
+ FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
+#undef INSTRUCTION_TYPE_CHECK
+
+#define INSTRUCTION_TYPE_CHECK_RESULT(type, super) \
+ std::is_base_of<BaseType, H##type>::value,
+#define INSTRUCTION_TYPE_CHECK(type, super) \
+ inline bool HInstruction::Is##type() const { \
+ DCHECK_LT(GetKind(), kLastInstructionKind); \
+ using BaseType = H##type; \
+ static constexpr bool results[] = { \
+ FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK_RESULT) \
+ }; \
+ return results[static_cast<size_t>(GetKind())]; \
+ }
+
+ FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
+#undef INSTRUCTION_TYPE_CHECK
+#undef INSTRUCTION_TYPE_CHECK_RESULT
+
+#define INSTRUCTION_TYPE_CAST(type, super) \
inline const H##type* HInstruction::As##type() const { \
return Is##type() ? down_cast<const H##type*>(this) : nullptr; \
} \
@@ -7761,8 +7776,9 @@
return Is##type() ? static_cast<H##type*>(this) : nullptr; \
}
- FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
-#undef INSTRUCTION_TYPE_CHECK
+ FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CAST)
+#undef INSTRUCTION_TYPE_CAST
+
// Create space in `blocks` for adding `number_of_new_blocks` entries
// starting at location `at`. Blocks after `at` are moved accordingly.