author Alex Light <allight@google.com> 2021-03-29 18:11:23 -0700
committer Alex Light <allight@google.com> 2021-03-30 18:01:00 +0000
commit 7432805b24cb8b6d8c754807143a691dc51adaba (patch)
tree 7693475b3520e1673cdba6210e8dc64d2becf1c4 /compiler/optimizing/optimizing_unit_test.h
parent 1a660c0bd57313bae9cd5632ab4252f417590a96 (diff)
Fix simplifier issue with predicated ifield get
In cases where all targets of an HPredicatedInstanceFieldGet instruction are known to be non-null, the simplifier would attempt to replace the default value with a null instruction, causing a null-pointer dereference. Correct the simplifier to handle this case.

Moved some LSE test helper functions to CommonCompilerTestHelper to avoid duplicating code.

Fixed an incorrect (though until now unused) constructor for HPredicatedInstanceFieldGet (the default value and target were swapped).

Test: ./test.py --host
Test: ./art/tools/compile_jars.py --profile-file bad-compile.txt ~/imgur.apk
Bug: 183942773
Change-Id: I66f4ce37d768d5e457047a3f80bd4cb9aa4546a3
Diffstat (limited to 'compiler/optimizing/optimizing_unit_test.h')
-rw-r--r--  compiler/optimizing/optimizing_unit_test.h  445
1 file changed, 383 insertions(+), 62 deletions(-)
diff --git a/compiler/optimizing/optimizing_unit_test.h b/compiler/optimizing/optimizing_unit_test.h
index cf97c41983..12c1b9879f 100644
--- a/compiler/optimizing/optimizing_unit_test.h
+++ b/compiler/optimizing/optimizing_unit_test.h
@@ -18,8 +18,12 @@
#define ART_COMPILER_OPTIMIZING_OPTIMIZING_UNIT_TEST_H_
#include <memory>
+#include <ostream>
#include <string_view>
+#include <string>
+#include <tuple>
#include <vector>
+#include <variant>
#include "base/indenter.h"
#include "base/malloc_arena_pool.h"
@@ -58,6 +62,33 @@ namespace art {
#define FIVE_REGISTERS_CODE_ITEM(...) N_REGISTERS_CODE_ITEM(5, __VA_ARGS__)
#define SIX_REGISTERS_CODE_ITEM(...) N_REGISTERS_CODE_ITEM(6, __VA_ARGS__)
+struct InstructionDumper {
+ public:
+ HInstruction* ins_;
+};
+
+inline bool operator==(const InstructionDumper& a, const InstructionDumper& b) {
+ return a.ins_ == b.ins_;
+}
+inline bool operator!=(const InstructionDumper& a, const InstructionDumper& b) {
+ return !(a == b);
+}
+
+inline std::ostream& operator<<(std::ostream& os, const InstructionDumper& id) {
+ if (id.ins_ == nullptr) {
+ return os << "NULL";
+ } else {
+ return os << "(" << id.ins_ << "): " << id.ins_->DumpWithArgs();
+ }
+}
+
+#define EXPECT_INS_EQ(a, b) EXPECT_EQ(InstructionDumper{a}, InstructionDumper{b})
+#define EXPECT_INS_REMOVED(a) EXPECT_TRUE(IsRemoved(a)) << "Not removed: " << (InstructionDumper{a})
+#define EXPECT_INS_RETAINED(a) EXPECT_FALSE(IsRemoved(a)) << "Removed: " << (InstructionDumper{a})
+#define ASSERT_INS_EQ(a, b) ASSERT_EQ(InstructionDumper{a}, InstructionDumper{b})
+#define ASSERT_INS_REMOVED(a) ASSERT_TRUE(IsRemoved(a)) << "Not removed: " << (InstructionDumper{a})
+#define ASSERT_INS_RETAINED(a) ASSERT_FALSE(IsRemoved(a)) << "Removed: " << (InstructionDumper{a})
+
inline LiveInterval* BuildInterval(const size_t ranges[][2],
size_t number_of_ranges,
ScopedArenaAllocator* allocator,
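
[Editor's usage note, not part of the patch.] The EXPECT_INS_* / ASSERT_INS_* macros added in the hunk above dump the offending instruction via DumpWithArgs() when a check fails. A minimal sketch of how a test body might use them, assuming a fixture derived from OptimizingUnitTest so that IsRemoved() from this header is in scope; the fixture name and the instructions (obj, get, default_value) are placeholders:

  // Hypothetical test body; obj, get and default_value were built earlier in the test.
  TEST_F(LoadStoreEliminationTest, PredicatedGetNonNullTargets) {
    // ... build the graph, run the pass under test ...
    EXPECT_INS_REMOVED(default_value);    // on failure, prints the surviving instruction
    EXPECT_INS_RETAINED(get);             // on failure, prints the removed instruction
    EXPECT_INS_EQ(get->InputAt(0), obj);  // on mismatch, prints both sides with their arguments
  }
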
@@ -107,6 +138,80 @@ class ArenaPoolAndAllocator {
ScopedArenaAllocator scoped_allocator_;
};
+class AdjacencyListGraph {
+ public:
+ using Edge = std::pair<const std::string_view, const std::string_view>;
+ AdjacencyListGraph(
+ HGraph* graph,
+ ArenaAllocator* alloc,
+ const std::string_view entry_name,
+ const std::string_view exit_name,
+ const std::vector<Edge>& adj) : graph_(graph) {
+ auto create_block = [&]() {
+ HBasicBlock* blk = new (alloc) HBasicBlock(graph_);
+ graph_->AddBlock(blk);
+ return blk;
+ };
+ HBasicBlock* entry = create_block();
+ HBasicBlock* exit = create_block();
+ graph_->SetEntryBlock(entry);
+ graph_->SetExitBlock(exit);
+ name_to_block_.Put(entry_name, entry);
+ name_to_block_.Put(exit_name, exit);
+ for (const auto& [src, dest] : adj) {
+ HBasicBlock* src_blk = name_to_block_.GetOrCreate(src, create_block);
+ HBasicBlock* dest_blk = name_to_block_.GetOrCreate(dest, create_block);
+ src_blk->AddSuccessor(dest_blk);
+ }
+ graph_->ClearReachabilityInformation();
+ graph_->ComputeDominanceInformation();
+ graph_->ComputeReachabilityInformation();
+ for (auto [name, blk] : name_to_block_) {
+ block_to_name_.Put(blk, name);
+ }
+ }
+
+ bool HasBlock(const HBasicBlock* blk) const {
+ return block_to_name_.find(blk) != block_to_name_.end();
+ }
+
+ std::string_view GetName(const HBasicBlock* blk) const {
+ return block_to_name_.Get(blk);
+ }
+
+ HBasicBlock* Get(const std::string_view& sv) const {
+ return name_to_block_.Get(sv);
+ }
+
+ AdjacencyListGraph(AdjacencyListGraph&&) = default;
+ AdjacencyListGraph(const AdjacencyListGraph&) = default;
+ AdjacencyListGraph& operator=(AdjacencyListGraph&&) = default;
+ AdjacencyListGraph& operator=(const AdjacencyListGraph&) = default;
+
+ std::ostream& Dump(std::ostream& os) const {
+ struct Namer : public BlockNamer {
+ public:
+ explicit Namer(const AdjacencyListGraph& alg) : BlockNamer(), alg_(alg) {}
+ std::ostream& PrintName(std::ostream& os, HBasicBlock* blk) const override {
+ if (alg_.HasBlock(blk)) {
+ return os << alg_.GetName(blk) << " (" << blk->GetBlockId() << ")";
+ } else {
+ return os << "<Unnamed B" << blk->GetBlockId() << ">";
+ }
+ }
+
+ const AdjacencyListGraph& alg_;
+ };
+ Namer namer(*this);
+ return graph_->Dump(os, namer);
+ }
+
+ private:
+ HGraph* graph_;
+ SafeMap<const std::string_view, HBasicBlock*> name_to_block_;
+ SafeMap<const HBasicBlock*, const std::string_view> block_to_name_;
+};
+
// Have a separate helper so the OptimizingCFITest can inherit it without causing
// multiple inheritance errors from having two gtest as a parent twice.
class OptimizingUnitTestHelper {
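
[Editor's usage note, not part of the patch.] AdjacencyListGraph, moved here from lower in the header (the removed copy appears further down in this diff), lets a test describe its CFG as a list of named edges and then look blocks up by name. A minimal sketch, assuming a test that already has an HGraph* graph_ and an arena allocator; the block names are arbitrary:

  // Build a diamond CFG by naming its edges.
  AdjacencyListGraph alg(graph_,
                         GetAllocator(),
                         /* entry_name= */ "entry",
                         /* exit_name= */ "exit",
                         {{"entry", "left"},
                          {"entry", "right"},
                          {"left", "ret"},
                          {"right", "ret"},
                          {"ret", "exit"}});
  HBasicBlock* left = alg.Get("left");  // look a block up by name
  EXPECT_TRUE(alg.HasBlock(left));
  alg.Dump(std::cerr);                  // dumps the graph using the chosen block names
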
@@ -290,6 +395,140 @@ class OptimizingUnitTestHelper {
}
}
+ AdjacencyListGraph SetupFromAdjacencyList(const std::string_view entry_name,
+ const std::string_view exit_name,
+ const std::vector<AdjacencyListGraph::Edge>& adj) {
+ return AdjacencyListGraph(graph_, GetAllocator(), entry_name, exit_name, adj);
+ }
+
+ void ManuallyBuildEnvFor(HInstruction* ins, const std::initializer_list<HInstruction*>& env) {
+ ArenaVector<HInstruction*> current_locals(env, GetAllocator()->Adapter(kArenaAllocInstruction));
+ OptimizingUnitTestHelper::ManuallyBuildEnvFor(ins, &current_locals);
+ }
+
+ HLoadClass* MakeClassLoad(std::optional<dex::TypeIndex> ti = std::nullopt) {
+ return new (GetAllocator()) HLoadClass(graph_->GetCurrentMethod(),
+ ti ? *ti : dex::TypeIndex(class_idx_++),
+ graph_->GetDexFile(),
+ /* klass= */ null_klass_,
+ /* is_referrers_class= */ false,
+ /* dex_pc= */ 0,
+ /* needs_access_check= */ false);
+ }
+
+ HNewInstance* MakeNewInstance(HInstruction* cls, uint32_t dex_pc = 0u) {
+ EXPECT_TRUE(cls->IsLoadClass() || cls->IsClinitCheck()) << *cls;
+ HLoadClass* load =
+ cls->IsLoadClass() ? cls->AsLoadClass() : cls->AsClinitCheck()->GetLoadClass();
+ return new (GetAllocator()) HNewInstance(cls,
+ dex_pc,
+ load->GetTypeIndex(),
+ graph_->GetDexFile(),
+ /* finalizable= */ false,
+ QuickEntrypointEnum::kQuickAllocObjectInitialized);
+ }
+
+ HInstanceFieldSet* MakeIFieldSet(HInstruction* inst,
+ HInstruction* data,
+ MemberOffset off,
+ uint32_t dex_pc = 0u) {
+ return new (GetAllocator()) HInstanceFieldSet(inst,
+ data,
+ /* field= */ nullptr,
+ /* field_type= */ data->GetType(),
+ /* field_offset= */ off,
+ /* is_volatile= */ false,
+ /* field_idx= */ 0,
+ /* declaring_class_def_index= */ 0,
+ graph_->GetDexFile(),
+ dex_pc);
+ }
+
+ HInstanceFieldGet* MakeIFieldGet(HInstruction* inst,
+ DataType::Type type,
+ MemberOffset off,
+ uint32_t dex_pc = 0u) {
+ return new (GetAllocator()) HInstanceFieldGet(inst,
+ /* field= */ nullptr,
+ /* field_type= */ type,
+ /* field_offset= */ off,
+ /* is_volatile= */ false,
+ /* field_idx= */ 0,
+ /* declaring_class_def_index= */ 0,
+ graph_->GetDexFile(),
+ dex_pc);
+ }
+
+ HInvokeStaticOrDirect* MakeInvoke(DataType::Type return_type,
+ const std::vector<HInstruction*>& args) {
+ MethodReference method_reference{/* file= */ &graph_->GetDexFile(), /* index= */ method_idx_++};
+ HInvokeStaticOrDirect* res = new (GetAllocator())
+ HInvokeStaticOrDirect(GetAllocator(),
+ args.size(),
+ return_type,
+ /* dex_pc= */ 0,
+ method_reference,
+ /* resolved_method= */ nullptr,
+ HInvokeStaticOrDirect::DispatchInfo{},
+ InvokeType::kStatic,
+ /* resolved_method_reference= */ method_reference,
+ HInvokeStaticOrDirect::ClinitCheckRequirement::kNone);
+ for (auto [ins, idx] : ZipCount(MakeIterationRange(args))) {
+ res->SetRawInputAt(idx, ins);
+ }
+ return res;
+ }
+
+ HPhi* MakePhi(const std::vector<HInstruction*>& ins) {
+ EXPECT_GE(ins.size(), 2u) << "Phi requires at least 2 inputs";
+ HPhi* phi =
+ new (GetAllocator()) HPhi(GetAllocator(), kNoRegNumber, ins.size(), ins[0]->GetType());
+ for (auto [i, idx] : ZipCount(MakeIterationRange(ins))) {
+ phi->SetRawInputAt(idx, i);
+ }
+ return phi;
+ }
+
+ void SetupExit(HBasicBlock* exit) {
+ exit->AddInstruction(new (GetAllocator()) HExit());
+ }
+
+ dex::TypeIndex DefaultTypeIndexForType(DataType::Type type) {
+ switch (type) {
+ case DataType::Type::kBool:
+ return dex::TypeIndex(1);
+ case DataType::Type::kUint8:
+ case DataType::Type::kInt8:
+ return dex::TypeIndex(2);
+ case DataType::Type::kUint16:
+ case DataType::Type::kInt16:
+ return dex::TypeIndex(3);
+ case DataType::Type::kUint32:
+ case DataType::Type::kInt32:
+ return dex::TypeIndex(4);
+ case DataType::Type::kUint64:
+ case DataType::Type::kInt64:
+ return dex::TypeIndex(5);
+ case DataType::Type::kReference:
+ return dex::TypeIndex(6);
+ case DataType::Type::kFloat32:
+ return dex::TypeIndex(7);
+ case DataType::Type::kFloat64:
+ return dex::TypeIndex(8);
+ case DataType::Type::kVoid:
+ EXPECT_TRUE(false) << "No type for void!";
+ return dex::TypeIndex(1000);
+ }
+ }
+
+ // Creates a parameter. The instruction is automatically added to the entry-block
+ HParameterValue* MakeParam(DataType::Type type, std::optional<dex::TypeIndex> ti = std::nullopt) {
+ HParameterValue* val = new (GetAllocator()) HParameterValue(
+ graph_->GetDexFile(), ti ? *ti : DefaultTypeIndexForType(type), param_count_++, type);
+ graph_->GetEntryBlock()->AddInstruction(val);
+ return val;
+ }
+
protected:
bool CheckGraph(HGraph* graph, bool check_ref_type_info, std::ostream& oss) {
GraphChecker checker(graph);
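
[Editor's usage note, not part of the patch.] The Make* factories in the hunk above only allocate instructions; the test still has to attach them to blocks and give invokes an environment before the graph checker runs. A minimal sketch under those assumptions; main_block and the field offset are placeholders, exit_block_ is the helper's member:

  HParameterValue* obj = MakeParam(DataType::Type::kReference);
  HParameterValue* val = MakeParam(DataType::Type::kInt32);
  HInstanceFieldSet* set = MakeIFieldSet(obj, val, MemberOffset(32));
  HInstanceFieldGet* get = MakeIFieldGet(obj, DataType::Type::kInt32, MemberOffset(32));
  HInvokeStaticOrDirect* call = MakeInvoke(DataType::Type::kVoid, { obj });
  main_block->AddInstruction(set);
  main_block->AddInstruction(get);
  main_block->AddInstruction(call);
  ManuallyBuildEnvFor(call, {});  // invokes need an environment
  SetupExit(exit_block_);         // puts an HExit in the exit block
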
@@ -308,6 +547,12 @@ class OptimizingUnitTestHelper {
HBasicBlock* exit_block_;
std::vector<HInstruction*> parameters_;
+
+ size_t param_count_ = 0;
+ size_t class_idx_ = 42;
+ uint32_t method_idx_ = 100;
+
+ ScopedNullHandle<mirror::Class> null_klass_;
};
class OptimizingUnitTest : public CommonArtTest, public OptimizingUnitTestHelper {};
@@ -336,82 +581,158 @@ inline bool IsRemoved(HInstruction* instruction) {
return instruction->GetBlock() == nullptr;
}
-class AdjacencyListGraph {
- public:
- using Edge = std::pair<const std::string_view, const std::string_view>;
- AdjacencyListGraph(
- HGraph* graph,
- ArenaAllocator* alloc,
- const std::string_view entry_name,
- const std::string_view exit_name,
- const std::vector<Edge>& adj) : graph_(graph) {
- auto create_block = [&]() {
- HBasicBlock* blk = new (alloc) HBasicBlock(graph_);
- graph_->AddBlock(blk);
- return blk;
- };
- HBasicBlock* entry = create_block();
- HBasicBlock* exit = create_block();
- graph_->SetEntryBlock(entry);
- graph_->SetExitBlock(exit);
- name_to_block_.Put(entry_name, entry);
- name_to_block_.Put(exit_name, exit);
- for (const auto& [src, dest] : adj) {
- HBasicBlock* src_blk = name_to_block_.GetOrCreate(src, create_block);
- HBasicBlock* dest_blk = name_to_block_.GetOrCreate(dest, create_block);
- src_blk->AddSuccessor(dest_blk);
- }
- graph_->ClearReachabilityInformation();
- graph_->ComputeDominanceInformation();
- graph_->ComputeReachabilityInformation();
- for (auto [name, blk] : name_to_block_) {
- block_to_name_.Put(blk, name);
+inline std::ostream& operator<<(std::ostream& oss, const AdjacencyListGraph& alg) {
+ return alg.Dump(oss);
+}
+
+class PatternMatchGraphVisitor : public HGraphVisitor {
+ private:
+ struct HandlerWrapper {
+ public:
+ virtual ~HandlerWrapper() {}
+ virtual void operator()(HInstruction* h) = 0;
+ };
+
+ template <HInstruction::InstructionKind kKind, typename F>
+ struct KindWrapper;
+
+#define GEN_HANDLER(nm, unused) \
+ template <typename F> \
+ struct KindWrapper<HInstruction::InstructionKind::k##nm, F> : public HandlerWrapper { \
+ public: \
+ explicit KindWrapper(F f) : f_(f) {} \
+ void operator()(HInstruction* h) override { \
+ if constexpr (std::is_invocable_v<F, H##nm*>) { \
+ f_(h->As##nm()); \
+ } else { \
+ LOG(FATAL) << "Incorrect call with " << #nm; \
+ } \
+ } \
+ \
+ private: \
+ F f_; \
+ };
+
+ FOR_EACH_CONCRETE_INSTRUCTION(GEN_HANDLER)
+#undef GEN_HANDLER
+
+ template <typename F>
+ std::unique_ptr<HandlerWrapper> GetWrapper(HInstruction::InstructionKind kind, F f) {
+ switch (kind) {
+#define GEN_GETTER(nm, unused) \
+ case HInstruction::InstructionKind::k##nm: \
+ return std::unique_ptr<HandlerWrapper>( \
+ new KindWrapper<HInstruction::InstructionKind::k##nm, F>(f));
+ FOR_EACH_CONCRETE_INSTRUCTION(GEN_GETTER)
+#undef GEN_GETTER
+ default:
+ LOG(FATAL) << "Unable to handle kind " << kind;
+ return nullptr;
}
}
- bool HasBlock(const HBasicBlock* blk) const {
- return block_to_name_.find(blk) != block_to_name_.end();
+ public:
+ template <typename... Inst>
+ explicit PatternMatchGraphVisitor(HGraph* graph, Inst... handlers) : HGraphVisitor(graph) {
+ FillHandlers(handlers...);
}
- std::string_view GetName(const HBasicBlock* blk) const {
- return block_to_name_.Get(blk);
+ void VisitInstruction(HInstruction* instruction) override {
+ auto& h = handlers_[instruction->GetKind()];
+ if (h.get() != nullptr) {
+ (*h)(instruction);
+ }
}
- HBasicBlock* Get(const std::string_view& sv) const {
- return name_to_block_.Get(sv);
+ private:
+ template <typename Func>
+ constexpr HInstruction::InstructionKind GetKind() {
+#define CHECK_INST(nm, unused) \
+ if constexpr (std::is_invocable_v<Func, H##nm*>) { \
+ return HInstruction::InstructionKind::k##nm; \
+ }
+ FOR_EACH_CONCRETE_INSTRUCTION(CHECK_INST);
+#undef CHECK_INST
+ static_assert(!std::is_invocable_v<Func, HInstruction*>,
+ "Use on generic HInstruction not allowed");
+#define STATIC_ASSERT_ABSTRACT(nm, unused) && !std::is_invocable_v<Func, H##nm*>
+ static_assert(true FOR_EACH_ABSTRACT_INSTRUCTION(STATIC_ASSERT_ABSTRACT),
+ "Must not be abstract instruction");
+#undef STATIC_ASSERT_ABSTRACT
+#define STATIC_ASSERT_CONCRETE(nm, unused) || std::is_invocable_v<Func, H##nm*>
+ static_assert(false FOR_EACH_CONCRETE_INSTRUCTION(STATIC_ASSERT_CONCRETE),
+ "Must be a concrete instruction");
+#undef STATIC_ASSERT_CONCRETE
+ return HInstruction::InstructionKind::kLastInstructionKind;
+ }
+ template <typename First>
+ void FillHandlers(First h1) {
+ HInstruction::InstructionKind type = GetKind<First>();
+ CHECK_NE(type, HInstruction::kLastInstructionKind)
+ << "Unknown instruction kind. Only concrete ones please.";
+ handlers_[type] = GetWrapper(type, h1);
}
- AdjacencyListGraph(AdjacencyListGraph&&) = default;
- AdjacencyListGraph(const AdjacencyListGraph&) = default;
- AdjacencyListGraph& operator=(AdjacencyListGraph&&) = default;
- AdjacencyListGraph& operator=(const AdjacencyListGraph&) = default;
+ template <typename First, typename... Inst>
+ void FillHandlers(First h1, Inst... handlers) {
+ FillHandlers(h1);
+ FillHandlers<Inst...>(handlers...);
+ }
- std::ostream& Dump(std::ostream& os) const {
- struct Namer : public BlockNamer {
- public:
- explicit Namer(const AdjacencyListGraph& alg) : BlockNamer(), alg_(alg) {}
- std::ostream& PrintName(std::ostream& os, HBasicBlock* blk) const override {
- if (alg_.HasBlock(blk)) {
- return os << alg_.GetName(blk) << " (" << blk->GetBlockId() << ")";
- } else {
- return os << "<Unnamed B" << blk->GetBlockId() << ">";
- }
- }
+ std::array<std::unique_ptr<HandlerWrapper>, HInstruction::InstructionKind::kLastInstructionKind>
+ handlers_;
+};
- const AdjacencyListGraph& alg_;
- };
- Namer namer(*this);
- return graph_->Dump(os, namer);
+template <typename... Target>
+std::tuple<std::vector<Target*>...> FindAllInstructions(
+ HGraph* graph,
+ std::variant<std::nullopt_t, HBasicBlock*, std::initializer_list<HBasicBlock*>> blks =
+ std::nullopt) {
+ std::tuple<std::vector<Target*>...> res;
+ PatternMatchGraphVisitor vis(
+ graph, [&](Target* t) { std::get<std::vector<Target*>>(res).push_back(t); }...);
+
+ if (std::holds_alternative<std::initializer_list<HBasicBlock*>>(blks)) {
+ for (HBasicBlock* blk : std::get<std::initializer_list<HBasicBlock*>>(blks)) {
+ vis.VisitBasicBlock(blk);
+ }
+ } else if (std::holds_alternative<std::nullopt_t>(blks)) {
+ vis.VisitInsertionOrder();
+ } else {
+ vis.VisitBasicBlock(std::get<HBasicBlock*>(blks));
}
+ return res;
+}
- private:
- HGraph* graph_;
- SafeMap<const std::string_view, HBasicBlock*> name_to_block_;
- SafeMap<const HBasicBlock*, const std::string_view> block_to_name_;
-};
+template <typename... Target>
+std::tuple<Target*...> FindSingleInstructions(
+ HGraph* graph,
+ std::variant<std::nullopt_t, HBasicBlock*, std::initializer_list<HBasicBlock*>> blks =
+ std::nullopt) {
+ std::tuple<Target*...> res;
+ PatternMatchGraphVisitor vis(graph, [&](Target* t) {
+ EXPECT_EQ(std::get<Target*>(res), nullptr)
+ << *std::get<Target*>(res) << " already found but found " << *t << "!";
+ std::get<Target*>(res) = t;
+ }...);
+ if (std::holds_alternative<std::initializer_list<HBasicBlock*>>(blks)) {
+ for (HBasicBlock* blk : std::get<std::initializer_list<HBasicBlock*>>(blks)) {
+ vis.VisitBasicBlock(blk);
+ }
+ } else if (std::holds_alternative<std::nullopt_t>(blks)) {
+ vis.VisitInsertionOrder();
+ } else {
+ vis.VisitBasicBlock(std::get<HBasicBlock*>(blks));
+ }
+ return res;
+}
-inline std::ostream& operator<<(std::ostream& oss, const AdjacencyListGraph& alg) {
- return alg.Dump(oss);
+template <typename Target>
+Target* FindSingleInstruction(
+ HGraph* graph,
+ std::variant<std::nullopt_t, HBasicBlock*, std::initializer_list<HBasicBlock*>> blks =
+ std::nullopt) {
+ return std::get<Target*>(FindSingleInstructions<Target>(graph, blks));
}
} // namespace art
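
[Editor's usage note, not part of the patch.] FindAllInstructions and FindSingleInstruction(s) give tests a terse way to pull instructions back out of the graph after a pass has run, either over the whole graph or restricted to particular blocks. A minimal sketch; alg is an AdjacencyListGraph and param an instruction created earlier in the hypothetical test:

  // Whole-graph search; the tuple elements follow the template argument order.
  auto [gets, sets] =
      FindAllInstructions<HInstanceFieldGet, HInstanceFieldSet>(graph_);
  EXPECT_EQ(gets.size(), 1u);
  // Restrict the search to a single named block.
  HIf* if_ins = FindSingleInstruction<HIf>(graph_, alg.Get("entry"));
  EXPECT_INS_EQ(if_ins->InputAt(0), param);
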