Fix issue with Partial LSE and casts/instanceof
If Partial LSE encounters an instanceof or check-cast on an object
before that object escapes, it cannot handle it correctly. Due to how
Java language code is typically developed and compiled this is
generally not a problem, but it could lead to incorrect codegen on
release builds or DCHECK failures on debug builds. This fixes the
issue by (1) making Partial LSE consider check-casts and instance-ofs
to be escaping, and (2) updating the instruction simplifier to be much
more aggressive in removing instance-ofs and check-casts.
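
As a sketch of the failure mode (the class and method names below are
hypothetical, not taken from the bug), the issue can arise when a cast
or instanceof is applied to a new-instance that escapes on only some
paths:

  Foo f = new Foo();   // candidate for Partial LSE
  f.intField = 42;
  if (cond) {
    escape(f);         // f escapes only on this path
  } else {
    Object o = f;
    Bar b = (Bar) o;   // check-cast observed before any real escape;
                       // previously mishandled, now treated as an
                       // escape itself
  }
  return f.intField;

With change (2) the simplifier can usually fold such a cast away
entirely when the object's exact type is known, so treating the
remaining ones as escapes costs little in practice.
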
Test: ./test.py --host
Bug: 186041085
Change-Id: Ia513c4210a87a0dfa92f10adc530e17ee631d006
diff --git a/compiler/optimizing/escape.cc b/compiler/optimizing/escape.cc
index f3f5b15..617833c 100644
--- a/compiler/optimizing/escape.cc
+++ b/compiler/optimizing/escape.cc
@@ -41,6 +41,13 @@
if (!escape_visitor(user)) {
return;
}
+ } else if (user->IsCheckCast() || user->IsInstanceOf()) {
+ // TODO: Be conservative for Partial LSE and treat check-casts and
+ // instance-ofs as escapes, since optimizing them away would require
+ // adding blocks. Normally the simplifier should remove them first.
+ if (!escape_visitor(user)) {
+ return;
+ }
} else if (user->IsPhi() ||
user->IsSelect() ||
(user->IsInvoke() && user->GetSideEffects().DoesAnyWrite()) ||
@@ -108,6 +115,9 @@
// Ignore already known inherent escapes and escapes client supplied
// analysis knows is safe. Continue on.
return true;
+ } else if (escape->IsInstanceOf() || escape->IsCheckCast()) {
+ // Ignore since these are not relevant for regular LSE.
+ return true;
} else if (escape->IsReturn()) {
// value is returned but might still be singleton. Continue on.
*is_singleton_and_not_returned = false;
diff --git a/compiler/optimizing/instruction_simplifier.cc b/compiler/optimizing/instruction_simplifier.cc
index 938a775..23a432e 100644
--- a/compiler/optimizing/instruction_simplifier.cc
+++ b/compiler/optimizing/instruction_simplifier.cc
@@ -611,7 +611,16 @@
if (!class_rti.IsValid()) {
// Happens when the loaded class is unresolved.
- return false;
+ if (obj_rti.IsExact()) {
+ // outcome == 'true' with a valid, exact obj_rti would imply that
+ // class_rti is valid; that contradiction means this check cannot pass.
+ *outcome = false;
+ return true;
+ } else {
+ // We aren't able to say anything in particular since we don't know the
+ // exact type of the object.
+ return false;
+ }
}
DCHECK(class_rti.IsExact());
if (class_rti.IsSupertypeOf(obj_rti)) {
@@ -633,12 +642,6 @@
void InstructionSimplifierVisitor::VisitCheckCast(HCheckCast* check_cast) {
HInstruction* object = check_cast->InputAt(0);
- if (check_cast->GetTypeCheckKind() != TypeCheckKind::kBitstringCheck &&
- check_cast->GetTargetClass()->NeedsAccessCheck()) {
- // If we need to perform an access check we cannot remove the instruction.
- return;
- }
-
if (CanEnsureNotNullAt(object, check_cast)) {
check_cast->ClearMustDoNullCheck();
}
@@ -649,6 +652,11 @@
return;
}
+ // Correctness check: the target class must strictly dominate this use.
+ DCHECK(check_cast->GetTargetClass()->StrictlyDominates(check_cast))
+ << "Illegal graph!\n"
+ << check_cast->DumpWithArgs();
+
// Historical note: The `outcome` was initialized to please Valgrind - the compiler can reorder
// the return value check with the `outcome` check, b/27651442.
bool outcome = false;
@@ -658,27 +666,23 @@
MaybeRecordStat(stats_, MethodCompilationStat::kRemovedCheckedCast);
if (check_cast->GetTypeCheckKind() != TypeCheckKind::kBitstringCheck) {
HLoadClass* load_class = check_cast->GetTargetClass();
- if (!load_class->HasUses()) {
+ if (!load_class->HasUses() && !load_class->NeedsAccessCheck()) {
// We cannot rely on DCE to remove the class because the `HLoadClass` thinks it can throw.
- // However, here we know that it cannot because the checkcast was successfull, hence
+ // However, here we know that it cannot because the checkcast was successful, hence
// the class was already loaded.
load_class->GetBlock()->RemoveInstruction(load_class);
}
}
} else {
- // Don't do anything for exceptional cases for now. Ideally we should remove
- // all instructions and blocks this instruction dominates.
+ // TODO: For now, don't do anything for the exceptional case. Ideally
+ // we should remove all instructions and blocks this instruction
+ // dominates and replace it with an explicit throw.
}
}
}
void InstructionSimplifierVisitor::VisitInstanceOf(HInstanceOf* instruction) {
HInstruction* object = instruction->InputAt(0);
- if (instruction->GetTypeCheckKind() != TypeCheckKind::kBitstringCheck &&
- instruction->GetTargetClass()->NeedsAccessCheck()) {
- // If we need to perform an access check we cannot remove the instruction.
- return;
- }
bool can_be_null = true;
if (CanEnsureNotNullAt(object, instruction)) {
@@ -695,6 +699,11 @@
return;
}
+ // Correctness check: the target class must strictly dominate this use.
+ DCHECK(instruction->GetTargetClass()->StrictlyDominates(instruction))
+ << "Illegal graph!\n"
+ << instruction->DumpWithArgs();
+
// Historical note: The `outcome` was initialized to please Valgrind - the compiler can reorder
// the return value check with the `outcome` check, b/27651442.
bool outcome = false;
@@ -713,10 +722,11 @@
instruction->GetBlock()->RemoveInstruction(instruction);
if (outcome && instruction->GetTypeCheckKind() != TypeCheckKind::kBitstringCheck) {
HLoadClass* load_class = instruction->GetTargetClass();
- if (!load_class->HasUses()) {
- // We cannot rely on DCE to remove the class because the `HLoadClass` thinks it can throw.
- // However, here we know that it cannot because the instanceof check was successfull, hence
- // the class was already loaded.
+ if (!load_class->HasUses() && !load_class->NeedsAccessCheck()) {
+ // We cannot rely on DCE to remove the class because the `HLoadClass`
+ // thinks it can throw. However, here we know that it cannot because the
+ // instanceof check was successful and we don't need to check the
+ // access, hence the class was already loaded.
load_class->GetBlock()->RemoveInstruction(load_class);
}
}
diff --git a/compiler/optimizing/instruction_simplifier_test.cc b/compiler/optimizing/instruction_simplifier_test.cc
index 7d93809..ac0bdb9 100644
--- a/compiler/optimizing/instruction_simplifier_test.cc
+++ b/compiler/optimizing/instruction_simplifier_test.cc
@@ -20,25 +20,100 @@
#include <tuple>
#include "gtest/gtest.h"
+
+#include "class_root-inl.h"
#include "nodes.h"
#include "optimizing/data_type.h"
#include "optimizing_unit_test.h"
namespace art {
-class InstructionSimplifierTest : public CommonCompilerTest, public OptimizingUnitTestHelper {
+namespace mirror {
+class ClassExt;
+class Throwable;
+} // namespace mirror
+
+template<typename SuperClass>
+class InstructionSimplifierTestBase : public SuperClass, public OptimizingUnitTestHelper {
public:
void SetUp() override {
- CommonCompilerTest::SetUp();
+ SuperClass::SetUp();
gLogVerbosity.compiler = true;
}
void TearDown() override {
- CommonCompilerTest::TearDown();
+ SuperClass::TearDown();
gLogVerbosity.compiler = false;
}
};
+class InstructionSimplifierTest : public InstructionSimplifierTestBase<CommonCompilerTest> {};
+
+// Various configs we can use for testing. Currently used in PartialComparison tests.
+enum class InstanceOfKind {
+ kSelf,
+ kUnrelatedLoaded,
+ kUnrelatedUnloaded,
+ kSupertype,
+};
+
+std::ostream& operator<<(std::ostream& os, const InstanceOfKind& comp) {
+ switch (comp) {
+ case InstanceOfKind::kSupertype:
+ return os << "kSupertype";
+ case InstanceOfKind::kSelf:
+ return os << "kSelf";
+ case InstanceOfKind::kUnrelatedLoaded:
+ return os << "kUnrelatedLoaded";
+ case InstanceOfKind::kUnrelatedUnloaded:
+ return os << "kUnrelatedUnloaded";
+ }
+}
+
+class InstanceOfInstructionSimplifierTestGroup
+ : public InstructionSimplifierTestBase<CommonCompilerTestWithParam<InstanceOfKind>> {
+ public:
+ bool GetConstantResult() const {
+ switch (GetParam()) {
+ case InstanceOfKind::kSupertype:
+ case InstanceOfKind::kSelf:
+ return true;
+ case InstanceOfKind::kUnrelatedLoaded:
+ case InstanceOfKind::kUnrelatedUnloaded:
+ return false;
+ }
+ }
+
+ std::pair<HLoadClass*, HLoadClass*> GetLoadClasses(VariableSizedHandleScope* vshs) {
+ InstanceOfKind kind = GetParam();
+ ScopedObjectAccess soa(Thread::Current());
+ // The new-instance always needs to have a valid RTI since we DCHECK that.
+ HLoadClass* new_inst = MakeClassLoad(
+ /* ti= */ std::nullopt, vshs->NewHandle<mirror::Class>(GetClassRoot<mirror::ClassExt>()));
+ new_inst->SetValidLoadedClassRTI();
+ if (kind == InstanceOfKind::kSelf) {
+ return {new_inst, new_inst};
+ }
+ if (kind == InstanceOfKind::kUnrelatedUnloaded) {
+ HLoadClass* target_class = MakeClassLoad();
+ EXPECT_FALSE(target_class->GetLoadedClassRTI().IsValid());
+ return {new_inst, target_class};
+ }
+ // Force both classes to be real classes.
+ // For simplicity we use class-roots as the types. The new-inst will always
+ // be a ClassExt, unrelated-loaded will always be Throwable, and super will
+ // always be Object.
+ HLoadClass* target_class = MakeClassLoad(
+ /* ti= */ std::nullopt,
+ vshs->NewHandle<mirror::Class>(kind == InstanceOfKind::kSupertype ?
+ GetClassRoot<mirror::Object>() :
+ GetClassRoot<mirror::Throwable>()));
+ target_class->SetValidLoadedClassRTI();
+ EXPECT_TRUE(target_class->GetLoadedClassRTI().IsValid());
+ return {new_inst, target_class};
+ }
+};
+
// // ENTRY
// switch (param) {
// case 1:
@@ -272,6 +347,7 @@
HPhi* val_phi = MakePhi({c3, c10});
HPhi* obj_phi = MakePhi({obj1, obj2});
+ obj_phi->SetCanBeNull(false);
HInstruction* read_end = new (GetAllocator()) HPredicatedInstanceFieldGet(obj_phi,
nullptr,
val_phi,
@@ -307,4 +383,174 @@
EXPECT_INS_EQ(ifget->InputAt(0), obj_phi);
}
+// // ENTRY
+// obj = new Obj();
+// // Make sure this graph isn't broken
+// if (obj instanceof <other>) {
+// // LEFT
+// } else {
+// // RIGHT
+// }
+// EXIT
+// return obj.field
+TEST_P(InstanceOfInstructionSimplifierTestGroup, ExactClassInstanceOfOther) {
+ VariableSizedHandleScope vshs(Thread::Current());
+ InitGraph(/*handles=*/&vshs);
+
+ AdjacencyListGraph blks(SetupFromAdjacencyList("entry",
+ "exit",
+ {{"entry", "left"},
+ {"entry", "right"},
+ {"left", "breturn"},
+ {"right", "breturn"},
+ {"breturn", "exit"}}));
+#define GET_BLOCK(name) HBasicBlock* name = blks.Get(#name)
+ GET_BLOCK(entry);
+ GET_BLOCK(exit);
+ GET_BLOCK(breturn);
+ GET_BLOCK(left);
+ GET_BLOCK(right);
+#undef GET_BLOCK
+ EnsurePredecessorOrder(breturn, {left, right});
+ HInstruction* test_res = graph_->GetIntConstant(GetConstantResult() ? 1 : 0);
+
+ auto [new_inst_klass, target_klass] = GetLoadClasses(&vshs);
+ HInstruction* new_inst = MakeNewInstance(new_inst_klass);
+ new_inst->SetReferenceTypeInfo(
+ ReferenceTypeInfo::Create(new_inst_klass->GetClass(), /*is_exact=*/true));
+ HInstanceOf* instance_of = new (GetAllocator()) HInstanceOf(new_inst,
+ target_klass,
+ TypeCheckKind::kClassHierarchyCheck,
+ target_klass->GetClass(),
+ 0u,
+ GetAllocator(),
+ nullptr,
+ nullptr);
+ if (target_klass->GetLoadedClassRTI().IsValid()) {
+ instance_of->SetValidTargetClassRTI();
+ }
+ HInstruction* if_inst = new (GetAllocator()) HIf(instance_of);
+ entry->AddInstruction(new_inst_klass);
+ if (new_inst_klass != target_klass) {
+ entry->AddInstruction(target_klass);
+ }
+ entry->AddInstruction(new_inst);
+ entry->AddInstruction(instance_of);
+ entry->AddInstruction(if_inst);
+ ManuallyBuildEnvFor(new_inst_klass, {});
+ if (new_inst_klass != target_klass) {
+ target_klass->CopyEnvironmentFrom(new_inst_klass->GetEnvironment());
+ }
+ new_inst->CopyEnvironmentFrom(new_inst_klass->GetEnvironment());
+
+ HInstruction* goto_left = new (GetAllocator()) HGoto();
+ left->AddInstruction(goto_left);
+
+ HInstruction* goto_right = new (GetAllocator()) HGoto();
+ right->AddInstruction(goto_right);
+
+ HInstruction* read_bottom = MakeIFieldGet(new_inst, DataType::Type::kInt32, MemberOffset(32));
+ HInstruction* return_exit = new (GetAllocator()) HReturn(read_bottom);
+ breturn->AddInstruction(read_bottom);
+ breturn->AddInstruction(return_exit);
+
+ SetupExit(exit);
+
+ LOG(INFO) << "Pre simplification " << blks;
+ graph_->ClearDominanceInformation();
+ graph_->BuildDominatorTree();
+ InstructionSimplifier simp(graph_, /*codegen=*/nullptr);
+ simp.Run();
+
+ LOG(INFO) << "Post simplify " << blks;
+
+ if (!GetConstantResult() || GetParam() == InstanceOfKind::kSelf) {
+ EXPECT_INS_RETAINED(target_klass);
+ } else {
+ EXPECT_INS_REMOVED(target_klass);
+ }
+ EXPECT_INS_REMOVED(instance_of);
+ EXPECT_INS_EQ(if_inst->InputAt(0), test_res);
+}
+
+// // ENTRY
+// obj = new Obj();
+// (<other>)obj;
+// // Make sure this graph isn't broken
+// EXIT
+// return obj
+TEST_P(InstanceOfInstructionSimplifierTestGroup, ExactClassCheckCastOther) {
+ VariableSizedHandleScope vshs(Thread::Current());
+ InitGraph(/*handles=*/&vshs);
+
+ AdjacencyListGraph blks(SetupFromAdjacencyList("entry", "exit", {{"entry", "exit"}}));
+#define GET_BLOCK(name) HBasicBlock* name = blks.Get(#name)
+ GET_BLOCK(entry);
+ GET_BLOCK(exit);
+#undef GET_BLOCK
+
+ auto [new_inst_klass, target_klass] = GetLoadClasses(&vshs);
+ HInstruction* new_inst = MakeNewInstance(new_inst_klass);
+ new_inst->SetReferenceTypeInfo(
+ ReferenceTypeInfo::Create(new_inst_klass->GetClass(), /*is_exact=*/true));
+ HCheckCast* check_cast = new (GetAllocator()) HCheckCast(new_inst,
+ target_klass,
+ TypeCheckKind::kClassHierarchyCheck,
+ target_klass->GetClass(),
+ 0u,
+ GetAllocator(),
+ nullptr,
+ nullptr);
+ if (target_klass->GetLoadedClassRTI().IsValid()) {
+ check_cast->SetValidTargetClassRTI();
+ }
+ HInstruction* entry_return = new (GetAllocator()) HReturn(new_inst);
+ entry->AddInstruction(new_inst_klass);
+ if (new_inst_klass != target_klass) {
+ entry->AddInstruction(target_klass);
+ }
+ entry->AddInstruction(new_inst);
+ entry->AddInstruction(check_cast);
+ entry->AddInstruction(entry_return);
+ ManuallyBuildEnvFor(new_inst_klass, {});
+ if (new_inst_klass != target_klass) {
+ target_klass->CopyEnvironmentFrom(new_inst_klass->GetEnvironment());
+ }
+ new_inst->CopyEnvironmentFrom(new_inst_klass->GetEnvironment());
+
+ SetupExit(exit);
+
+ LOG(INFO) << "Pre simplification " << blks;
+ graph_->ClearDominanceInformation();
+ graph_->BuildDominatorTree();
+ InstructionSimplifier simp(graph_, /*codegen=*/nullptr);
+ simp.Run();
+
+ LOG(INFO) << "Post simplify " << blks;
+
+ if (!GetConstantResult() || GetParam() == InstanceOfKind::kSelf) {
+ EXPECT_INS_RETAINED(target_klass);
+ } else {
+ EXPECT_INS_REMOVED(target_klass);
+ }
+ if (GetConstantResult()) {
+ EXPECT_INS_REMOVED(check_cast);
+ } else {
+ EXPECT_INS_RETAINED(check_cast);
+ }
+}
+
+INSTANTIATE_TEST_SUITE_P(InstructionSimplifierTest,
+ InstanceOfInstructionSimplifierTestGroup,
+ testing::Values(InstanceOfKind::kSelf,
+ InstanceOfKind::kUnrelatedLoaded,
+ InstanceOfKind::kUnrelatedUnloaded,
+ InstanceOfKind::kSupertype));
+
} // namespace art
diff --git a/compiler/optimizing/load_store_analysis_test.cc b/compiler/optimizing/load_store_analysis_test.cc
index fd15802..67abc0f 100644
--- a/compiler/optimizing/load_store_analysis_test.cc
+++ b/compiler/optimizing/load_store_analysis_test.cc
@@ -38,9 +38,9 @@
namespace art {
-class LoadStoreAnalysisTest : public OptimizingUnitTest {
+class LoadStoreAnalysisTest : public CommonCompilerTest, public OptimizingUnitTestHelper {
public:
- LoadStoreAnalysisTest() : graph_(CreateGraph()) {}
+ LoadStoreAnalysisTest() {}
AdjacencyListGraph SetupFromAdjacencyList(
const std::string_view entry_name,
@@ -58,11 +58,10 @@
}
void CheckReachability(const AdjacencyListGraph& adj,
const std::vector<AdjacencyListGraph::Edge>& reach);
-
- HGraph* graph_;
};
TEST_F(LoadStoreAnalysisTest, ArrayHeapLocations) {
+ CreateGraph();
HBasicBlock* entry = new (GetAllocator()) HBasicBlock(graph_);
graph_->AddBlock(entry);
graph_->SetEntryBlock(entry);
@@ -145,6 +144,7 @@
}
TEST_F(LoadStoreAnalysisTest, FieldHeapLocations) {
+ CreateGraph();
HBasicBlock* entry = new (GetAllocator()) HBasicBlock(graph_);
graph_->AddBlock(entry);
graph_->SetEntryBlock(entry);
@@ -225,6 +225,7 @@
}
TEST_F(LoadStoreAnalysisTest, ArrayIndexAliasingTest) {
+ CreateGraph();
HBasicBlock* entry = new (GetAllocator()) HBasicBlock(graph_);
graph_->AddBlock(entry);
graph_->SetEntryBlock(entry);
@@ -318,6 +319,7 @@
}
TEST_F(LoadStoreAnalysisTest, ArrayAliasingTest) {
+ CreateGraph();
HBasicBlock* entry = new (GetAllocator()) HBasicBlock(graph_);
graph_->AddBlock(entry);
graph_->SetEntryBlock(entry);
@@ -529,6 +531,7 @@
}
TEST_F(LoadStoreAnalysisTest, ArrayIndexCalculationOverflowTest) {
+ CreateGraph();
HBasicBlock* entry = new (GetAllocator()) HBasicBlock(graph_);
graph_->AddBlock(entry);
graph_->SetEntryBlock(entry);
@@ -647,6 +650,7 @@
}
TEST_F(LoadStoreAnalysisTest, TestHuntOriginalRef) {
+ CreateGraph();
HBasicBlock* entry = new (GetAllocator()) HBasicBlock(graph_);
graph_->AddBlock(entry);
graph_->SetEntryBlock(entry);
@@ -753,6 +757,7 @@
}
TEST_F(LoadStoreAnalysisTest, ReachabilityTest1) {
+ CreateGraph();
AdjacencyListGraph blks(SetupFromAdjacencyList(
"entry",
"exit",
@@ -768,6 +773,7 @@
}
TEST_F(LoadStoreAnalysisTest, ReachabilityTest2) {
+ CreateGraph();
AdjacencyListGraph blks(SetupFromAdjacencyList(
"entry",
"exit",
@@ -784,6 +790,7 @@
}
TEST_F(LoadStoreAnalysisTest, ReachabilityTest3) {
+ CreateGraph();
AdjacencyListGraph blks(SetupFromAdjacencyList("entry",
"exit",
{ { "entry", "loop-header" },
@@ -839,6 +846,7 @@
// // EXIT
// obj.field;
TEST_F(LoadStoreAnalysisTest, PartialEscape) {
+ CreateGraph();
AdjacencyListGraph blks(SetupFromAdjacencyList(
"entry",
"exit",
@@ -946,6 +954,7 @@
// // EXIT
// obj.field2;
TEST_F(LoadStoreAnalysisTest, PartialEscape2) {
+ CreateGraph();
AdjacencyListGraph blks(SetupFromAdjacencyList(
"entry",
"exit",
@@ -1054,6 +1063,7 @@
// // EXIT
// obj.field;
TEST_F(LoadStoreAnalysisTest, PartialEscape3) {
+ CreateGraph();
AdjacencyListGraph blks(SetupFromAdjacencyList(
"entry",
"exit",
@@ -1162,6 +1172,171 @@
ASSERT_TRUE(contents.find(blks.Get("exit")) != contents.end());
}
+// For simplicity Partial LSE considers check-casts to escape. This means we
+// don't need to worry about inserting throws.
+// // ENTRY
+// obj = new Obj();
+// obj.field = 10;
+// if (parameter_value) {
+// // LEFT
+// (Foo)obj;
+// } else {
+// // RIGHT
+// obj.field = 20;
+// }
+// // EXIT
+// obj.field;
+TEST_F(LoadStoreAnalysisTest, PartialEscape4) {
+ CreateGraph();
+ AdjacencyListGraph blks(SetupFromAdjacencyList(
+ "entry",
+ "exit",
+ { { "entry", "left" }, { "entry", "right" }, { "left", "exit" }, { "right", "exit" } }));
+ HBasicBlock* entry = blks.Get("entry");
+ HBasicBlock* left = blks.Get("left");
+ HBasicBlock* right = blks.Get("right");
+ HBasicBlock* exit = blks.Get("exit");
+
+ HInstruction* bool_value = new (GetAllocator())
+ HParameterValue(graph_->GetDexFile(), dex::TypeIndex(1), 1, DataType::Type::kBool);
+ HInstruction* c10 = graph_->GetIntConstant(10);
+ HInstruction* c20 = graph_->GetIntConstant(20);
+ HInstruction* cls = MakeClassLoad();
+ HInstruction* new_inst = MakeNewInstance(cls);
+
+ HInstruction* write_entry = MakeIFieldSet(new_inst, c10, MemberOffset(32));
+ HInstruction* if_inst = new (GetAllocator()) HIf(bool_value);
+ entry->AddInstruction(bool_value);
+ entry->AddInstruction(cls);
+ entry->AddInstruction(new_inst);
+ entry->AddInstruction(write_entry);
+ entry->AddInstruction(if_inst);
+
+ ScopedNullHandle<mirror::Class> null_klass_;
+ HInstruction* cls2 = MakeClassLoad();
+ HInstruction* check_cast = new (GetAllocator()) HCheckCast(
+ new_inst, cls2, TypeCheckKind::kExactCheck, null_klass_, 0, GetAllocator(), nullptr, nullptr);
+ HInstruction* goto_left = new (GetAllocator()) HGoto();
+ left->AddInstruction(cls2);
+ left->AddInstruction(check_cast);
+ left->AddInstruction(goto_left);
+
+ HInstruction* write_right = MakeIFieldSet(new_inst, c20, MemberOffset(32));
+ HInstruction* goto_right = new (GetAllocator()) HGoto();
+ right->AddInstruction(write_right);
+ right->AddInstruction(goto_right);
+
+ HInstruction* read_final = MakeIFieldGet(new_inst, DataType::Type::kInt32, MemberOffset(32));
+ exit->AddInstruction(read_final);
+
+ ScopedArenaAllocator allocator(graph_->GetArenaStack());
+ LoadStoreAnalysis lsa(graph_, nullptr, &allocator, LoadStoreAnalysisType::kFull);
+ lsa.Run();
+
+ const HeapLocationCollector& heap_location_collector = lsa.GetHeapLocationCollector();
+ ReferenceInfo* info = heap_location_collector.FindReferenceInfoOf(new_inst);
+ const ExecutionSubgraph* esg = info->GetNoEscapeSubgraph();
+
+ ASSERT_TRUE(esg->IsValid());
+ ASSERT_TRUE(IsValidSubgraph(esg));
+ ASSERT_TRUE(AreExclusionsIndependent(graph_, esg));
+ std::unordered_set<const HBasicBlock*> contents(esg->ReachableBlocks().begin(),
+ esg->ReachableBlocks().end());
+
+ ASSERT_EQ(contents.size(), 3u);
+ ASSERT_TRUE(contents.find(blks.Get("left")) == contents.end());
+
+ ASSERT_TRUE(contents.find(blks.Get("right")) != contents.end());
+ ASSERT_TRUE(contents.find(blks.Get("entry")) != contents.end());
+ ASSERT_TRUE(contents.find(blks.Get("exit")) != contents.end());
+}
+
+// For simplicity Partial LSE considers instance-ofs with bitvectors to escape.
+// // ENTRY
+// obj = new Obj();
+// obj.field = 10;
+// if (parameter_value) {
+// // LEFT
+// obj instanceof /*bitvector*/ Foo;
+// } else {
+// // RIGHT
+// obj.field = 20;
+// }
+// // EXIT
+// obj.field;
+TEST_F(LoadStoreAnalysisTest, PartialEscape5) {
+ VariableSizedHandleScope vshs(Thread::Current());
+ CreateGraph(&vshs);
+ AdjacencyListGraph blks(SetupFromAdjacencyList(
+ "entry",
+ "exit",
+ { { "entry", "left" }, { "entry", "right" }, { "left", "exit" }, { "right", "exit" } }));
+ HBasicBlock* entry = blks.Get("entry");
+ HBasicBlock* left = blks.Get("left");
+ HBasicBlock* right = blks.Get("right");
+ HBasicBlock* exit = blks.Get("exit");
+
+ HInstruction* bool_value = new (GetAllocator())
+ HParameterValue(graph_->GetDexFile(), dex::TypeIndex(1), 1, DataType::Type::kBool);
+ HInstruction* c10 = graph_->GetIntConstant(10);
+ HInstruction* c20 = graph_->GetIntConstant(20);
+ HIntConstant* bs1 = graph_->GetIntConstant(0xffff);
+ HIntConstant* bs2 = graph_->GetIntConstant(0x00ff);
+ HInstruction* cls = MakeClassLoad();
+ HInstruction* null_const = graph_->GetNullConstant();
+ HInstruction* new_inst = MakeNewInstance(cls);
+
+ HInstruction* write_entry = MakeIFieldSet(new_inst, c10, MemberOffset(32));
+ HInstruction* if_inst = new (GetAllocator()) HIf(bool_value);
+ entry->AddInstruction(bool_value);
+ entry->AddInstruction(cls);
+ entry->AddInstruction(new_inst);
+ entry->AddInstruction(write_entry);
+ entry->AddInstruction(if_inst);
+
+ ScopedNullHandle<mirror::Class> null_klass_;
+ HInstruction* instanceof = new (GetAllocator()) HInstanceOf(new_inst,
+ null_const,
+ TypeCheckKind::kBitstringCheck,
+ null_klass_,
+ 0,
+ GetAllocator(),
+ bs1,
+ bs2);
+ HInstruction* goto_left = new (GetAllocator()) HGoto();
+ left->AddInstruction(instanceof);
+ left->AddInstruction(goto_left);
+
+ HInstruction* write_right = MakeIFieldSet(new_inst, c20, MemberOffset(32));
+ HInstruction* goto_right = new (GetAllocator()) HGoto();
+ right->AddInstruction(write_right);
+ right->AddInstruction(goto_right);
+
+ HInstruction* read_final = MakeIFieldGet(new_inst, DataType::Type::kInt32, MemberOffset(32));
+ exit->AddInstruction(read_final);
+
+ ScopedArenaAllocator allocator(graph_->GetArenaStack());
+ LoadStoreAnalysis lsa(graph_, nullptr, &allocator, LoadStoreAnalysisType::kFull);
+ lsa.Run();
+
+ const HeapLocationCollector& heap_location_collector = lsa.GetHeapLocationCollector();
+ ReferenceInfo* info = heap_location_collector.FindReferenceInfoOf(new_inst);
+ const ExecutionSubgraph* esg = info->GetNoEscapeSubgraph();
+
+ ASSERT_TRUE(esg->IsValid());
+ ASSERT_TRUE(IsValidSubgraph(esg));
+ ASSERT_TRUE(AreExclusionsIndependent(graph_, esg));
+ std::unordered_set<const HBasicBlock*> contents(esg->ReachableBlocks().begin(),
+ esg->ReachableBlocks().end());
+
+ ASSERT_EQ(contents.size(), 3u);
+ ASSERT_TRUE(contents.find(blks.Get("left")) == contents.end());
+
+ ASSERT_TRUE(contents.find(blks.Get("right")) != contents.end());
+ ASSERT_TRUE(contents.find(blks.Get("entry")) != contents.end());
+ ASSERT_TRUE(contents.find(blks.Get("exit")) != contents.end());
+}
+
// before we had predicated-set we needed to be able to remove the store as
// well. This test makes sure that still works.
// // ENTRY
@@ -1177,6 +1352,7 @@
// // call_func prevents the elimination of this store.
// obj.f2 = 0;
TEST_F(LoadStoreAnalysisTest, TotalEscapeAdjacentNoPredicated) {
+ CreateGraph();
AdjacencyListGraph blks(SetupFromAdjacencyList(
"entry",
"exit",
@@ -1288,6 +1464,7 @@
// // call_func prevents the elimination of this store.
// obj.f2 = 0;
TEST_F(LoadStoreAnalysisTest, TotalEscapeAdjacent) {
+ CreateGraph();
AdjacencyListGraph blks(SetupFromAdjacencyList(
"entry",
"exit",
@@ -1397,6 +1574,7 @@
// // EXIT
// obj.f0;
TEST_F(LoadStoreAnalysisTest, TotalEscape) {
+ CreateGraph();
AdjacencyListGraph blks(SetupFromAdjacencyList(
"entry",
"exit",
@@ -1509,6 +1687,7 @@
// // EXIT
// return obj;
TEST_F(LoadStoreAnalysisTest, TotalEscape2) {
+ CreateGraph();
AdjacencyListGraph blks(SetupFromAdjacencyList("entry", "exit", { { "entry", "exit" } }));
HBasicBlock* entry = blks.Get("entry");
HBasicBlock* exit = blks.Get("exit");
@@ -1587,6 +1766,7 @@
// // EXIT
// obj.f0
TEST_F(LoadStoreAnalysisTest, DoubleDiamondEscape) {
+ CreateGraph();
AdjacencyListGraph blks(SetupFromAdjacencyList("entry",
"exit",
{ { "entry", "high_left" },
diff --git a/compiler/optimizing/optimizing_unit_test.h b/compiler/optimizing/optimizing_unit_test.h
index 12c1b98..6600ff3 100644
--- a/compiler/optimizing/optimizing_unit_test.h
+++ b/compiler/optimizing/optimizing_unit_test.h
@@ -406,11 +406,12 @@
OptimizingUnitTestHelper::ManuallyBuildEnvFor(ins, &current_locals);
}
- HLoadClass* MakeClassLoad(std::optional<dex::TypeIndex> ti = std::nullopt) {
+ HLoadClass* MakeClassLoad(std::optional<dex::TypeIndex> ti = std::nullopt,
+ std::optional<Handle<mirror::Class>> klass = std::nullopt) {
return new (GetAllocator()) HLoadClass(graph_->GetCurrentMethod(),
ti ? *ti : dex::TypeIndex(class_idx_++),
graph_->GetDexFile(),
- /* klass= */ null_klass_,
+ /* klass= */ klass ? *klass : null_klass_,
/* is_referrers_class= */ false,
/* dex_pc= */ 0,
/* needs_access_check= */ false);