author Santiago Aboy Solanes <solanes@google.com> 2022-04-22 16:51:59 +0100
committer Santiago Aboy Solanes <solanes@google.com> 2022-05-04 10:29:31 +0000
commit c961939b7ca03e54d50edc369dc09af921245a9e (patch)
tree bad75934af744e45c2111f28966c923d37236649 /compiler
parent 42f1af867210b5e747484141d5d41696381a1486 (diff)
LSE fix removing observable stores in try catch
Follow-up to aosp/2072271, focusing on try catches.

Test: art/test/testrunner/testrunner.py --host --64 --optimizing -b
Bug: 229706824, 227283233
Change-Id: Ic935a6d9f1b135b2ec9d28ca6b64df2001c0aa19
(cherry picked from commit 614638b9c178552953feda9ed366bd877002f1c9)
Merged-In: Ic935a6d9f1b135b2ec9d28ca6b64df2001c0aa19
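For context, a hypothetical Java sketch of the bug class this change addresses (the class and method names are illustrative assumptions, not the CL's regression test): a store to an otherwise non-escaping object made before a throwing instruction inside a try is observable from the catch handler, because the exception never leaves the method.

// Illustrative only; names and shapes are assumptions, not taken from the CL.
class Point {
  int x;
}

class LseTryCatchExample {
  static int observableStoreInTryCatch(int[] values) {
    Point p = new Point();      // never escapes this method: a "singleton" for LSE
    try {
      p.x = 42;                 // looks dead on the normal path...
      int unused = values[10];  // ...but this array access may throw
      p.x = 0;                  // normal-path overwrite makes the first store seem removable
    } catch (ArrayIndexOutOfBoundsException e) {
      // The exception is caught without leaving the method, so this handler can
      // still observe p.x == 42; eliminating that store would be incorrect.
      return p.x;
    }
    return p.x;
  }
}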
Diffstat (limited to 'compiler')
-rw-r--r-- compiler/optimizing/load_store_elimination.cc | 31
1 file changed, 22 insertions, 9 deletions
diff --git a/compiler/optimizing/load_store_elimination.cc b/compiler/optimizing/load_store_elimination.cc
index e3d84815f9..9b8f07e969 100644
--- a/compiler/optimizing/load_store_elimination.cc
+++ b/compiler/optimizing/load_store_elimination.cc
@@ -1040,6 +1040,8 @@ class LSEVisitor final : private HGraphDelegateVisitor {
}
void VisitDeoptimize(HDeoptimize* instruction) override {
+ // If we are in a try catch, even singletons are observable.
+ const bool in_try_catch = instruction->GetBlock()->GetTryCatchInformation() != nullptr;
HBasicBlock* block = instruction->GetBlock();
ScopedArenaVector<ValueRecord>& heap_values = heap_values_for_[block->GetBlockId()];
for (size_t i = 0u, size = heap_values.size(); i != size; ++i) {
@@ -1051,7 +1053,7 @@ class LSEVisitor final : private HGraphDelegateVisitor {
// for singletons that don't escape in the deoptimization environment.
bool observable = true;
ReferenceInfo* info = heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo();
- if (info->IsSingleton()) {
+ if (!in_try_catch && info->IsSingleton()) {
HInstruction* reference = info->GetReference();
// Finalizable objects always escape.
const bool finalizable_object =
@@ -1076,11 +1078,11 @@ class LSEVisitor final : private HGraphDelegateVisitor {
}
// Keep necessary stores before exiting a method via return/throw.
- void HandleExit(HBasicBlock* block) {
+ void HandleExit(HBasicBlock* block, bool must_keep_stores = false) {
ScopedArenaVector<ValueRecord>& heap_values = heap_values_for_[block->GetBlockId()];
for (size_t i = 0u, size = heap_values.size(); i != size; ++i) {
ReferenceInfo* ref_info = heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo();
- if (IsEscapingObject(ref_info, block, i)) {
+ if (must_keep_stores || IsEscapingObject(ref_info, block, i)) {
KeepStores(heap_values[i].stored_by);
heap_values[i].stored_by = Value::PureUnknown();
}
@@ -1097,7 +1099,10 @@ class LSEVisitor final : private HGraphDelegateVisitor {
void HandleThrowingInstruction(HInstruction* instruction) {
DCHECK(instruction->CanThrow());
- HandleExit(instruction->GetBlock());
+ // If we are inside of a try catch, singletons can become visible since we may not exit the
+ // method.
+ HandleExit(instruction->GetBlock(),
+ instruction->GetBlock()->GetTryCatchInformation() != nullptr);
}
void VisitMethodEntryHook(HMethodEntryHook* method_entry) override {
@@ -1153,6 +1158,9 @@ class LSEVisitor final : private HGraphDelegateVisitor {
void HandleInvoke(HInstruction* instruction) {
// If `instruction` can throw we have to presume all stores are visible.
const bool can_throw = instruction->CanThrow();
+ // If we are in a try catch, even singletons are observable.
+ const bool can_throw_in_try_catch =
+ can_throw && instruction->GetBlock()->GetTryCatchInformation() != nullptr;
SideEffects side_effects = instruction->GetSideEffects();
ScopedArenaVector<ValueRecord>& heap_values =
heap_values_for_[instruction->GetBlock()->GetBlockId()];
@@ -1178,9 +1186,10 @@ class LSEVisitor final : private HGraphDelegateVisitor {
return cohort.PrecedesBlock(blk);
});
};
- if (ref_info->IsSingleton() ||
- // partial and we aren't currently escaping and we haven't escaped yet.
- (ref_info->IsPartialSingleton() && partial_singleton_did_not_escape(ref_info, blk))) {
+ if (!can_throw_in_try_catch &&
+ (ref_info->IsSingleton() ||
+ // partial and we aren't currently escaping and we haven't escaped yet.
+ (ref_info->IsPartialSingleton() && partial_singleton_did_not_escape(ref_info, blk)))) {
// Singleton references cannot be seen by the callee.
} else {
if (can_throw || side_effects.DoesAnyRead() || side_effects.DoesAnyWrite()) {
@@ -1226,6 +1235,8 @@ class LSEVisitor final : private HGraphDelegateVisitor {
}
void VisitNewInstance(HNewInstance* new_instance) override {
+ // If we are in a try catch, even singletons are observable.
+ const bool in_try_catch = new_instance->GetBlock()->GetTryCatchInformation() != nullptr;
ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(new_instance);
if (ref_info == nullptr) {
// new_instance isn't used for field accesses. No need to process it.
@@ -1254,7 +1265,7 @@ class LSEVisitor final : private HGraphDelegateVisitor {
heap_values[i].value = Value::ForInstruction(new_instance->GetLoadClass());
heap_values[i].stored_by = Value::PureUnknown();
}
- } else if (IsEscapingObject(info, block, i)) {
+ } else if (in_try_catch || IsEscapingObject(info, block, i)) {
// Since NewInstance can throw, we presume all previous stores could be visible.
KeepStores(heap_values[i].stored_by);
heap_values[i].stored_by = Value::PureUnknown();
@@ -1263,6 +1274,8 @@ class LSEVisitor final : private HGraphDelegateVisitor {
}
void VisitNewArray(HNewArray* new_array) override {
+ // If we are in a try catch, even singletons are observable.
+ const bool in_try_catch = new_array->GetBlock()->GetTryCatchInformation() != nullptr;
ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(new_array);
if (ref_info == nullptr) {
// new_array isn't used for array accesses. No need to process it.
@@ -1287,7 +1300,7 @@ class LSEVisitor final : private HGraphDelegateVisitor {
// Array elements are set to default heap values.
heap_values[i].value = Value::Default();
heap_values[i].stored_by = Value::PureUnknown();
- } else if (IsEscapingObject(info, block, i)) {
+ } else if (in_try_catch || IsEscapingObject(info, block, i)) {
// Since NewArray can throw, we presume all previous stores could be visible.
KeepStores(heap_values[i].stored_by);
heap_values[i].stored_by = Value::PureUnknown();
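
A second hypothetical sketch, this one for the HandleInvoke path (again illustrative, reusing the Point class from the earlier sketch): a store made before a potentially throwing call whose block sits inside a try must be kept, since the catch handler may read it if the call throws.

// Illustrative only; the method shape is an assumption, not the CL's regression test.
static int storeBeforeThrowingCall(String s) {
  Point p = new Point();          // still a non-escaping singleton
  p.x = 7;                        // overwritten on the normal path, but...
  try {
    p.x = Integer.parseInt(s);    // invoke that can throw NumberFormatException
  } catch (NumberFormatException e) {
    return p.x;                   // ...the catch expects to read 7 if parseInt threw
  }
  return p.x;
}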