Revert^4 "Partial LSE analysis & store removal"
We incorrectly handled merging unknowns in some situations.
Specifically, in cases where we are unable to materialize loop-phis we
could end up with PureUnknowns that hide stores which still need to be
kept.
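
Purely as an illustrative sketch (class and method names below are
hypothetical, not taken from the failing code), the problematic shape
is roughly a store whose value is merged with in-loop stores at a loop
header while the object is observable inside the loop:

  class LoopMergeSketch {
    int field;

    static void observe(LoopMergeSketch o) { /* opaque to the analysis */ }

    static int run(int iterations, boolean flag) {
      LoopMergeSketch o = new LoopMergeSketch();
      o.field = 1;        // store that must stay if the merge is unknown
      for (int i = 0; i < iterations; i++) {
        if (flag) {
          o.field = i;    // conditional store merged at the loop header
        }
        observe(o);       // the object is observable inside the loop
      }
      return o.field;
    }
  }

If the loop-phi for the merged field value cannot be materialized, the
merge degrades to a PureUnknown, and that unknown must not be allowed
to hide the fact that the earlier store is still observable.
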
In an unrelated issue, we were incorrectly treating some values as
escaping merely because they were live at the point of an invoke.
Since SearchPhiPlaceholdersForKeptStores used a more precise notion of
escapes, we could end up removing stores without being able to replace
their values.
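
Again as a hypothetical sketch (names invented for illustration), the
shape here is an object that is merely live across a call rather than
being passed to it:

  class LiveAcrossInvokeSketch {
    int field;

    static void unrelatedCall() { /* does not receive the object */ }

    static int run(boolean flag) {
      LiveAcrossInvokeSketch o = new LiveAcrossInvokeSketch();
      o.field = 42;       // store under consideration
      unrelatedCall();    // o is live here but is not an argument
      if (flag) {
        o.field = 7;
      }
      return o.field;
    }
  }

The mismatch between that over-approximation and the stricter notion
used by the kept-store search is what could let a store be removed
without a value available to replace it.
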
This reverts commit 2316b3a0779f3721a78681f5c70ed6624ecaebef.
This unreverts commit b6837f0350ff66c13582b0e94178dd5ca283ff0a.
This reverts commit fe270426c8a2a69a8f669339e83b86fbf40e25a1.
This unreverts commit bb6cda60e4418c0ab557ea4090e046bed8206763.
Bug: 67037140
Bug: 173120044
Reason for revert: Fixed issue causing incorrect store elimination
Test: ./test.py --host
Test: Boot cuttlefish
Test: atest FrameworksServicesTests:com.android.server.job.BackgroundRestrictionsTest#testPowerWhiteList
Change-Id: I2ebae9ccfaf5169d551c5019b547589d0fce1dc9
diff --git a/compiler/optimizing/load_store_analysis.cc b/compiler/optimizing/load_store_analysis.cc
index 7a67fc5..3daa647 100644
--- a/compiler/optimizing/load_store_analysis.cc
+++ b/compiler/optimizing/load_store_analysis.cc
@@ -88,6 +88,94 @@
   return CanIntegerRangesOverlap(l1, h1, l2, h2);
 }
+// Make sure we mark any writes/potential writes to heap-locations within partially
+// escaped values as escaping.
+void ReferenceInfo::PrunePartialEscapeWrites() {
+  if (!subgraph_.IsValid()) {
+    // All paths escape.
+    return;
+  }
+  HGraph* graph = reference_->GetBlock()->GetGraph();
+  ArenaBitVector additional_exclusions(
+      allocator_, graph->GetBlocks().size(), false, kArenaAllocLSA);
+  for (const HUseListNode<HInstruction*>& use : reference_->GetUses()) {
+    const HInstruction* user = use.GetUser();
+    const bool possible_exclusion =
+        !additional_exclusions.IsBitSet(user->GetBlock()->GetBlockId()) &&
+        subgraph_.ContainsBlock(user->GetBlock());
+    const bool is_written_to =
+        (user->IsUnresolvedInstanceFieldSet() || user->IsUnresolvedStaticFieldSet() ||
+         user->IsInstanceFieldSet() || user->IsStaticFieldSet() || user->IsArraySet()) &&
+        (reference_ == user->InputAt(0));
+    if (possible_exclusion && is_written_to &&
+        std::any_of(subgraph_.UnreachableBlocks().begin(),
+                    subgraph_.UnreachableBlocks().end(),
+                    [&](const HBasicBlock* excluded) -> bool {
+                      return reference_->GetBlock()->GetGraph()->PathBetween(excluded,
+                                                                             user->GetBlock());
+                    })) {
+      // This object had memory written to it somewhere, if it escaped along
+      // some paths prior to the current block this write also counts as an
+      // escape.
+      additional_exclusions.SetBit(user->GetBlock()->GetBlockId());
+    }
+  }
+  if (UNLIKELY(additional_exclusions.IsAnyBitSet())) {
+    for (uint32_t exc : additional_exclusions.Indexes()) {
+      subgraph_.RemoveBlock(graph->GetBlocks()[exc]);
+    }
+  }
+}
+
+bool HeapLocationCollector::InstructionEligibleForLSERemoval(HInstruction* inst) const {
+  if (inst->IsNewInstance()) {
+    return !inst->AsNewInstance()->NeedsChecks();
+  } else if (inst->IsNewArray()) {
+    HInstruction* array_length = inst->AsNewArray()->GetLength();
+    bool known_array_length =
+        array_length->IsIntConstant() && array_length->AsIntConstant()->GetValue() >= 0;
+    return known_array_length &&
+           std::all_of(inst->GetUses().cbegin(),
+                       inst->GetUses().cend(),
+                       [&](const HUseListNode<HInstruction*>& user) {
+                         if (user.GetUser()->IsArrayGet() || user.GetUser()->IsArraySet()) {
+                           return user.GetUser()->InputAt(1)->IsIntConstant();
+                         }
+                         return true;
+                       });
+  } else {
+    return false;
+  }
+}
+
+void HeapLocationCollector::DumpReferenceStats(OptimizingCompilerStats* stats) {
+  if (stats == nullptr) {
+    return;
+  }
+  std::vector<bool> seen_instructions(GetGraph()->GetCurrentInstructionId(), false);
+  for (auto hl : heap_locations_) {
+    auto ri = hl->GetReferenceInfo();
+    if (ri == nullptr || seen_instructions[ri->GetReference()->GetId()]) {
+      continue;
+    }
+    auto instruction = ri->GetReference();
+    seen_instructions[instruction->GetId()] = true;
+    if (ri->IsSingletonAndRemovable()) {
+      if (InstructionEligibleForLSERemoval(instruction)) {
+        MaybeRecordStat(stats, MethodCompilationStat::kFullLSEPossible);
+      }
+    }
+    // TODO This is an estimate of the number of allocations we will be able
+    // to (partially) remove. As additional work is done this can be refined.
+    if (ri->IsPartialSingleton() && instruction->IsNewInstance() &&
+        ri->GetNoEscapeSubgraph()->ContainsBlock(instruction->GetBlock()) &&
+        !ri->GetNoEscapeSubgraph()->GetExcludedCohorts().empty() &&
+        InstructionEligibleForLSERemoval(instruction)) {
+      MaybeRecordStat(stats, MethodCompilationStat::kPartialLSEPossible);
+    }
+  }
+}
+
 bool HeapLocationCollector::CanArrayElementsAlias(const HInstruction* idx1,
                                                   const size_t vector_length1,
                                                   const HInstruction* idx2,
@@ -172,6 +260,7 @@
   }
   heap_location_collector_.BuildAliasingMatrix();
+  heap_location_collector_.DumpReferenceStats(stats_);
   return true;
 }