Improved LSE: Replacing loads with Phis.

Create "Phi placeholders" for tracking heap values that can
merge from different values and try to match existing Phis
or create new Phis to replace loads. For Phi placeholders
from loop headers we do not know whether they are fed by
unknown values through back-edges when processing the loop
header, so we delay processing loads that depend on them
until we walked the entire graph. We then try to match them
with existing instructions (when the location is unchanged
in the loop) or Phis or create new Phis if needed. If we
find a loop Phi placeholder fed with unknown value from a
back-edge, we mark the Phi placeholder unreplaceable and
reprocess loads and stores to propagate the unknown value.
This can sometimes allow other loads to be replaced. At the
end we re-calculate the heap values to find stores that can
be eliminated because they write over the same value.
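
A rough, hypothetical illustration (not one of the new
530-checker-lse tests; class and field names are made up)
of a pattern where loads can now be replaced by Phis:

  int test(int n) {
    MyClass obj = new MyClass();  // non-escaping singleton
    obj.field = 1;
    int sum = 0;
    for (int i = 0; i < n; ++i) {
      sum += obj.field;  // load now replaced by a loop Phi
      if ((i & 1) == 0) {
        obj.field = 2;   // store inside the loop
      }
    }
    return sum;
  }

Previously the store inside the loop marked the singleton's
location value as killed by loop side effects, so the load
could not be eliminated. With Phi placeholders the load is
roughly replaced by a loop Phi merging the value 1 from the
pre-header with a Phi of the values merged in the loop body.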

Golem results:
  art-opt-cc           arm  arm64    x86 x86-64
  CaffeineFloat      +6.7%  +3.0%  +5.9%  +3.8%
  KotlinMicroWhen   +33.7%  +4.8%  +1.8%  +0.6%
  art-opt (noisier than art-opt-cc)
  CaffeineFloat      +4.1%  +4.4%  +7.8% +10.5%
  KotlinMicroWhen   +33.6%  +2.0%  +1.8%  +1.8%
The MoveLiteralColumn benchmark seems to gain significantly
(up to 22% on art-opt-cc but under 10% on art-opt), but it
is very noisy, so the results are unreliable.

Insignificant code size changes for aosp_blueline-userdebug:
  - before:
    arm boot*.oat: 15303468
    arm64 boot*.oat: 18184736
    services.odex: 25195944
    grep -c pAllocObject boot.arm64.oatdump.txt: 27213
    grep -c pAllocArray boot.arm64.oatdump.txt: 3620
  - after:
    arm boot*.oat: 15299524 (-4KiB, -0.03%)
    arm64 boot*.oat: 18176528 (-8KiB, -0.05%)
    services.odex: 25191832 (-4KiB, -0.02%)
    grep -c pAllocObject boot.arm64.oatdump.txt: 27206 (-7)
    grep -c pAllocArray boot.arm64.oatdump.txt: 3615 (-5)

Test: New tests in 530-checker-lse.
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Test: blueline-userdebug boots.
Bug: 77906240
Change-Id: Ia9fe0cd3530f9d3941650dfefc00a7f7fd821994
diff --git a/compiler/optimizing/load_store_analysis.h b/compiler/optimizing/load_store_analysis.h
index 15e7045..882fe28 100644
--- a/compiler/optimizing/load_store_analysis.h
+++ b/compiler/optimizing/load_store_analysis.h
@@ -107,16 +107,10 @@
         index_(index),
         vector_length_(vector_length),
         declaring_class_def_index_(declaring_class_def_index),
-        value_killed_by_loop_side_effects_(true),
         has_aliased_locations_(false) {
     DCHECK(ref_info != nullptr);
     DCHECK((offset == kInvalidFieldOffset && index != nullptr) ||
            (offset != kInvalidFieldOffset && index == nullptr));
-    if (ref_info->IsSingleton() && !IsArray()) {
-      // Assume this location's value cannot be killed by loop side effects
-      // until proven otherwise.
-      value_killed_by_loop_side_effects_ = false;
-    }
   }
 
   ReferenceInfo* GetReferenceInfo() const { return ref_info_; }
@@ -135,14 +129,6 @@
     return index_ != nullptr;
   }
 
-  bool IsValueKilledByLoopSideEffects() const {
-    return value_killed_by_loop_side_effects_;
-  }
-
-  void SetValueKilledByLoopSideEffects(bool val) {
-    value_killed_by_loop_side_effects_ = val;
-  }
-
   bool HasAliasedLocations() const {
     return has_aliased_locations_;
   }
@@ -171,12 +157,6 @@
   // Invalid when this HeapLocation is not field access.
   const int16_t declaring_class_def_index_;
 
-  // Value of this location may be killed by loop side effects
-  // because this location is stored into inside a loop.
-  // This gives better info on whether a singleton's location
-  // value may be killed by loop side effects.
-  bool value_killed_by_loop_side_effects_;
-
   // Has aliased heap locations in the method, due to either the
   // reference is aliased or the array element is aliased via different
   // index names.
@@ -451,12 +431,12 @@
     GetOrCreateReferenceInfo(instruction);
   }
 
-  HeapLocation* GetOrCreateHeapLocation(HInstruction* ref,
-                                        DataType::Type type,
-                                        size_t offset,
-                                        HInstruction* index,
-                                        size_t vector_length,
-                                        int16_t declaring_class_def_index) {
+  void MaybeCreateHeapLocation(HInstruction* ref,
+                               DataType::Type type,
+                               size_t offset,
+                               HInstruction* index,
+                               size_t vector_length,
+                               int16_t declaring_class_def_index) {
     HInstruction* original_ref = HuntForOriginalReference(ref);
     ReferenceInfo* ref_info = GetOrCreateReferenceInfo(original_ref);
     size_t heap_location_idx = FindHeapLocationIndex(
@@ -465,31 +445,29 @@
       HeapLocation* heap_loc = new (allocator_)
           HeapLocation(ref_info, type, offset, index, vector_length, declaring_class_def_index);
       heap_locations_.push_back(heap_loc);
-      return heap_loc;
     }
-    return heap_locations_[heap_location_idx];
   }
 
-  HeapLocation* VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
+  void VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
     if (field_info.IsVolatile()) {
       has_volatile_ = true;
     }
     DataType::Type type = field_info.GetFieldType();
     const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
     const size_t offset = field_info.GetFieldOffset().SizeValue();
-    return GetOrCreateHeapLocation(ref,
-                                   type,
-                                   offset,
-                                   nullptr,
-                                   HeapLocation::kScalar,
-                                   declaring_class_def_index);
+    MaybeCreateHeapLocation(ref,
+                            type,
+                            offset,
+                            nullptr,
+                            HeapLocation::kScalar,
+                            declaring_class_def_index);
   }
 
   void VisitArrayAccess(HInstruction* array,
                         HInstruction* index,
                         DataType::Type type,
                         size_t vector_length) {
-    GetOrCreateHeapLocation(array,
+    MaybeCreateHeapLocation(array,
                             type,
                             HeapLocation::kInvalidFieldOffset,
                             index,
@@ -503,29 +481,8 @@
   }
 
   void VisitInstanceFieldSet(HInstanceFieldSet* instruction) override {
-    HeapLocation* location = VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
+    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
     has_heap_stores_ = true;
-    if (location->GetReferenceInfo()->IsSingleton()) {
-      // A singleton's location value may be killed by loop side effects if it's
-      // defined before that loop, and it's stored into inside that loop.
-      HLoopInformation* loop_info = instruction->GetBlock()->GetLoopInformation();
-      if (loop_info != nullptr) {
-        HInstruction* ref = location->GetReferenceInfo()->GetReference();
-        DCHECK(ref->IsNewInstance());
-        if (loop_info->IsDefinedOutOfTheLoop(ref)) {
-          // ref's location value may be killed by this loop's side effects.
-          location->SetValueKilledByLoopSideEffects(true);
-        } else {
-          // ref is defined inside this loop so this loop's side effects cannot
-          // kill its location value at the loop header since ref/its location doesn't
-          // exist yet at the loop header.
-        }
-      }
-    } else {
-      // For non-singletons, value_killed_by_loop_side_effects_ is inited to
-      // true.
-      DCHECK_EQ(location->IsValueKilledByLoopSideEffects(), true);
-    }
   }
 
   void VisitStaticFieldGet(HStaticFieldGet* instruction) override {