Don't merge values for exit block in LSE.

This enables some additional optimizations, since the exit block doesn't
really merge values: control flow ends in its predecessors, and each
predecessor keeps its own necessary stores.
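
For illustration, a hypothetical Java-level sketch (TestClass and its
field are made-up names): stores into a non-escaping singleton right
before a return can now be eliminated even when different return paths
store conflicting values, because the exit block no longer merges them
into an unknown heap value.

    static int test(boolean flag) {
      TestClass obj = new TestClass();  // never escapes: a singleton in LSE
      if (flag) {
        obj.field = 1;  // dead store: obj is unreachable after the return
        return 1;
      } else {
        obj.field = 2;  // dead store on this path as well
        return 2;
      }
    }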

Test: run-test on host.
Change-Id: I21ed7e0e43a3bc5d9ed2dabfad8462129b904eb7
diff --git a/compiler/optimizing/load_store_elimination.cc b/compiler/optimizing/load_store_elimination.cc
index 605fdae..89ad85e 100644
--- a/compiler/optimizing/load_store_elimination.cc
+++ b/compiler/optimizing/load_store_elimination.cc
@@ -199,6 +199,12 @@
     if (predecessors.size() == 0) {
       return;
     }
+    if (block->IsExitBlock()) {
+      // The exit block doesn't really merge values since control flow ends in
+      // its predecessors. Each predecessor needs to make sure necessary stores
+      // are kept.
+      return;
+    }
 
     ScopedArenaVector<HInstruction*>& heap_values = heap_values_for_[block->GetBlockId()];
     for (size_t i = 0; i < heap_values.size(); i++) {
@@ -233,15 +239,23 @@
         }
       }
 
-      if (merged_value == kUnknownHeapValue || ref_info->IsSingletonAndNonRemovable()) {
-        // There are conflicting heap values from different predecessors,
-        // or the heap value may be needed after method return or deoptimization.
-        // Keep the last store in each predecessor since future loads cannot be eliminated.
-        for (HBasicBlock* predecessor : predecessors) {
-          ScopedArenaVector<HInstruction*>& pred_values =
-              heap_values_for_[predecessor->GetBlockId()];
-          KeepIfIsStore(pred_values[i]);
+      if (ref_info->IsSingleton()) {
+        if (ref_info->IsSingletonAndNonRemovable() ||
+            (merged_value == kUnknownHeapValue &&
+             !block->IsSingleReturnOrReturnVoidAllowingPhis())) {
+          // The heap value may be needed after method return or deoptimization,
+          // or there are conflicting heap values from different predecessors and
+          // this block is not a single-return block. In either case, keep the
+          // last store in each predecessor since future loads may not be
+          // eliminated.
+          for (HBasicBlock* predecessor : predecessors) {
+            ScopedArenaVector<HInstruction*>& pred_values =
+                heap_values_for_[predecessor->GetBlockId()];
+            KeepIfIsStore(pred_values[i]);
+          }
         }
+      } else {
+        // Currently we don't eliminate stores to non-singletons.
       }
 
       if ((merged_value == nullptr) || !from_all_predecessors) {
@@ -549,6 +563,31 @@
     }
   }
 
+  // Keep necessary stores before exiting a method via return/throw.
+  void HandleExit(HBasicBlock* block) {
+    const ScopedArenaVector<HInstruction*>& heap_values =
+        heap_values_for_[block->GetBlockId()];
+    for (size_t i = 0; i < heap_values.size(); i++) {
+      HInstruction* heap_value = heap_values[i];
+      ReferenceInfo* ref_info = heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo();
+      if (!ref_info->IsSingletonAndRemovable()) {
+        KeepIfIsStore(heap_value);
+      }
+    }
+  }
+
+  void VisitReturn(HReturn* instruction) OVERRIDE {
+    HandleExit(instruction->GetBlock());
+  }
+
+  void VisitReturnVoid(HReturnVoid* return_void) OVERRIDE {
+    HandleExit(return_void->GetBlock());
+  }
+
+  void VisitThrow(HThrow* throw_instruction) OVERRIDE {
+    HandleExit(throw_instruction->GetBlock());
+  }
+
   void HandleInvoke(HInstruction* instruction) {
     SideEffects side_effects = instruction->GetSideEffects();
     ScopedArenaVector<HInstruction*>& heap_values =