 compiler/Android.bp                             |   2
 compiler/optimizing/load_store_analysis.cc      |  51
 compiler/optimizing/load_store_analysis.h       | 518
 compiler/optimizing/load_store_analysis_test.cc | 187
 compiler/optimizing/load_store_elimination.cc   | 499
 compiler/optimizing/load_store_elimination.h    |   9
 compiler/optimizing/optimizing_compiler.cc      |  19
 7 files changed, 788 insertions(+), 497 deletions(-)
diff --git a/compiler/Android.bp b/compiler/Android.bp
index df896dc73c..307a42cbba 100644
--- a/compiler/Android.bp
+++ b/compiler/Android.bp
@@ -67,6 +67,7 @@ art_cc_defaults {
"optimizing/intrinsics.cc",
"optimizing/licm.cc",
"optimizing/linear_order.cc",
+ "optimizing/load_store_analysis.cc",
"optimizing/load_store_elimination.cc",
"optimizing/locations.cc",
"optimizing/loop_optimization.cc",
@@ -374,6 +375,7 @@ art_cc_test {
"jni/jni_cfi_test.cc",
"optimizing/codegen_test.cc",
+ "optimizing/load_store_analysis_test.cc",
"optimizing/optimizing_cfi_test.cc",
"optimizing/scheduler_test.cc",
],
diff --git a/compiler/optimizing/load_store_analysis.cc b/compiler/optimizing/load_store_analysis.cc
new file mode 100644
index 0000000000..f2ee345c8c
--- /dev/null
+++ b/compiler/optimizing/load_store_analysis.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "load_store_analysis.h"
+
+namespace art {
+
+// A cap for the number of heap locations to prevent pathological time/space consumption.
+// The number of heap locations for most methods stays below this threshold.
+constexpr size_t kMaxNumberOfHeapLocations = 32;
+
+void LoadStoreAnalysis::Run() {
+ for (HBasicBlock* block : graph_->GetReversePostOrder()) {
+ heap_location_collector_.VisitBasicBlock(block);
+ }
+
+ if (heap_location_collector_.GetNumberOfHeapLocations() > kMaxNumberOfHeapLocations) {
+ // Bail out if there are too many heap locations to deal with.
+ heap_location_collector_.CleanUp();
+ return;
+ }
+ if (!heap_location_collector_.HasHeapStores()) {
+ // Without heap stores, this pass would act mostly as GVN on heap accesses.
+ heap_location_collector_.CleanUp();
+ return;
+ }
+ if (heap_location_collector_.HasVolatile() || heap_location_collector_.HasMonitorOps()) {
+ // Don't do load/store elimination if the method has volatile field accesses or
+ // monitor operations, for now.
+ // TODO: do it right.
+ heap_location_collector_.CleanUp();
+ return;
+ }
+
+ heap_location_collector_.BuildAliasingMatrix();
+}
+
+} // namespace art
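
(A minimal sketch of how a dependent pass is expected to consume this analysis,
mirroring the LoadStoreElimination changes later in this diff. When Run() bails
out it calls CleanUp(), which empties heap_locations_, so a client can detect
the bailout by checking for zero heap locations. The snippet is illustrative
and not part of the commit:

    LoadStoreAnalysis lsa(graph);
    lsa.Run();
    const HeapLocationCollector& collector = lsa.GetHeapLocationCollector();
    if (collector.GetNumberOfHeapLocations() == 0) {
      // LSA found nothing to work with or bailed out; skip the optimization.
      return;
    }
)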
diff --git a/compiler/optimizing/load_store_analysis.h b/compiler/optimizing/load_store_analysis.h
new file mode 100644
index 0000000000..4e940f30bf
--- /dev/null
+++ b/compiler/optimizing/load_store_analysis.h
@@ -0,0 +1,518 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_
+#define ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_
+
+#include "escape.h"
+#include "nodes.h"
+#include "optimization.h"
+
+namespace art {
+
+// A ReferenceInfo contains additional info about a reference such as
+// whether it's a singleton, returned, etc.
+class ReferenceInfo : public ArenaObject<kArenaAllocMisc> {
+ public:
+ ReferenceInfo(HInstruction* reference, size_t pos)
+ : reference_(reference),
+ position_(pos),
+ is_singleton_(true),
+ is_singleton_and_not_returned_(true),
+ is_singleton_and_not_deopt_visible_(true),
+ has_index_aliasing_(false) {
+ CalculateEscape(reference_,
+ nullptr,
+ &is_singleton_,
+ &is_singleton_and_not_returned_,
+ &is_singleton_and_not_deopt_visible_);
+ }
+
+ HInstruction* GetReference() const {
+ return reference_;
+ }
+
+ size_t GetPosition() const {
+ return position_;
+ }
+
+  // Returns true if reference_ is the only name that can refer to its value during
+  // the lifetime of the method, i.e. it is guaranteed not to have any alias in
+  // the method (including its callees).
+ bool IsSingleton() const {
+ return is_singleton_;
+ }
+
+ // Returns true if reference_ is a singleton and not returned to the caller or
+ // used as an environment local of an HDeoptimize instruction.
+ // The allocation and stores into reference_ may be eliminated for such cases.
+ bool IsSingletonAndRemovable() const {
+ return is_singleton_and_not_returned_ && is_singleton_and_not_deopt_visible_;
+ }
+
+ // Returns true if reference_ is a singleton and returned to the caller or
+ // used as an environment local of an HDeoptimize instruction.
+ bool IsSingletonAndNonRemovable() const {
+ return is_singleton_ &&
+ (!is_singleton_and_not_returned_ || !is_singleton_and_not_deopt_visible_);
+ }
+
+ bool HasIndexAliasing() {
+ return has_index_aliasing_;
+ }
+
+ void SetHasIndexAliasing(bool has_index_aliasing) {
+ // Only allow setting to true.
+ DCHECK(has_index_aliasing);
+ has_index_aliasing_ = has_index_aliasing;
+ }
+
+ private:
+ HInstruction* const reference_;
+ const size_t position_; // position in HeapLocationCollector's ref_info_array_.
+
+ // Can only be referred to by a single name in the method.
+ bool is_singleton_;
+ // Is singleton and not returned to caller.
+ bool is_singleton_and_not_returned_;
+ // Is singleton and not used as an environment local of HDeoptimize.
+ bool is_singleton_and_not_deopt_visible_;
+ // Some heap locations with reference_ have array index aliasing,
+ // e.g. arr[i] and arr[j] may be the same location.
+ bool has_index_aliasing_;
+
+ DISALLOW_COPY_AND_ASSIGN(ReferenceInfo);
+};
+
+// A heap location is a reference-offset/index pair that a value can be loaded from
+// or stored to.
+class HeapLocation : public ArenaObject<kArenaAllocMisc> {
+ public:
+ static constexpr size_t kInvalidFieldOffset = -1;
+
+ // TODO: more fine-grained array types.
+ static constexpr int16_t kDeclaringClassDefIndexForArrays = -1;
+
+ HeapLocation(ReferenceInfo* ref_info,
+ size_t offset,
+ HInstruction* index,
+ int16_t declaring_class_def_index)
+ : ref_info_(ref_info),
+ offset_(offset),
+ index_(index),
+ declaring_class_def_index_(declaring_class_def_index),
+ value_killed_by_loop_side_effects_(true) {
+ DCHECK(ref_info != nullptr);
+ DCHECK((offset == kInvalidFieldOffset && index != nullptr) ||
+ (offset != kInvalidFieldOffset && index == nullptr));
+ if (ref_info->IsSingleton() && !IsArrayElement()) {
+ // Assume this location's value cannot be killed by loop side effects
+ // until proven otherwise.
+ value_killed_by_loop_side_effects_ = false;
+ }
+ }
+
+ ReferenceInfo* GetReferenceInfo() const { return ref_info_; }
+ size_t GetOffset() const { return offset_; }
+ HInstruction* GetIndex() const { return index_; }
+
+  // Returns the dex index of the declaring class's definition.
+  // It is kDeclaringClassDefIndexForArrays for an array element.
+ int16_t GetDeclaringClassDefIndex() const {
+ return declaring_class_def_index_;
+ }
+
+ bool IsArrayElement() const {
+ return index_ != nullptr;
+ }
+
+ bool IsValueKilledByLoopSideEffects() const {
+ return value_killed_by_loop_side_effects_;
+ }
+
+ void SetValueKilledByLoopSideEffects(bool val) {
+ value_killed_by_loop_side_effects_ = val;
+ }
+
+ private:
+ ReferenceInfo* const ref_info_; // reference for instance/static field or array access.
+ const size_t offset_; // offset of static/instance field.
+ HInstruction* const index_; // index of an array element.
+ const int16_t declaring_class_def_index_; // declaring class's def's dex index.
+ bool value_killed_by_loop_side_effects_; // value of this location may be killed by loop
+ // side effects because this location is stored
+ // into inside a loop. This gives
+ // better info on whether a singleton's location
+ // value may be killed by loop side effects.
+
+ DISALLOW_COPY_AND_ASSIGN(HeapLocation);
+};
+
+// A HeapLocationCollector collects all relevant heap locations and keeps
+// an aliasing matrix for all locations.
+class HeapLocationCollector : public HGraphVisitor {
+ public:
+ static constexpr size_t kHeapLocationNotFound = -1;
+  // Start with a single uint32_t word. That's enough bits for a pair-wise
+  // aliasing matrix of 8 heap locations.
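+  // (8 heap locations yield 8 * 7 / 2 = 28 distinct pairs, which fit in 32 bits.)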
+ static constexpr uint32_t kInitialAliasingMatrixBitVectorSize = 32;
+
+ explicit HeapLocationCollector(HGraph* graph)
+ : HGraphVisitor(graph),
+ ref_info_array_(graph->GetArena()->Adapter(kArenaAllocLSE)),
+ heap_locations_(graph->GetArena()->Adapter(kArenaAllocLSE)),
+ aliasing_matrix_(graph->GetArena(),
+ kInitialAliasingMatrixBitVectorSize,
+                       /* expandable */ true,
+ kArenaAllocLSE),
+ has_heap_stores_(false),
+ has_volatile_(false),
+ has_monitor_operations_(false) {}
+
+ void CleanUp() {
+ heap_locations_.clear();
+ ref_info_array_.clear();
+ }
+
+ size_t GetNumberOfHeapLocations() const {
+ return heap_locations_.size();
+ }
+
+ HeapLocation* GetHeapLocation(size_t index) const {
+ return heap_locations_[index];
+ }
+
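+  // Strips NullCheck and BoundType instructions to get at the original reference
+  // that a heap access uses. Exposed publicly so that LSE can use it as well.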
+ HInstruction* HuntForOriginalReference(HInstruction* ref) const {
+ DCHECK(ref != nullptr);
+ while (ref->IsNullCheck() || ref->IsBoundType()) {
+ ref = ref->InputAt(0);
+ }
+ return ref;
+ }
+
+ ReferenceInfo* FindReferenceInfoOf(HInstruction* ref) const {
+ for (size_t i = 0; i < ref_info_array_.size(); i++) {
+ ReferenceInfo* ref_info = ref_info_array_[i];
+ if (ref_info->GetReference() == ref) {
+ DCHECK_EQ(i, ref_info->GetPosition());
+ return ref_info;
+ }
+ }
+ return nullptr;
+ }
+
+ bool HasHeapStores() const {
+ return has_heap_stores_;
+ }
+
+ bool HasVolatile() const {
+ return has_volatile_;
+ }
+
+ bool HasMonitorOps() const {
+ return has_monitor_operations_;
+ }
+
+ // Find and return the heap location index in heap_locations_.
+ size_t FindHeapLocationIndex(ReferenceInfo* ref_info,
+ size_t offset,
+ HInstruction* index,
+ int16_t declaring_class_def_index) const {
+ for (size_t i = 0; i < heap_locations_.size(); i++) {
+ HeapLocation* loc = heap_locations_[i];
+ if (loc->GetReferenceInfo() == ref_info &&
+ loc->GetOffset() == offset &&
+ loc->GetIndex() == index &&
+ loc->GetDeclaringClassDefIndex() == declaring_class_def_index) {
+ return i;
+ }
+ }
+ return kHeapLocationNotFound;
+ }
+
+ // Returns true if heap_locations_[index1] and heap_locations_[index2] may alias.
+ bool MayAlias(size_t index1, size_t index2) const {
+ if (index1 < index2) {
+ return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index1, index2));
+ } else if (index1 > index2) {
+ return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index2, index1));
+ } else {
+ DCHECK(false) << "index1 and index2 are expected to be different";
+ return true;
+ }
+ }
+
+ void BuildAliasingMatrix() {
+ const size_t number_of_locations = heap_locations_.size();
+ if (number_of_locations == 0) {
+ return;
+ }
+ size_t pos = 0;
+ // Compute aliasing info between every pair of different heap locations.
+ // Save the result in a matrix represented as a BitVector.
+ for (size_t i = 0; i < number_of_locations - 1; i++) {
+ for (size_t j = i + 1; j < number_of_locations; j++) {
+ if (ComputeMayAlias(i, j)) {
+ aliasing_matrix_.SetBit(CheckedAliasingMatrixPosition(i, j, pos));
+ }
+ pos++;
+ }
+ }
+ }
+
+ private:
+ // An allocation cannot alias with a name which already exists at the point
+ // of the allocation, such as a parameter or a load happening before the allocation.
+ bool MayAliasWithPreexistenceChecking(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
+ if (ref_info1->GetReference()->IsNewInstance() || ref_info1->GetReference()->IsNewArray()) {
+ // Any reference that can alias with the allocation must appear after it in the block/in
+ // the block's successors. In reverse post order, those instructions will be visited after
+ // the allocation.
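+    // For example, a parameter collected at position 0 already exists when a
+    // NewInstance collected at position 3 executes, so the two cannot alias.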
+ return ref_info2->GetPosition() >= ref_info1->GetPosition();
+ }
+ return true;
+ }
+
+ bool CanReferencesAlias(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
+ if (ref_info1 == ref_info2) {
+ return true;
+ } else if (ref_info1->IsSingleton()) {
+ return false;
+ } else if (ref_info2->IsSingleton()) {
+ return false;
+ } else if (!MayAliasWithPreexistenceChecking(ref_info1, ref_info2) ||
+ !MayAliasWithPreexistenceChecking(ref_info2, ref_info1)) {
+ return false;
+ }
+ return true;
+ }
+
+ // `index1` and `index2` are indices in the array of collected heap locations.
+ // Returns the position in the bit vector that tracks whether the two heap
+ // locations may alias.
+ size_t AliasingMatrixPosition(size_t index1, size_t index2) const {
+ DCHECK(index2 > index1);
+ const size_t number_of_locations = heap_locations_.size();
+ // It's (num_of_locations - 1) + ... + (num_of_locations - index1) + (index2 - index1 - 1).
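+    // For example, with 4 locations the pairs are numbered in the order
+    // (0,1) (0,2) (0,3) (1,2) (1,3) (2,3), so (index1, index2) = (1, 3)
+    // maps to 4 * 1 - (1 + 1) * 1 / 2 + (3 - 1 - 1) = 4.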
+ return (number_of_locations * index1 - (1 + index1) * index1 / 2 + (index2 - index1 - 1));
+ }
+
+ // An additional position is passed in to make sure the calculated position is correct.
+ size_t CheckedAliasingMatrixPosition(size_t index1, size_t index2, size_t position) {
+ size_t calculated_position = AliasingMatrixPosition(index1, index2);
+ DCHECK_EQ(calculated_position, position);
+ return calculated_position;
+ }
+
+  // Compute whether two locations may alias with each other.
+ bool ComputeMayAlias(size_t index1, size_t index2) const {
+ HeapLocation* loc1 = heap_locations_[index1];
+ HeapLocation* loc2 = heap_locations_[index2];
+ if (loc1->GetOffset() != loc2->GetOffset()) {
+ // Either two different instance fields, or one is an instance
+ // field and the other is an array element.
+ return false;
+ }
+ if (loc1->GetDeclaringClassDefIndex() != loc2->GetDeclaringClassDefIndex()) {
+ // Different types.
+ return false;
+ }
+ if (!CanReferencesAlias(loc1->GetReferenceInfo(), loc2->GetReferenceInfo())) {
+ return false;
+ }
+ if (loc1->IsArrayElement() && loc2->IsArrayElement()) {
+ HInstruction* array_index1 = loc1->GetIndex();
+ HInstruction* array_index2 = loc2->GetIndex();
+ DCHECK(array_index1 != nullptr);
+ DCHECK(array_index2 != nullptr);
+ if (array_index1->IsIntConstant() &&
+ array_index2->IsIntConstant() &&
+ array_index1->AsIntConstant()->GetValue() != array_index2->AsIntConstant()->GetValue()) {
+ // Different constant indices do not alias.
+ return false;
+ }
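+      // Otherwise, e.g. arr[i] vs. arr[j] or arr[i] vs. arr[0], the indices may
+      // be equal at runtime, so conservatively treat the two locations as
+      // aliasing and record the index aliasing on the reference.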
+ ReferenceInfo* ref_info = loc1->GetReferenceInfo();
+ ref_info->SetHasIndexAliasing(true);
+ }
+ return true;
+ }
+
+ ReferenceInfo* GetOrCreateReferenceInfo(HInstruction* instruction) {
+ ReferenceInfo* ref_info = FindReferenceInfoOf(instruction);
+ if (ref_info == nullptr) {
+ size_t pos = ref_info_array_.size();
+ ref_info = new (GetGraph()->GetArena()) ReferenceInfo(instruction, pos);
+ ref_info_array_.push_back(ref_info);
+ }
+ return ref_info;
+ }
+
+ void CreateReferenceInfoForReferenceType(HInstruction* instruction) {
+ if (instruction->GetType() != Primitive::kPrimNot) {
+ return;
+ }
+ DCHECK(FindReferenceInfoOf(instruction) == nullptr);
+ GetOrCreateReferenceInfo(instruction);
+ }
+
+ HeapLocation* GetOrCreateHeapLocation(HInstruction* ref,
+ size_t offset,
+ HInstruction* index,
+ int16_t declaring_class_def_index) {
+ HInstruction* original_ref = HuntForOriginalReference(ref);
+ ReferenceInfo* ref_info = GetOrCreateReferenceInfo(original_ref);
+ size_t heap_location_idx = FindHeapLocationIndex(
+ ref_info, offset, index, declaring_class_def_index);
+ if (heap_location_idx == kHeapLocationNotFound) {
+ HeapLocation* heap_loc = new (GetGraph()->GetArena())
+ HeapLocation(ref_info, offset, index, declaring_class_def_index);
+ heap_locations_.push_back(heap_loc);
+ return heap_loc;
+ }
+ return heap_locations_[heap_location_idx];
+ }
+
+ HeapLocation* VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
+ if (field_info.IsVolatile()) {
+ has_volatile_ = true;
+ }
+ const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
+ const size_t offset = field_info.GetFieldOffset().SizeValue();
+ return GetOrCreateHeapLocation(ref, offset, nullptr, declaring_class_def_index);
+ }
+
+ void VisitArrayAccess(HInstruction* array, HInstruction* index) {
+ GetOrCreateHeapLocation(array, HeapLocation::kInvalidFieldOffset,
+ index, HeapLocation::kDeclaringClassDefIndexForArrays);
+ }
+
+ void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
+ VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
+ CreateReferenceInfoForReferenceType(instruction);
+ }
+
+ void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
+ HeapLocation* location = VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
+ has_heap_stores_ = true;
+ if (location->GetReferenceInfo()->IsSingleton()) {
+ // A singleton's location value may be killed by loop side effects if it's
+ // defined before that loop, and it's stored into inside that loop.
+ HLoopInformation* loop_info = instruction->GetBlock()->GetLoopInformation();
+ if (loop_info != nullptr) {
+ HInstruction* ref = location->GetReferenceInfo()->GetReference();
+ DCHECK(ref->IsNewInstance());
+ if (loop_info->IsDefinedOutOfTheLoop(ref)) {
+ // ref's location value may be killed by this loop's side effects.
+ location->SetValueKilledByLoopSideEffects(true);
+ } else {
+ // ref is defined inside this loop so this loop's side effects cannot
+ // kill its location value at the loop header since ref/its location doesn't
+ // exist yet at the loop header.
+ }
+ }
+ } else {
+      // For non-singletons, value_killed_by_loop_side_effects_ is initialized
+      // to true.
+ DCHECK_EQ(location->IsValueKilledByLoopSideEffects(), true);
+ }
+ }
+
+ void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
+ VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
+ CreateReferenceInfoForReferenceType(instruction);
+ }
+
+ void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
+ VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
+ has_heap_stores_ = true;
+ }
+
+ // We intentionally don't collect HUnresolvedInstanceField/HUnresolvedStaticField accesses
+ // since we cannot accurately track the fields.
+
+ void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
+ VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
+ CreateReferenceInfoForReferenceType(instruction);
+ }
+
+ void VisitArraySet(HArraySet* instruction) OVERRIDE {
+ VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
+ has_heap_stores_ = true;
+ }
+
+ void VisitNewInstance(HNewInstance* new_instance) OVERRIDE {
+ // Any references appearing in the ref_info_array_ so far cannot alias with new_instance.
+ CreateReferenceInfoForReferenceType(new_instance);
+ }
+
+ void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* instruction) OVERRIDE {
+ CreateReferenceInfoForReferenceType(instruction);
+ }
+
+ void VisitInvokeVirtual(HInvokeVirtual* instruction) OVERRIDE {
+ CreateReferenceInfoForReferenceType(instruction);
+ }
+
+ void VisitInvokeInterface(HInvokeInterface* instruction) OVERRIDE {
+ CreateReferenceInfoForReferenceType(instruction);
+ }
+
+ void VisitParameterValue(HParameterValue* instruction) OVERRIDE {
+ CreateReferenceInfoForReferenceType(instruction);
+ }
+
+ void VisitSelect(HSelect* instruction) OVERRIDE {
+ CreateReferenceInfoForReferenceType(instruction);
+ }
+
+ void VisitMonitorOperation(HMonitorOperation* monitor ATTRIBUTE_UNUSED) OVERRIDE {
+ has_monitor_operations_ = true;
+ }
+
+ ArenaVector<ReferenceInfo*> ref_info_array_; // All references used for heap accesses.
+ ArenaVector<HeapLocation*> heap_locations_; // All heap locations.
+ ArenaBitVector aliasing_matrix_; // aliasing info between each pair of locations.
+  bool has_heap_stores_; // If there are no heap stores, LSE acts as GVN with better
+ // alias analysis and won't be as effective.
+ bool has_volatile_; // If there are volatile field accesses.
+ bool has_monitor_operations_; // If there are monitor operations.
+
+ DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
+};
+
+class LoadStoreAnalysis : public HOptimization {
+ public:
+ explicit LoadStoreAnalysis(HGraph* graph)
+ : HOptimization(graph, kLoadStoreAnalysisPassName),
+ heap_location_collector_(graph) {}
+
+ const HeapLocationCollector& GetHeapLocationCollector() const {
+ return heap_location_collector_;
+ }
+
+ void Run() OVERRIDE;
+
+ static constexpr const char* kLoadStoreAnalysisPassName = "load_store_analysis";
+
+ private:
+ HeapLocationCollector heap_location_collector_;
+
+ DISALLOW_COPY_AND_ASSIGN(LoadStoreAnalysis);
+};
+
+} // namespace art
+
+#endif // ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_
diff --git a/compiler/optimizing/load_store_analysis_test.cc b/compiler/optimizing/load_store_analysis_test.cc
new file mode 100644
index 0000000000..24187777f6
--- /dev/null
+++ b/compiler/optimizing/load_store_analysis_test.cc
@@ -0,0 +1,187 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "load_store_analysis.h"
+#include "nodes.h"
+#include "optimizing_unit_test.h"
+
+#include "gtest/gtest.h"
+
+namespace art {
+
+class LoadStoreAnalysisTest : public CommonCompilerTest {
+ public:
+ LoadStoreAnalysisTest() : pool_(), allocator_(&pool_) {
+ graph_ = CreateGraph(&allocator_);
+ }
+
+ ArenaPool pool_;
+ ArenaAllocator allocator_;
+ HGraph* graph_;
+};
+
+TEST_F(LoadStoreAnalysisTest, ArrayHeapLocations) {
+ HBasicBlock* entry = new (&allocator_) HBasicBlock(graph_);
+ graph_->AddBlock(entry);
+ graph_->SetEntryBlock(entry);
+
+ // entry:
+ // array ParameterValue
+ // index ParameterValue
+ // c1 IntConstant
+ // c2 IntConstant
+ // c3 IntConstant
+ // array_get1 ArrayGet [array, c1]
+ // array_get2 ArrayGet [array, c2]
+ // array_set1 ArraySet [array, c1, c3]
+ // array_set2 ArraySet [array, index, c3]
+ HInstruction* array = new (&allocator_) HParameterValue(
+ graph_->GetDexFile(), dex::TypeIndex(0), 0, Primitive::kPrimNot);
+ HInstruction* index = new (&allocator_) HParameterValue(
+ graph_->GetDexFile(), dex::TypeIndex(1), 1, Primitive::kPrimInt);
+ HInstruction* c1 = graph_->GetIntConstant(1);
+ HInstruction* c2 = graph_->GetIntConstant(2);
+ HInstruction* c3 = graph_->GetIntConstant(3);
+ HInstruction* array_get1 = new (&allocator_) HArrayGet(array, c1, Primitive::kPrimInt, 0);
+ HInstruction* array_get2 = new (&allocator_) HArrayGet(array, c2, Primitive::kPrimInt, 0);
+ HInstruction* array_set1 = new (&allocator_) HArraySet(array, c1, c3, Primitive::kPrimInt, 0);
+ HInstruction* array_set2 = new (&allocator_) HArraySet(array, index, c3, Primitive::kPrimInt, 0);
+ entry->AddInstruction(array);
+ entry->AddInstruction(index);
+ entry->AddInstruction(array_get1);
+ entry->AddInstruction(array_get2);
+ entry->AddInstruction(array_set1);
+ entry->AddInstruction(array_set2);
+
+ // Test HeapLocationCollector initialization.
+ // Should be no heap locations, no operations on the heap.
+ HeapLocationCollector heap_location_collector(graph_);
+ ASSERT_EQ(heap_location_collector.GetNumberOfHeapLocations(), 0U);
+ ASSERT_FALSE(heap_location_collector.HasHeapStores());
+
+  // Test that, after visiting the graph, the collector sees the following heap
+  // locations: array[c1], array[c2] and array[index], and that it sees heap stores.
+ heap_location_collector.VisitBasicBlock(entry);
+ ASSERT_EQ(heap_location_collector.GetNumberOfHeapLocations(), 3U);
+ ASSERT_TRUE(heap_location_collector.HasHeapStores());
+
+ // Test queries on HeapLocationCollector's ref info and index records.
+ ReferenceInfo* ref = heap_location_collector.FindReferenceInfoOf(array);
+ size_t field_off = HeapLocation::kInvalidFieldOffset;
+ size_t class_def = HeapLocation::kDeclaringClassDefIndexForArrays;
+ size_t loc1 = heap_location_collector.FindHeapLocationIndex(ref, field_off, c1, class_def);
+ size_t loc2 = heap_location_collector.FindHeapLocationIndex(ref, field_off, c2, class_def);
+ size_t loc3 = heap_location_collector.FindHeapLocationIndex(ref, field_off, index, class_def);
+  // The reference info for array must be found in the HeapLocationCollector.
+  ASSERT_TRUE(ref != nullptr);
+  // These heap locations must be found, and array[c1], array[c2] and
+  // array[index] must be three different heap locations.
+ ASSERT_TRUE(loc1 != HeapLocationCollector::kHeapLocationNotFound);
+ ASSERT_TRUE(loc2 != HeapLocationCollector::kHeapLocationNotFound);
+ ASSERT_TRUE(loc3 != HeapLocationCollector::kHeapLocationNotFound);
+ ASSERT_TRUE(loc1 != loc2);
+ ASSERT_TRUE(loc2 != loc3);
+ ASSERT_TRUE(loc1 != loc3);
+
+  // Test alias relationships after building the aliasing matrix.
+  // array[c1] and array[c2] clearly should not alias;
+  // array[index] may alias with either, because index is an unknown value.
+ heap_location_collector.BuildAliasingMatrix();
+ ASSERT_FALSE(heap_location_collector.MayAlias(loc1, loc2));
+ ASSERT_TRUE(heap_location_collector.MayAlias(loc1, loc3));
+  ASSERT_TRUE(heap_location_collector.MayAlias(loc2, loc3));
+}
+
+TEST_F(LoadStoreAnalysisTest, FieldHeapLocations) {
+ HBasicBlock* entry = new (&allocator_) HBasicBlock(graph_);
+ graph_->AddBlock(entry);
+ graph_->SetEntryBlock(entry);
+
+ // entry:
+ // object ParameterValue
+ // c1 IntConstant
+ // set_field10 InstanceFieldSet [object, c1, 10]
+ // get_field10 InstanceFieldGet [object, 10]
+ // get_field20 InstanceFieldGet [object, 20]
+
+ HInstruction* c1 = graph_->GetIntConstant(1);
+ HInstruction* object = new (&allocator_) HParameterValue(graph_->GetDexFile(),
+ dex::TypeIndex(0),
+ 0,
+ Primitive::kPrimNot);
+ HInstanceFieldSet* set_field10 = new (&allocator_) HInstanceFieldSet(object,
+ c1,
+ nullptr,
+ Primitive::kPrimInt,
+ MemberOffset(10),
+ false,
+ kUnknownFieldIndex,
+ kUnknownClassDefIndex,
+ graph_->GetDexFile(),
+ 0);
+ HInstanceFieldGet* get_field10 = new (&allocator_) HInstanceFieldGet(object,
+ nullptr,
+ Primitive::kPrimInt,
+ MemberOffset(10),
+ false,
+ kUnknownFieldIndex,
+ kUnknownClassDefIndex,
+ graph_->GetDexFile(),
+ 0);
+ HInstanceFieldGet* get_field20 = new (&allocator_) HInstanceFieldGet(object,
+ nullptr,
+ Primitive::kPrimInt,
+ MemberOffset(20),
+ false,
+ kUnknownFieldIndex,
+ kUnknownClassDefIndex,
+ graph_->GetDexFile(),
+ 0);
+ entry->AddInstruction(object);
+ entry->AddInstruction(set_field10);
+ entry->AddInstruction(get_field10);
+ entry->AddInstruction(get_field20);
+
+ // Test HeapLocationCollector initialization.
+ // Should be no heap locations, no operations on the heap.
+ HeapLocationCollector heap_location_collector(graph_);
+ ASSERT_EQ(heap_location_collector.GetNumberOfHeapLocations(), 0U);
+ ASSERT_FALSE(heap_location_collector.HasHeapStores());
+
+  // Test that, after visiting the graph, the collector sees the following heap
+  // locations: object.field10 and object.field20, and that it sees heap stores.
+ heap_location_collector.VisitBasicBlock(entry);
+ ASSERT_EQ(heap_location_collector.GetNumberOfHeapLocations(), 2U);
+ ASSERT_TRUE(heap_location_collector.HasHeapStores());
+
+ // Test queries on HeapLocationCollector's ref info and index records.
+ ReferenceInfo* ref = heap_location_collector.FindReferenceInfoOf(object);
+ size_t loc1 = heap_location_collector.FindHeapLocationIndex(
+ ref, 10, nullptr, kUnknownClassDefIndex);
+ size_t loc2 = heap_location_collector.FindHeapLocationIndex(
+ ref, 20, nullptr, kUnknownClassDefIndex);
+  // The reference info for object must be found in the HeapLocationCollector.
+  ASSERT_TRUE(ref != nullptr);
+  // These heap locations must be found.
+  ASSERT_TRUE(loc1 != HeapLocationCollector::kHeapLocationNotFound);
+  ASSERT_TRUE(loc2 != HeapLocationCollector::kHeapLocationNotFound);
+  // Different fields of the same object are different heap locations.
+  ASSERT_TRUE(loc1 != loc2);
+  // Accesses to different fields of the same object should not alias.
+  ASSERT_FALSE(heap_location_collector.MayAlias(loc1, loc2));
+}
+
+} // namespace art
diff --git a/compiler/optimizing/load_store_elimination.cc b/compiler/optimizing/load_store_elimination.cc
index 76c9d2324b..211528b4bd 100644
--- a/compiler/optimizing/load_store_elimination.cc
+++ b/compiler/optimizing/load_store_elimination.cc
@@ -14,6 +14,7 @@
* limitations under the License.
*/
+#include "load_store_analysis.h"
#include "load_store_elimination.h"
#include "escape.h"
@@ -23,477 +24,6 @@
namespace art {
-class ReferenceInfo;
-
-// A cap for the number of heap locations to prevent pathological time/space consumption.
-// The number of heap locations for most of the methods stays below this threshold.
-constexpr size_t kMaxNumberOfHeapLocations = 32;
-
-// A ReferenceInfo contains additional info about a reference such as
-// whether it's a singleton, returned, etc.
-class ReferenceInfo : public ArenaObject<kArenaAllocMisc> {
- public:
- ReferenceInfo(HInstruction* reference, size_t pos)
- : reference_(reference),
- position_(pos),
- is_singleton_(true),
- is_singleton_and_not_returned_(true),
- is_singleton_and_not_deopt_visible_(true),
- has_index_aliasing_(false) {
- CalculateEscape(reference_,
- nullptr,
- &is_singleton_,
- &is_singleton_and_not_returned_,
- &is_singleton_and_not_deopt_visible_);
- }
-
- HInstruction* GetReference() const {
- return reference_;
- }
-
- size_t GetPosition() const {
- return position_;
- }
-
- // Returns true if reference_ is the only name that can refer to its value during
- // the lifetime of the method. So it's guaranteed to not have any alias in
- // the method (including its callees).
- bool IsSingleton() const {
- return is_singleton_;
- }
-
- // Returns true if reference_ is a singleton and not returned to the caller or
- // used as an environment local of an HDeoptimize instruction.
- // The allocation and stores into reference_ may be eliminated for such cases.
- bool IsSingletonAndRemovable() const {
- return is_singleton_and_not_returned_ && is_singleton_and_not_deopt_visible_;
- }
-
- // Returns true if reference_ is a singleton and returned to the caller or
- // used as an environment local of an HDeoptimize instruction.
- bool IsSingletonAndNonRemovable() const {
- return is_singleton_ &&
- (!is_singleton_and_not_returned_ || !is_singleton_and_not_deopt_visible_);
- }
-
- bool HasIndexAliasing() {
- return has_index_aliasing_;
- }
-
- void SetHasIndexAliasing(bool has_index_aliasing) {
- // Only allow setting to true.
- DCHECK(has_index_aliasing);
- has_index_aliasing_ = has_index_aliasing;
- }
-
- private:
- HInstruction* const reference_;
- const size_t position_; // position in HeapLocationCollector's ref_info_array_.
-
- // Can only be referred to by a single name in the method.
- bool is_singleton_;
- // Is singleton and not returned to caller.
- bool is_singleton_and_not_returned_;
- // Is singleton and not used as an environment local of HDeoptimize.
- bool is_singleton_and_not_deopt_visible_;
- // Some heap locations with reference_ have array index aliasing,
- // e.g. arr[i] and arr[j] may be the same location.
- bool has_index_aliasing_;
-
- DISALLOW_COPY_AND_ASSIGN(ReferenceInfo);
-};
-
-// A heap location is a reference-offset/index pair that a value can be loaded from
-// or stored to.
-class HeapLocation : public ArenaObject<kArenaAllocMisc> {
- public:
- static constexpr size_t kInvalidFieldOffset = -1;
-
- // TODO: more fine-grained array types.
- static constexpr int16_t kDeclaringClassDefIndexForArrays = -1;
-
- HeapLocation(ReferenceInfo* ref_info,
- size_t offset,
- HInstruction* index,
- int16_t declaring_class_def_index)
- : ref_info_(ref_info),
- offset_(offset),
- index_(index),
- declaring_class_def_index_(declaring_class_def_index),
- value_killed_by_loop_side_effects_(true) {
- DCHECK(ref_info != nullptr);
- DCHECK((offset == kInvalidFieldOffset && index != nullptr) ||
- (offset != kInvalidFieldOffset && index == nullptr));
- if (ref_info->IsSingleton() && !IsArrayElement()) {
- // Assume this location's value cannot be killed by loop side effects
- // until proven otherwise.
- value_killed_by_loop_side_effects_ = false;
- }
- }
-
- ReferenceInfo* GetReferenceInfo() const { return ref_info_; }
- size_t GetOffset() const { return offset_; }
- HInstruction* GetIndex() const { return index_; }
-
- // Returns the definition of declaring class' dex index.
- // It's kDeclaringClassDefIndexForArrays for an array element.
- int16_t GetDeclaringClassDefIndex() const {
- return declaring_class_def_index_;
- }
-
- bool IsArrayElement() const {
- return index_ != nullptr;
- }
-
- bool IsValueKilledByLoopSideEffects() const {
- return value_killed_by_loop_side_effects_;
- }
-
- void SetValueKilledByLoopSideEffects(bool val) {
- value_killed_by_loop_side_effects_ = val;
- }
-
- private:
- ReferenceInfo* const ref_info_; // reference for instance/static field or array access.
- const size_t offset_; // offset of static/instance field.
- HInstruction* const index_; // index of an array element.
- const int16_t declaring_class_def_index_; // declaring class's def's dex index.
- bool value_killed_by_loop_side_effects_; // value of this location may be killed by loop
- // side effects because this location is stored
- // into inside a loop. This gives
- // better info on whether a singleton's location
- // value may be killed by loop side effects.
-
- DISALLOW_COPY_AND_ASSIGN(HeapLocation);
-};
-
-static HInstruction* HuntForOriginalReference(HInstruction* ref) {
- DCHECK(ref != nullptr);
- while (ref->IsNullCheck() || ref->IsBoundType()) {
- ref = ref->InputAt(0);
- }
- return ref;
-}
-
-// A HeapLocationCollector collects all relevant heap locations and keeps
-// an aliasing matrix for all locations.
-class HeapLocationCollector : public HGraphVisitor {
- public:
- static constexpr size_t kHeapLocationNotFound = -1;
- // Start with a single uint32_t word. That's enough bits for pair-wise
- // aliasing matrix of 8 heap locations.
- static constexpr uint32_t kInitialAliasingMatrixBitVectorSize = 32;
-
- explicit HeapLocationCollector(HGraph* graph)
- : HGraphVisitor(graph),
- ref_info_array_(graph->GetArena()->Adapter(kArenaAllocLSE)),
- heap_locations_(graph->GetArena()->Adapter(kArenaAllocLSE)),
- aliasing_matrix_(graph->GetArena(),
- kInitialAliasingMatrixBitVectorSize,
- true,
- kArenaAllocLSE),
- has_heap_stores_(false),
- has_volatile_(false),
- has_monitor_operations_(false) {}
-
- size_t GetNumberOfHeapLocations() const {
- return heap_locations_.size();
- }
-
- HeapLocation* GetHeapLocation(size_t index) const {
- return heap_locations_[index];
- }
-
- ReferenceInfo* FindReferenceInfoOf(HInstruction* ref) const {
- for (size_t i = 0; i < ref_info_array_.size(); i++) {
- ReferenceInfo* ref_info = ref_info_array_[i];
- if (ref_info->GetReference() == ref) {
- DCHECK_EQ(i, ref_info->GetPosition());
- return ref_info;
- }
- }
- return nullptr;
- }
-
- bool HasHeapStores() const {
- return has_heap_stores_;
- }
-
- bool HasVolatile() const {
- return has_volatile_;
- }
-
- bool HasMonitorOps() const {
- return has_monitor_operations_;
- }
-
- // Find and return the heap location index in heap_locations_.
- size_t FindHeapLocationIndex(ReferenceInfo* ref_info,
- size_t offset,
- HInstruction* index,
- int16_t declaring_class_def_index) const {
- for (size_t i = 0; i < heap_locations_.size(); i++) {
- HeapLocation* loc = heap_locations_[i];
- if (loc->GetReferenceInfo() == ref_info &&
- loc->GetOffset() == offset &&
- loc->GetIndex() == index &&
- loc->GetDeclaringClassDefIndex() == declaring_class_def_index) {
- return i;
- }
- }
- return kHeapLocationNotFound;
- }
-
- // Returns true if heap_locations_[index1] and heap_locations_[index2] may alias.
- bool MayAlias(size_t index1, size_t index2) const {
- if (index1 < index2) {
- return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index1, index2));
- } else if (index1 > index2) {
- return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index2, index1));
- } else {
- DCHECK(false) << "index1 and index2 are expected to be different";
- return true;
- }
- }
-
- void BuildAliasingMatrix() {
- const size_t number_of_locations = heap_locations_.size();
- if (number_of_locations == 0) {
- return;
- }
- size_t pos = 0;
- // Compute aliasing info between every pair of different heap locations.
- // Save the result in a matrix represented as a BitVector.
- for (size_t i = 0; i < number_of_locations - 1; i++) {
- for (size_t j = i + 1; j < number_of_locations; j++) {
- if (ComputeMayAlias(i, j)) {
- aliasing_matrix_.SetBit(CheckedAliasingMatrixPosition(i, j, pos));
- }
- pos++;
- }
- }
- }
-
- private:
- // An allocation cannot alias with a name which already exists at the point
- // of the allocation, such as a parameter or a load happening before the allocation.
- bool MayAliasWithPreexistenceChecking(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
- if (ref_info1->GetReference()->IsNewInstance() || ref_info1->GetReference()->IsNewArray()) {
- // Any reference that can alias with the allocation must appear after it in the block/in
- // the block's successors. In reverse post order, those instructions will be visited after
- // the allocation.
- return ref_info2->GetPosition() >= ref_info1->GetPosition();
- }
- return true;
- }
-
- bool CanReferencesAlias(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
- if (ref_info1 == ref_info2) {
- return true;
- } else if (ref_info1->IsSingleton()) {
- return false;
- } else if (ref_info2->IsSingleton()) {
- return false;
- } else if (!MayAliasWithPreexistenceChecking(ref_info1, ref_info2) ||
- !MayAliasWithPreexistenceChecking(ref_info2, ref_info1)) {
- return false;
- }
- return true;
- }
-
- // `index1` and `index2` are indices in the array of collected heap locations.
- // Returns the position in the bit vector that tracks whether the two heap
- // locations may alias.
- size_t AliasingMatrixPosition(size_t index1, size_t index2) const {
- DCHECK(index2 > index1);
- const size_t number_of_locations = heap_locations_.size();
- // It's (num_of_locations - 1) + ... + (num_of_locations - index1) + (index2 - index1 - 1).
- return (number_of_locations * index1 - (1 + index1) * index1 / 2 + (index2 - index1 - 1));
- }
-
- // An additional position is passed in to make sure the calculated position is correct.
- size_t CheckedAliasingMatrixPosition(size_t index1, size_t index2, size_t position) {
- size_t calculated_position = AliasingMatrixPosition(index1, index2);
- DCHECK_EQ(calculated_position, position);
- return calculated_position;
- }
-
- // Compute if two locations may alias to each other.
- bool ComputeMayAlias(size_t index1, size_t index2) const {
- HeapLocation* loc1 = heap_locations_[index1];
- HeapLocation* loc2 = heap_locations_[index2];
- if (loc1->GetOffset() != loc2->GetOffset()) {
- // Either two different instance fields, or one is an instance
- // field and the other is an array element.
- return false;
- }
- if (loc1->GetDeclaringClassDefIndex() != loc2->GetDeclaringClassDefIndex()) {
- // Different types.
- return false;
- }
- if (!CanReferencesAlias(loc1->GetReferenceInfo(), loc2->GetReferenceInfo())) {
- return false;
- }
- if (loc1->IsArrayElement() && loc2->IsArrayElement()) {
- HInstruction* array_index1 = loc1->GetIndex();
- HInstruction* array_index2 = loc2->GetIndex();
- DCHECK(array_index1 != nullptr);
- DCHECK(array_index2 != nullptr);
- if (array_index1->IsIntConstant() &&
- array_index2->IsIntConstant() &&
- array_index1->AsIntConstant()->GetValue() != array_index2->AsIntConstant()->GetValue()) {
- // Different constant indices do not alias.
- return false;
- }
- ReferenceInfo* ref_info = loc1->GetReferenceInfo();
- ref_info->SetHasIndexAliasing(true);
- }
- return true;
- }
-
- ReferenceInfo* GetOrCreateReferenceInfo(HInstruction* instruction) {
- ReferenceInfo* ref_info = FindReferenceInfoOf(instruction);
- if (ref_info == nullptr) {
- size_t pos = ref_info_array_.size();
- ref_info = new (GetGraph()->GetArena()) ReferenceInfo(instruction, pos);
- ref_info_array_.push_back(ref_info);
- }
- return ref_info;
- }
-
- void CreateReferenceInfoForReferenceType(HInstruction* instruction) {
- if (instruction->GetType() != Primitive::kPrimNot) {
- return;
- }
- DCHECK(FindReferenceInfoOf(instruction) == nullptr);
- GetOrCreateReferenceInfo(instruction);
- }
-
- HeapLocation* GetOrCreateHeapLocation(HInstruction* ref,
- size_t offset,
- HInstruction* index,
- int16_t declaring_class_def_index) {
- HInstruction* original_ref = HuntForOriginalReference(ref);
- ReferenceInfo* ref_info = GetOrCreateReferenceInfo(original_ref);
- size_t heap_location_idx = FindHeapLocationIndex(
- ref_info, offset, index, declaring_class_def_index);
- if (heap_location_idx == kHeapLocationNotFound) {
- HeapLocation* heap_loc = new (GetGraph()->GetArena())
- HeapLocation(ref_info, offset, index, declaring_class_def_index);
- heap_locations_.push_back(heap_loc);
- return heap_loc;
- }
- return heap_locations_[heap_location_idx];
- }
-
- HeapLocation* VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
- if (field_info.IsVolatile()) {
- has_volatile_ = true;
- }
- const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
- const size_t offset = field_info.GetFieldOffset().SizeValue();
- return GetOrCreateHeapLocation(ref, offset, nullptr, declaring_class_def_index);
- }
-
- void VisitArrayAccess(HInstruction* array, HInstruction* index) {
- GetOrCreateHeapLocation(array, HeapLocation::kInvalidFieldOffset,
- index, HeapLocation::kDeclaringClassDefIndexForArrays);
- }
-
- void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
- VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
- CreateReferenceInfoForReferenceType(instruction);
- }
-
- void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
- HeapLocation* location = VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
- has_heap_stores_ = true;
- if (location->GetReferenceInfo()->IsSingleton()) {
- // A singleton's location value may be killed by loop side effects if it's
- // defined before that loop, and it's stored into inside that loop.
- HLoopInformation* loop_info = instruction->GetBlock()->GetLoopInformation();
- if (loop_info != nullptr) {
- HInstruction* ref = location->GetReferenceInfo()->GetReference();
- DCHECK(ref->IsNewInstance());
- if (loop_info->IsDefinedOutOfTheLoop(ref)) {
- // ref's location value may be killed by this loop's side effects.
- location->SetValueKilledByLoopSideEffects(true);
- } else {
- // ref is defined inside this loop so this loop's side effects cannot
- // kill its location value at the loop header since ref/its location doesn't
- // exist yet at the loop header.
- }
- }
- } else {
- // For non-singletons, value_killed_by_loop_side_effects_ is inited to
- // true.
- DCHECK_EQ(location->IsValueKilledByLoopSideEffects(), true);
- }
- }
-
- void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
- VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
- CreateReferenceInfoForReferenceType(instruction);
- }
-
- void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
- VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
- has_heap_stores_ = true;
- }
-
- // We intentionally don't collect HUnresolvedInstanceField/HUnresolvedStaticField accesses
- // since we cannot accurately track the fields.
-
- void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
- VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
- CreateReferenceInfoForReferenceType(instruction);
- }
-
- void VisitArraySet(HArraySet* instruction) OVERRIDE {
- VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
- has_heap_stores_ = true;
- }
-
- void VisitNewInstance(HNewInstance* new_instance) OVERRIDE {
- // Any references appearing in the ref_info_array_ so far cannot alias with new_instance.
- CreateReferenceInfoForReferenceType(new_instance);
- }
-
- void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* instruction) OVERRIDE {
- CreateReferenceInfoForReferenceType(instruction);
- }
-
- void VisitInvokeVirtual(HInvokeVirtual* instruction) OVERRIDE {
- CreateReferenceInfoForReferenceType(instruction);
- }
-
- void VisitInvokeInterface(HInvokeInterface* instruction) OVERRIDE {
- CreateReferenceInfoForReferenceType(instruction);
- }
-
- void VisitParameterValue(HParameterValue* instruction) OVERRIDE {
- CreateReferenceInfoForReferenceType(instruction);
- }
-
- void VisitSelect(HSelect* instruction) OVERRIDE {
- CreateReferenceInfoForReferenceType(instruction);
- }
-
- void VisitMonitorOperation(HMonitorOperation* monitor ATTRIBUTE_UNUSED) OVERRIDE {
- has_monitor_operations_ = true;
- }
-
- ArenaVector<ReferenceInfo*> ref_info_array_; // All references used for heap accesses.
- ArenaVector<HeapLocation*> heap_locations_; // All heap locations.
- ArenaBitVector aliasing_matrix_; // aliasing info between each pair of locations.
- bool has_heap_stores_; // If there is no heap stores, LSE acts as GVN with better
- // alias analysis and won't be as effective.
- bool has_volatile_; // If there are volatile field accesses.
- bool has_monitor_operations_; // If there are monitor operations.
-
- DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
-};
-
// An unknown heap value. Loads with such a value in the heap location cannot be eliminated.
// A heap location can be set to kUnknownHeapValue when:
// - initially set a value.
@@ -516,7 +46,7 @@ class LSEVisitor : public HGraphVisitor {
side_effects_(side_effects),
heap_values_for_(graph->GetBlocks().size(),
ArenaVector<HInstruction*>(heap_locations_collector.
- GetNumberOfHeapLocations(),
+ GetNumberOfHeapLocations(),
kUnknownHeapValue,
graph->GetArena()->Adapter(kArenaAllocLSE)),
graph->GetArena()->Adapter(kArenaAllocLSE)),
@@ -760,7 +290,7 @@ class LSEVisitor : public HGraphVisitor {
size_t offset,
HInstruction* index,
int16_t declaring_class_def_index) {
- HInstruction* original_ref = HuntForOriginalReference(ref);
+ HInstruction* original_ref = heap_location_collector_.HuntForOriginalReference(ref);
ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
size_t idx = heap_location_collector_.FindHeapLocationIndex(
ref_info, offset, index, declaring_class_def_index);
@@ -827,7 +357,7 @@ class LSEVisitor : public HGraphVisitor {
HInstruction* index,
int16_t declaring_class_def_index,
HInstruction* value) {
- HInstruction* original_ref = HuntForOriginalReference(ref);
+ HInstruction* original_ref = heap_location_collector_.HuntForOriginalReference(ref);
ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
size_t idx = heap_location_collector_.FindHeapLocationIndex(
ref_info, offset, index, declaring_class_def_index);
@@ -1127,25 +657,12 @@ void LoadStoreElimination::Run() {
// Skip this optimization.
return;
}
- HeapLocationCollector heap_location_collector(graph_);
- for (HBasicBlock* block : graph_->GetReversePostOrder()) {
- heap_location_collector.VisitBasicBlock(block);
- }
- if (heap_location_collector.GetNumberOfHeapLocations() > kMaxNumberOfHeapLocations) {
- // Bail out if there are too many heap locations to deal with.
- return;
- }
- if (!heap_location_collector.HasHeapStores()) {
- // Without heap stores, this pass would act mostly as GVN on heap accesses.
+ const HeapLocationCollector& heap_location_collector = lsa_.GetHeapLocationCollector();
+ if (heap_location_collector.GetNumberOfHeapLocations() == 0) {
+ // No HeapLocation information from LSA, skip this optimization.
return;
}
- if (heap_location_collector.HasVolatile() || heap_location_collector.HasMonitorOps()) {
- // Don't do load/store elimination if the method has volatile field accesses or
- // monitor operations, for now.
- // TODO: do it right.
- return;
- }
- heap_location_collector.BuildAliasingMatrix();
+
LSEVisitor lse_visitor(graph_, heap_location_collector, side_effects_);
for (HBasicBlock* block : graph_->GetReversePostOrder()) {
lse_visitor.VisitBasicBlock(block);
diff --git a/compiler/optimizing/load_store_elimination.h b/compiler/optimizing/load_store_elimination.h
index 1d9e5c8da6..efe71c733a 100644
--- a/compiler/optimizing/load_store_elimination.h
+++ b/compiler/optimizing/load_store_elimination.h
@@ -22,12 +22,16 @@
namespace art {
class SideEffectsAnalysis;
+class LoadStoreAnalysis;
class LoadStoreElimination : public HOptimization {
public:
- LoadStoreElimination(HGraph* graph, const SideEffectsAnalysis& side_effects)
+ LoadStoreElimination(HGraph* graph,
+ const SideEffectsAnalysis& side_effects,
+ const LoadStoreAnalysis& lsa)
: HOptimization(graph, kLoadStoreEliminationPassName),
- side_effects_(side_effects) {}
+ side_effects_(side_effects),
+ lsa_(lsa) {}
void Run() OVERRIDE;
@@ -35,6 +39,7 @@ class LoadStoreElimination : public HOptimization {
private:
const SideEffectsAnalysis& side_effects_;
+ const LoadStoreAnalysis& lsa_;
DISALLOW_COPY_AND_ASSIGN(LoadStoreElimination);
};
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index f928f71209..2c65e5ca65 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -83,6 +83,7 @@
#include "jit/jit_code_cache.h"
#include "jni/quick/jni_compiler.h"
#include "licm.h"
+#include "load_store_analysis.h"
#include "load_store_elimination.h"
#include "loop_optimization.h"
#include "nodes.h"
@@ -465,7 +466,8 @@ static HOptimization* BuildOptimization(
const DexCompilationUnit& dex_compilation_unit,
VariableSizedHandleScope* handles,
SideEffectsAnalysis* most_recent_side_effects,
- HInductionVarAnalysis* most_recent_induction) {
+ HInductionVarAnalysis* most_recent_induction,
+ LoadStoreAnalysis* most_recent_lsa) {
std::string opt_name = ConvertPassNameToOptimizationName(pass_name);
if (opt_name == BoundsCheckElimination::kBoundsCheckEliminationPassName) {
CHECK(most_recent_side_effects != nullptr && most_recent_induction != nullptr);
@@ -505,9 +507,12 @@ static HOptimization* BuildOptimization(
} else if (opt_name == LICM::kLoopInvariantCodeMotionPassName) {
CHECK(most_recent_side_effects != nullptr);
return new (arena) LICM(graph, *most_recent_side_effects, stats);
+ } else if (opt_name == LoadStoreAnalysis::kLoadStoreAnalysisPassName) {
+ return new (arena) LoadStoreAnalysis(graph);
} else if (opt_name == LoadStoreElimination::kLoadStoreEliminationPassName) {
CHECK(most_recent_side_effects != nullptr);
- return new (arena) LoadStoreElimination(graph, *most_recent_side_effects);
+ CHECK(most_recent_lsa != nullptr);
+ return new (arena) LoadStoreElimination(graph, *most_recent_side_effects, *most_recent_lsa);
} else if (opt_name == SideEffectsAnalysis::kSideEffectsAnalysisPassName) {
return new (arena) SideEffectsAnalysis(graph);
} else if (opt_name == HLoopOptimization::kLoopOptimizationPassName) {
@@ -556,6 +561,7 @@ static ArenaVector<HOptimization*> BuildOptimizations(
// in the pass name list.
SideEffectsAnalysis* most_recent_side_effects = nullptr;
HInductionVarAnalysis* most_recent_induction = nullptr;
+ LoadStoreAnalysis* most_recent_lsa = nullptr;
ArenaVector<HOptimization*> ret(arena->Adapter());
for (const std::string& pass_name : pass_names) {
HOptimization* opt = BuildOptimization(
@@ -568,7 +574,8 @@ static ArenaVector<HOptimization*> BuildOptimizations(
dex_compilation_unit,
handles,
most_recent_side_effects,
- most_recent_induction);
+ most_recent_induction,
+ most_recent_lsa);
CHECK(opt != nullptr) << "Couldn't build optimization: \"" << pass_name << "\"";
ret.push_back(opt);
@@ -577,6 +584,8 @@ static ArenaVector<HOptimization*> BuildOptimizations(
most_recent_side_effects = down_cast<SideEffectsAnalysis*>(opt);
} else if (opt_name == HInductionVarAnalysis::kInductionPassName) {
most_recent_induction = down_cast<HInductionVarAnalysis*>(opt);
+ } else if (opt_name == LoadStoreAnalysis::kLoadStoreAnalysisPassName) {
+ most_recent_lsa = down_cast<LoadStoreAnalysis*>(opt);
}
}
return ret;
@@ -777,7 +786,8 @@ void OptimizingCompiler::RunOptimizations(HGraph* graph,
HInductionVarAnalysis* induction = new (arena) HInductionVarAnalysis(graph);
BoundsCheckElimination* bce = new (arena) BoundsCheckElimination(graph, *side_effects1, induction);
HLoopOptimization* loop = new (arena) HLoopOptimization(graph, driver, induction);
- LoadStoreElimination* lse = new (arena) LoadStoreElimination(graph, *side_effects2);
+ LoadStoreAnalysis* lsa = new (arena) LoadStoreAnalysis(graph);
+ LoadStoreElimination* lse = new (arena) LoadStoreElimination(graph, *side_effects2, *lsa);
HSharpening* sharpening = new (arena) HSharpening(
graph, codegen, dex_compilation_unit, driver, handles);
InstructionSimplifier* simplify2 = new (arena) InstructionSimplifier(
@@ -817,6 +827,7 @@ void OptimizingCompiler::RunOptimizations(HGraph* graph,
fold3, // evaluates code generated by dynamic bce
simplify3,
side_effects2,
+ lsa,
lse,
cha_guard,
dce3,