Merge "Pass driver to loop opt. Add new side_effects phase."
diff --git a/compiler/optimizing/load_store_elimination.cc b/compiler/optimizing/load_store_elimination.cc
index 2d3c00f..46ba048 100644
--- a/compiler/optimizing/load_store_elimination.cc
+++ b/compiler/optimizing/load_store_elimination.cc
@@ -38,7 +38,8 @@
position_(pos),
is_singleton_(true),
is_singleton_and_not_returned_(true),
- is_singleton_and_not_deopt_visible_(true) {
+ is_singleton_and_not_deopt_visible_(true),
+ has_index_aliasing_(false) {
CalculateEscape(reference_,
nullptr,
&is_singleton_,
@@ -68,13 +69,29 @@
return is_singleton_and_not_returned_ && is_singleton_and_not_deopt_visible_;
}
+ bool HasIndexAliasing() {
+ return has_index_aliasing_;
+ }
+
+ void SetHasIndexAliasing(bool has_index_aliasing) {
+ // Only allow setting to true.
+ DCHECK(has_index_aliasing);
+ has_index_aliasing_ = has_index_aliasing;
+ }
+
private:
HInstruction* const reference_;
const size_t position_; // position in HeapLocationCollector's ref_info_array_.
- bool is_singleton_; // can only be referred to by a single name in the method,
- bool is_singleton_and_not_returned_; // and not returned to caller,
- bool is_singleton_and_not_deopt_visible_; // and not used as an environment local of HDeoptimize.
+ // Can only be referred to by a single name in the method.
+ bool is_singleton_;
+ // Is singleton and not returned to caller.
+ bool is_singleton_and_not_returned_;
+ // Is singleton and not used as an environment local of HDeoptimize.
+ bool is_singleton_and_not_deopt_visible_;
+ // Some heap locations with reference_ have array index aliasing,
+ // e.g. arr[i] and arr[j] may be the same location.
+ bool has_index_aliasing_;
DISALLOW_COPY_AND_ASSIGN(ReferenceInfo);
};
@@ -321,6 +338,12 @@
// Different constant indices do not alias.
return false;
}
+ ReferenceInfo* ref_info = loc1->GetReferenceInfo();
+ if (ref_info->IsSingleton()) {
+ // This is guaranteed by the CanReferencesAlias() test above.
+ DCHECK_EQ(ref_info, loc2->GetReferenceInfo());
+ ref_info->SetHasIndexAliasing(true);
+ }
}
return true;
}
@@ -497,7 +520,8 @@
removed_loads_(graph->GetArena()->Adapter(kArenaAllocLSE)),
substitute_instructions_for_loads_(graph->GetArena()->Adapter(kArenaAllocLSE)),
possibly_removed_stores_(graph->GetArena()->Adapter(kArenaAllocLSE)),
- singleton_new_instances_(graph->GetArena()->Adapter(kArenaAllocLSE)) {
+ singleton_new_instances_(graph->GetArena()->Adapter(kArenaAllocLSE)),
+ singleton_new_arrays_(graph->GetArena()->Adapter(kArenaAllocLSE)) {
}
void VisitBasicBlock(HBasicBlock* block) OVERRIDE {
@@ -534,20 +558,24 @@
}
// At this point, stores in possibly_removed_stores_ can be safely removed.
- for (size_t i = 0, e = possibly_removed_stores_.size(); i < e; i++) {
- HInstruction* store = possibly_removed_stores_[i];
+ for (HInstruction* store : possibly_removed_stores_) {
DCHECK(store->IsInstanceFieldSet() || store->IsStaticFieldSet() || store->IsArraySet());
store->GetBlock()->RemoveInstruction(store);
}
// Eliminate allocations that are not used.
- for (size_t i = 0, e = singleton_new_instances_.size(); i < e; i++) {
- HInstruction* new_instance = singleton_new_instances_[i];
+ for (HInstruction* new_instance : singleton_new_instances_) {
if (!new_instance->HasNonEnvironmentUses()) {
new_instance->RemoveEnvironmentUsers();
new_instance->GetBlock()->RemoveInstruction(new_instance);
}
}
+ for (HInstruction* new_array : singleton_new_arrays_) {
+ if (!new_array->HasNonEnvironmentUses()) {
+ new_array->RemoveEnvironmentUsers();
+ new_array->GetBlock()->RemoveInstruction(new_array);
+ }
+ }
}
private:
@@ -558,7 +586,7 @@
void KeepIfIsStore(HInstruction* heap_value) {
if (heap_value == kDefaultHeapValue ||
heap_value == kUnknownHeapValue ||
- !heap_value->IsInstanceFieldSet()) {
+ !(heap_value->IsInstanceFieldSet() || heap_value->IsArraySet())) {
return;
}
auto idx = std::find(possibly_removed_stores_.begin(),
@@ -734,13 +762,16 @@
heap_values[idx] = constant;
return;
}
- if (heap_value != kUnknownHeapValue && heap_value->IsInstanceFieldSet()) {
- HInstruction* store = heap_value;
- // This load must be from a singleton since it's from the same field
- // that a "removed" store puts the value. That store must be to a singleton's field.
- DCHECK(ref_info->IsSingleton());
- // Get the real heap value of the store.
- heap_value = store->InputAt(1);
+ if (heap_value != kUnknownHeapValue) {
+ if (heap_value->IsInstanceFieldSet() || heap_value->IsArraySet()) {
+ HInstruction* store = heap_value;
+ // This load must be from a singleton since it's from the same
+ // field/element into which a "removed" store put the value. That store
+ // must be to a singleton's field/element.
+ DCHECK(ref_info->IsSingleton());
+ // Get the real heap value of the store.
+ heap_value = heap_value->IsInstanceFieldSet() ? store->InputAt(1) : store->InputAt(2);
+ }
}
if (heap_value == kUnknownHeapValue) {
// Load isn't eliminated. Put the load as the value into the HeapLocation.
@@ -796,19 +827,19 @@
if (Equal(heap_value, value)) {
// Store into the heap location with the same value.
same_value = true;
- } else if (index != nullptr) {
- // For array element, don't eliminate stores since it can be easily aliased
- // with non-constant index.
+ } else if (index != nullptr && ref_info->HasIndexAliasing()) {
+ // For an array element, don't eliminate the store if its index may
+ // alias another index into the same array.
} else if (ref_info->IsSingletonAndRemovable()) {
- // Store into a field of a singleton that's not returned. The value cannot be
- // killed due to aliasing/invocation. It can be redundant since future loads can
- // directly get the value set by this instruction. The value can still be killed due to
- // merging or loop side effects. Stores whose values are killed due to merging/loop side
- // effects later will be removed from possibly_removed_stores_ when that is detected.
+ // Store into a field/element of a singleton instance/array that's not returned.
+ // The value cannot be killed due to aliasing/invocation. It can be redundant since
+ // future loads can directly get the value set by this instruction. The value can
+ // still be killed due to merging or loop side effects. Stores whose values are
+ // killed due to merging/loop side effects later will be removed from
+ // possibly_removed_stores_ when that is detected.
possibly_redundant = true;
HNewInstance* new_instance = ref_info->GetReference()->AsNewInstance();
- DCHECK(new_instance != nullptr);
- if (new_instance->IsFinalizable()) {
+ if (new_instance != nullptr && new_instance->IsFinalizable()) {
// Finalizable objects escape globally. Need to keep the store.
possibly_redundant = false;
} else {
@@ -834,7 +865,7 @@
if (!same_value) {
if (possibly_redundant) {
- DCHECK(instruction->IsInstanceFieldSet());
+ DCHECK(instruction->IsInstanceFieldSet() || instruction->IsArraySet());
// Put the store as the heap value. If the value is loaded from heap
// by a load later, this store isn't really redundant.
heap_values[idx] = instruction;
@@ -995,6 +1026,27 @@
}
}
+ void VisitNewArray(HNewArray* new_array) OVERRIDE {
+ ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(new_array);
+ if (ref_info == nullptr) {
+ // new_array isn't used for array accesses. No need to process it.
+ return;
+ }
+ if (ref_info->IsSingletonAndRemovable()) {
+ singleton_new_arrays_.push_back(new_array);
+ }
+ ArenaVector<HInstruction*>& heap_values =
+ heap_values_for_[new_array->GetBlock()->GetBlockId()];
+ for (size_t i = 0; i < heap_values.size(); i++) {
+ HeapLocation* location = heap_location_collector_.GetHeapLocation(i);
+ HInstruction* ref = location->GetReferenceInfo()->GetReference();
+ if (ref == new_array && location->GetIndex() != nullptr) {
+ // Array elements are set to default heap values.
+ heap_values[i] = kDefaultHeapValue;
+ }
+ }
+ }
+
// Find an instruction's substitute if it should be removed.
// Return the same instruction if it should not be removed.
HInstruction* FindSubstitute(HInstruction* instruction) {
@@ -1023,6 +1075,7 @@
ArenaVector<HInstruction*> possibly_removed_stores_;
ArenaVector<HInstruction*> singleton_new_instances_;
+ ArenaVector<HInstruction*> singleton_new_arrays_;
DISALLOW_COPY_AND_ASSIGN(LSEVisitor);
};
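The compiler-side change above boils down to one extra piece of bookkeeping: when two array accesses on the same singleton reference use index expressions that cannot be proven distinct, the reference is flagged via SetHasIndexAliasing(), and the store-elimination path later consults HasIndexAliasing() before treating an ArraySet as redundant. The helper below is a condensed, hypothetical sketch of that decision, not code from the patch; the helper name is invented and the constant-index comparison is simplified relative to the real HeapLocationCollector logic.

static bool MayAliasAndRecordIndexAliasing(ReferenceInfo* ref_info,
                                           HInstruction* index1,
                                           HInstruction* index2) {
  if (index1 == index2) {
    // Identical index expressions: trivially the same element.
    return true;
  }
  if (index1->IsIntConstant() && index2->IsIntConstant() &&
      index1->AsIntConstant()->GetValue() != index2->AsIntConstant()->GetValue()) {
    // Different constant indices never alias.
    return false;
  }
  if (ref_info->IsSingleton()) {
    // e.g. arr[1] vs. arr[i]: elements of this singleton array may be reached
    // through more than one index expression, so stores to its elements must
    // not be eliminated (the store-handling code above checks HasIndexAliasing()).
    ref_info->SetHasIndexAliasing(true);
  }
  return true;
}

In testAllocationEliminationOfArray4() below, array[1] and array[i] hit the last branch, which is why that allocation and its stores are kept.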
diff --git a/test/530-checker-lse/src/Main.java b/test/530-checker-lse/src/Main.java
index 9f4be6c..14a40ef 100644
--- a/test/530-checker-lse/src/Main.java
+++ b/test/530-checker-lse/src/Main.java
@@ -785,6 +785,86 @@
return new Circle(Math.PI).getArea();
}
+ /// CHECK-START: int Main.testAllocationEliminationOfArray1() load_store_elimination (before)
+ /// CHECK: NewArray
+ /// CHECK: ArraySet
+ /// CHECK: ArraySet
+ /// CHECK: ArrayGet
+ /// CHECK: ArrayGet
+ /// CHECK: ArrayGet
+ /// CHECK: ArrayGet
+
+ /// CHECK-START: int Main.testAllocationEliminationOfArray1() load_store_elimination (after)
+ /// CHECK-NOT: NewArray
+ /// CHECK-NOT: ArraySet
+ /// CHECK-NOT: ArrayGet
+ private static int testAllocationEliminationOfArray1() {
+ int[] array = new int[4];
+ array[2] = 4;
+ array[3] = 7;
+ return array[0] + array[1] + array[2] + array[3];
+ }
+
+ /// CHECK-START: int Main.testAllocationEliminationOfArray2() load_store_elimination (before)
+ /// CHECK: NewArray
+ /// CHECK: ArraySet
+ /// CHECK: ArraySet
+ /// CHECK: ArrayGet
+
+ /// CHECK-START: int Main.testAllocationEliminationOfArray2() load_store_elimination (after)
+ /// CHECK: NewArray
+ /// CHECK: ArraySet
+ /// CHECK: ArraySet
+ /// CHECK: ArrayGet
+ private static int testAllocationEliminationOfArray2() {
+ // Cannot eliminate the array allocation since the array is accessed
+ // with a non-constant index.
+ int[] array = new int[4];
+ array[2] = 4;
+ array[3] = 7;
+ int sum = 0;
+ for (int e : array) {
+ sum += e;
+ }
+ return sum;
+ }
+
+ /// CHECK-START: int Main.testAllocationEliminationOfArray3(int) load_store_elimination (before)
+ /// CHECK: NewArray
+ /// CHECK: ArraySet
+ /// CHECK: ArrayGet
+
+ /// CHECK-START: int Main.testAllocationEliminationOfArray3(int) load_store_elimination (after)
+ /// CHECK-NOT: NewArray
+ /// CHECK-NOT: ArraySet
+ /// CHECK-NOT: ArrayGet
+ private static int testAllocationEliminationOfArray3(int i) {
+ int[] array = new int[4];
+ array[i] = 4;
+ return array[i];
+ }
+
+ /// CHECK-START: int Main.testAllocationEliminationOfArray4(int) load_store_elimination (before)
+ /// CHECK: NewArray
+ /// CHECK: ArraySet
+ /// CHECK: ArraySet
+ /// CHECK: ArrayGet
+ /// CHECK: ArrayGet
+
+ /// CHECK-START: int Main.testAllocationEliminationOfArray4(int) load_store_elimination (after)
+ /// CHECK: NewArray
+ /// CHECK: ArraySet
+ /// CHECK: ArraySet
+ /// CHECK: ArrayGet
+ /// CHECK-NOT: ArrayGet
+ private static int testAllocationEliminationOfArray4(int i) {
+ // Cannot eliminate array allocation due to index aliasing between 1 and i.
+ int[] array = new int[4];
+ array[1] = 2;
+ array[i] = 4;
+ return array[1] + array[i];
+ }
+
static void assertIntEquals(int result, int expected) {
if (expected != result) {
throw new Error("Expected: " + expected + ", found: " + result);
@@ -865,6 +945,11 @@
assertDoubleEquals(darray[0], Math.PI);
assertDoubleEquals(darray[1], Math.PI);
assertDoubleEquals(darray[2], Math.PI);
+
+ assertIntEquals(testAllocationEliminationOfArray1(), 11);
+ assertIntEquals(testAllocationEliminationOfArray2(), 11);
+ assertIntEquals(testAllocationEliminationOfArray3(2), 4);
+ assertIntEquals(testAllocationEliminationOfArray4(2), 6);
}
static boolean sFlag;
diff --git a/test/532-checker-nonnull-arrayset/src/Main.java b/test/532-checker-nonnull-arrayset/src/Main.java
index 2c701bb..61c9e88 100644
--- a/test/532-checker-nonnull-arrayset/src/Main.java
+++ b/test/532-checker-nonnull-arrayset/src/Main.java
@@ -30,10 +30,14 @@
/// CHECK: ReturnVoid
public static void test() {
Object[] array = new Object[2];
+ // Store into a static field to keep LSE from optimizing away the accesses below.
+ sArray = array;
Object nonNull = array[0];
nonNull.getClass(); // Ensure nonNull has an implicit null check.
array[1] = nonNull;
}
public static void main(String[] args) {}
+
+ static Object[] sArray;
}
diff --git a/test/Android.run-test.mk b/test/Android.run-test.mk
index e86340e..ca1fac6 100644
--- a/test/Android.run-test.mk
+++ b/test/Android.run-test.mk
@@ -529,12 +529,14 @@
# Known broken tests for the JIT.
# CFI unwinding expects managed frames, and the test does not iterate enough to even compile. JIT
# also uses Generic JNI instead of the JNI compiler.
+# 154-gc-loop requires more deterministic GC behavior than what the JIT provides.
# Test 906 iterates the heap filtering with different options. No instances should be created
# between those runs to be able to have precise checks.
# Test 629 requires compilation.
# 912: b/34655682
TEST_ART_BROKEN_JIT_RUN_TESTS := \
137-cfi \
+ 154-gc-loop \
629-vdex-speed \
904-object-allocation \
906-iterate-heap \
diff --git a/test/knownfailures.json b/test/knownfailures.json
index 6caf7b0..8681815 100644
--- a/test/knownfailures.json
+++ b/test/knownfailures.json
@@ -129,11 +129,15 @@
"lot."]
},
{
- "tests": ["964-default-iface-init-gen",
- "154-gc-loop"],
+ "test": "964-default-iface-init-gen",
"variant": "gcstress"
},
{
+ "tests": "154-gc-loop",
+ "variant": "gcstress | jit",
+ "description": ["154-gc-loop depends GC not happening too often"]
+ },
+ {
"test": "115-native-bridge",
"variant": "target",
"description": ["115-native-bridge setup is complicated. Need to",