Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/load_store_analysis.cc    |  8
-rw-r--r--  compiler/optimizing/load_store_analysis.h     | 19
-rw-r--r--  compiler/optimizing/load_store_elimination.cc | 74
-rw-r--r--  compiler/optimizing/scheduler.cc              |  1
4 files changed, 69 insertions(+), 33 deletions(-)
diff --git a/compiler/optimizing/load_store_analysis.cc b/compiler/optimizing/load_store_analysis.cc
index 3fe42aff2e..5e4cffab84 100644
--- a/compiler/optimizing/load_store_analysis.cc
+++ b/compiler/optimizing/load_store_analysis.cc
@@ -283,14 +283,6 @@ bool LoadStoreAnalysis::Run() {
heap_location_collector_.CleanUp();
return false;
}
- if (heap_location_collector_.HasVolatile() || heap_location_collector_.HasMonitorOps()) {
- // Don't do load/store elimination if the method has volatile field accesses or
- // monitor operations, for now.
- // TODO: do it right.
- heap_location_collector_.CleanUp();
- return false;
- }
-
heap_location_collector_.BuildAliasingMatrix();
heap_location_collector_.DumpReferenceStats(stats_);
return true;
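
In effect, this hunk removes the whole-method bail-out: analysis no longer gives up when the method contains volatile field accesses or monitor operations. A rough before/after model of the control flow in Run() (our simplification with made-up parameter names, not the real signatures):

bool RunBefore(bool other_bail_out, bool has_volatile_or_monitor) {
  if (other_bail_out) return false;
  if (has_volatile_or_monitor) return false;  // removed by this change
  return true;  // proceed to BuildAliasingMatrix() etc.
}

bool RunAfter(bool other_bail_out) {
  if (other_bail_out) return false;
  return true;  // volatile/monitor ops are handled per instruction in LSE instead
}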
diff --git a/compiler/optimizing/load_store_analysis.h b/compiler/optimizing/load_store_analysis.h
index 4975bae2a2..b12e70b194 100644
--- a/compiler/optimizing/load_store_analysis.h
+++ b/compiler/optimizing/load_store_analysis.h
@@ -253,8 +253,6 @@ class HeapLocationCollector : public HGraphVisitor {
heap_locations_(allocator->Adapter(kArenaAllocLSA)),
aliasing_matrix_(allocator, kInitialAliasingMatrixBitVectorSize, true, kArenaAllocLSA),
has_heap_stores_(false),
- has_volatile_(false),
- has_monitor_operations_(false),
lse_type_(lse_type) {
aliasing_matrix_.ClearAllBits();
}
@@ -350,14 +348,6 @@ class HeapLocationCollector : public HGraphVisitor {
return has_heap_stores_;
}
- bool HasVolatile() const {
- return has_volatile_;
- }
-
- bool HasMonitorOps() const {
- return has_monitor_operations_;
- }
-
// Find and return the heap location index in heap_locations_.
// NOTE: When heap locations are created, potentially aliasing/overlapping
// accesses are given different indexes. This find function also
@@ -540,9 +530,6 @@ class HeapLocationCollector : public HGraphVisitor {
}
void VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
- if (field_info.IsVolatile()) {
- has_volatile_ = true;
- }
DataType::Type type = field_info.GetFieldType();
const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
const size_t offset = field_info.GetFieldOffset().SizeValue();
@@ -637,18 +624,12 @@ class HeapLocationCollector : public HGraphVisitor {
CreateReferenceInfoForReferenceType(instruction);
}
- void VisitMonitorOperation(HMonitorOperation* monitor ATTRIBUTE_UNUSED) override {
- has_monitor_operations_ = true;
- }
-
ScopedArenaAllocator* allocator_;
ScopedArenaVector<ReferenceInfo*> ref_info_array_; // All references used for heap accesses.
ScopedArenaVector<HeapLocation*> heap_locations_; // All heap locations.
ArenaBitVector aliasing_matrix_; // aliasing info between each pair of locations.
bool has_heap_stores_; // If there are no heap stores, LSE acts as GVN with better
// alias analysis and won't be as effective.
- bool has_volatile_; // If there are volatile field accesses.
- bool has_monitor_operations_; // If there are monitor operations.
LoadStoreAnalysisType lse_type_;
DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
diff --git a/compiler/optimizing/load_store_elimination.cc b/compiler/optimizing/load_store_elimination.cc
index cafc3e9c16..4862f5810b 100644
--- a/compiler/optimizing/load_store_elimination.cc
+++ b/compiler/optimizing/load_store_elimination.cc
@@ -974,13 +974,63 @@ class LSEVisitor final : private HGraphDelegateVisitor {
<< " but LSE should be the only source of predicated-ifield-gets!";
}
+ void HandleAcquireLoad(HInstruction* instruction) {
+ DCHECK((instruction->IsInstanceFieldGet() && instruction->AsInstanceFieldGet()->IsVolatile()) ||
+ (instruction->IsStaticFieldGet() && instruction->AsStaticFieldGet()->IsVolatile()) ||
+ (instruction->IsMonitorOperation() && instruction->AsMonitorOperation()->IsEnter()))
+ << "Unexpected instruction " << instruction->GetId() << ": " << instruction->DebugName();
+
+ // Acquire operations, e.g. MONITOR_ENTER, change the thread's view of memory, so we must
+ // invalidate all current values.
+ ScopedArenaVector<ValueRecord>& heap_values =
+ heap_values_for_[instruction->GetBlock()->GetBlockId()];
+ for (size_t i = 0u, size = heap_values.size(); i != size; ++i) {
+ KeepStores(heap_values[i].stored_by);
+ heap_values[i].stored_by = Value::PureUnknown();
+ heap_values[i].value = Value::PartialUnknown(heap_values[i].value);
+ }
+
+ // Note that there's no need to record the load, as subsequent acquire loads shouldn't be
+ // eliminated either.
+ }
+
+ void HandleReleaseStore(HInstruction* instruction) {
+ DCHECK((instruction->IsInstanceFieldSet() && instruction->AsInstanceFieldSet()->IsVolatile()) ||
+ (instruction->IsStaticFieldSet() && instruction->AsStaticFieldSet()->IsVolatile()) ||
+ (instruction->IsMonitorOperation() && !instruction->AsMonitorOperation()->IsEnter()))
+ << "Unexpected instruction " << instruction->GetId() << ": " << instruction->DebugName();
+
+ // Release operations, e.g. MONITOR_EXIT, do not affect this thread's view of memory, but
+ // they make this thread's prior writes visible to other threads. Therefore, we must keep
+ // the stores, but there's no need to clobber the values.
+ ScopedArenaVector<ValueRecord>& heap_values =
+ heap_values_for_[instruction->GetBlock()->GetBlockId()];
+ for (size_t i = 0u, size = heap_values.size(); i != size; ++i) {
+ KeepStores(heap_values[i].stored_by);
+ heap_values[i].stored_by = Value::PureUnknown();
+ }
+
+ // Note that there's no need to record the store, as subsequent release stores shouldn't be
+ // eliminated either.
+ }
+
void VisitInstanceFieldGet(HInstanceFieldGet* instruction) override {
+ if (instruction->IsVolatile()) {
+ HandleAcquireLoad(instruction);
+ return;
+ }
+
HInstruction* object = instruction->InputAt(0);
const FieldInfo& field = instruction->GetFieldInfo();
VisitGetLocation(instruction, heap_location_collector_.GetFieldHeapLocation(object, &field));
}
void VisitInstanceFieldSet(HInstanceFieldSet* instruction) override {
+ if (instruction->IsVolatile()) {
+ HandleReleaseStore(instruction);
+ return;
+ }
+
HInstruction* object = instruction->InputAt(0);
const FieldInfo& field = instruction->GetFieldInfo();
HInstruction* value = instruction->InputAt(1);
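
To make the asymmetry between HandleAcquireLoad and HandleReleaseStore concrete, here is a small self-contained sketch (our own types and names, not ART's): an acquire keeps prior stores and invalidates cached values, while a release keeps prior stores but leaves cached values reusable.

#include <optional>
#include <vector>

struct Location {
  std::optional<int> cached_value;  // value a later load could reuse
  bool keep_store = false;          // store may be observed by other threads
};

// Mirrors HandleAcquireLoad: stores are kept and values become unknown,
// because another thread may have written to any location.
void OnAcquire(std::vector<Location>& heap) {
  for (Location& loc : heap) {
    loc.keep_store = true;
    loc.cached_value.reset();
  }
}

// Mirrors HandleReleaseStore: stores are kept so other threads can observe
// them, but this thread's own view of the values stays intact.
void OnRelease(std::vector<Location>& heap) {
  for (Location& loc : heap) {
    loc.keep_store = true;
  }
}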
@@ -989,12 +1039,22 @@ class LSEVisitor final : private HGraphDelegateVisitor {
}
void VisitStaticFieldGet(HStaticFieldGet* instruction) override {
+ if (instruction->IsVolatile()) {
+ HandleAcquireLoad(instruction);
+ return;
+ }
+
HInstruction* cls = instruction->InputAt(0);
const FieldInfo& field = instruction->GetFieldInfo();
VisitGetLocation(instruction, heap_location_collector_.GetFieldHeapLocation(cls, &field));
}
void VisitStaticFieldSet(HStaticFieldSet* instruction) override {
+ if (instruction->IsVolatile()) {
+ HandleReleaseStore(instruction);
+ return;
+ }
+
HInstruction* cls = instruction->InputAt(0);
const FieldInfo& field = instruction->GetFieldInfo();
HInstruction* value = instruction->InputAt(1);
@@ -1002,6 +1062,14 @@ class LSEVisitor final : private HGraphDelegateVisitor {
VisitSetLocation(instruction, idx, value);
}
+ void VisitMonitorOperation(HMonitorOperation* monitor_op) override {
+ if (monitor_op->IsEnter()) {
+ HandleAcquireLoad(monitor_op);
+ } else {
+ HandleReleaseStore(monitor_op);
+ }
+ }
+
void VisitArrayGet(HArrayGet* instruction) override {
VisitGetLocation(instruction, heap_location_collector_.GetArrayHeapLocation(instruction));
}
@@ -1152,12 +1220,6 @@ class LSEVisitor final : private HGraphDelegateVisitor {
HandleThrowingInstruction(check_cast);
}
- void VisitMonitorOperation(HMonitorOperation* monitor_op) override {
- if (monitor_op->CanThrow()) {
- HandleThrowingInstruction(monitor_op);
- }
- }
-
void HandleInvoke(HInstruction* instruction) {
// If `instruction` can throw we have to presume all stores are visible.
const bool can_throw = instruction->CanThrow();
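
The rules above are the usual acquire/release discipline. As an analogy in C++ memory-model terms (the pass itself reasons about Java volatiles and monitors, not std::atomic), neither plain access below may be optimized across its barrier:

#include <atomic>

int data = 0;
std::atomic<int> ready{0};

// Writer: the plain store must stay before, and must not be eliminated
// ahead of, the release store, which is what publishes it.
void Writer() {
  data = 42;
  ready.store(1, std::memory_order_release);
}

// Reader: once the acquire load observes 1, the plain load of `data` must
// not be satisfied from any value cached before the acquire.
int Reader() {
  if (ready.load(std::memory_order_acquire) == 1) {
    return data;  // guaranteed to see 42
  }
  return -1;
}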
diff --git a/compiler/optimizing/scheduler.cc b/compiler/optimizing/scheduler.cc
index d228aba95a..d6568980c8 100644
--- a/compiler/optimizing/scheduler.cc
+++ b/compiler/optimizing/scheduler.cc
@@ -721,6 +721,7 @@ bool HScheduler::IsSchedulable(const HInstruction* instruction) const {
// HNop
// HThrow
// HTryBoundary
+ // All volatile field accesses, e.g. HInstanceFieldGet
// TODO: Some of the instructions above may be safe to schedule (maybe as
// scheduling barriers).
return instruction->IsArrayGet() ||
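
The scheduler change is the conservative counterpart: volatile field accesses are simply excluded from scheduling, so nothing is reordered across them. A toy model of that constraint (our code, not ART's scheduler API):

#include <cstddef>
#include <vector>

struct Node { bool is_barrier; };  // true for a volatile access

// A node at position `from` may be hoisted to the earlier position `to`
// only if no barrier lies in between.
bool CanHoist(const std::vector<Node>& nodes, size_t from, size_t to) {
  for (size_t i = to; i < from; ++i) {
    if (nodes[i].is_barrier) return false;
  }
  return true;
}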