diff options
| author | 2017-10-05 14:45:17 -0700 | |
|---|---|---|
| committer | 2017-11-16 09:10:32 -0800 | |
| commit | 46721ef33e8f5cd405c291d72e3f259e3085fb5f (patch) | |
| tree | 88709396bf7439400125f5d5c1883a42826cd5ca /compiler | |
| parent | 6b337da6e9498e62b4c3e399366be7ad3b99169e (diff) | |
Don't merge values for exit block in LSE.
This enables some additional optimizations since exit block doesn't
really merge values.
Test: run-test on host.
Change-Id: I21ed7e0e43a3bc5d9ed2dabfad8462129b904eb7
Diffstat (limited to 'compiler')
| -rw-r--r-- | compiler/optimizing/load_store_elimination.cc | 55 | ||||
| -rw-r--r-- | compiler/optimizing/nodes.cc | 5 | ||||
| -rw-r--r-- | compiler/optimizing/nodes.h | 1 |
3 files changed, 53 insertions, 8 deletions
diff --git a/compiler/optimizing/load_store_elimination.cc b/compiler/optimizing/load_store_elimination.cc index 605fdae0f8..89ad85e0b4 100644 --- a/compiler/optimizing/load_store_elimination.cc +++ b/compiler/optimizing/load_store_elimination.cc @@ -199,6 +199,12 @@ class LSEVisitor : public HGraphDelegateVisitor { if (predecessors.size() == 0) { return; } + if (block->IsExitBlock()) { + // Exit block doesn't really merge values since the control flow ends in + // its predecessors. Each predecessor needs to make sure stores are kept + // if necessary. + return; + } ScopedArenaVector<HInstruction*>& heap_values = heap_values_for_[block->GetBlockId()]; for (size_t i = 0; i < heap_values.size(); i++) { @@ -233,15 +239,23 @@ class LSEVisitor : public HGraphDelegateVisitor { } } - if (merged_value == kUnknownHeapValue || ref_info->IsSingletonAndNonRemovable()) { - // There are conflicting heap values from different predecessors, - // or the heap value may be needed after method return or deoptimization. - // Keep the last store in each predecessor since future loads cannot be eliminated. - for (HBasicBlock* predecessor : predecessors) { - ScopedArenaVector<HInstruction*>& pred_values = - heap_values_for_[predecessor->GetBlockId()]; - KeepIfIsStore(pred_values[i]); + if (ref_info->IsSingleton()) { + if (ref_info->IsSingletonAndNonRemovable() || + (merged_value == kUnknownHeapValue && + !block->IsSingleReturnOrReturnVoidAllowingPhis())) { + // The heap value may be needed after method return or deoptimization, + // or there are conflicting heap values from different predecessors and + // this block is not a single return, + // keep the last store in each predecessor since future loads may not + // be eliminated. + for (HBasicBlock* predecessor : predecessors) { + ScopedArenaVector<HInstruction*>& pred_values = + heap_values_for_[predecessor->GetBlockId()]; + KeepIfIsStore(pred_values[i]); + } } + } else { + // Currently we don't eliminate stores to non-singletons. 
} if ((merged_value == nullptr) || !from_all_predecessors) { @@ -549,6 +563,31 @@ class LSEVisitor : public HGraphDelegateVisitor { } } + // Keep necessary stores before exiting a method via return/throw. + void HandleExit(HBasicBlock* block) { + const ScopedArenaVector<HInstruction*>& heap_values = + heap_values_for_[block->GetBlockId()]; + for (size_t i = 0; i < heap_values.size(); i++) { + HInstruction* heap_value = heap_values[i]; + ReferenceInfo* ref_info = heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo(); + if (!ref_info->IsSingletonAndRemovable()) { + KeepIfIsStore(heap_value); + } + } + } + + void VisitReturn(HReturn* instruction) OVERRIDE { + HandleExit(instruction->GetBlock()); + } + + void VisitReturnVoid(HReturnVoid* return_void) OVERRIDE { + HandleExit(return_void->GetBlock()); + } + + void VisitThrow(HThrow* throw_instruction) OVERRIDE { + HandleExit(throw_instruction->GetBlock()); + } + void HandleInvoke(HInstruction* instruction) { SideEffects side_effects = instruction->GetSideEffects(); ScopedArenaVector<HInstruction*>& heap_values = diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc index f4f6434678..fff61f5727 100644 --- a/compiler/optimizing/nodes.cc +++ b/compiler/optimizing/nodes.cc @@ -1810,6 +1810,11 @@ bool HBasicBlock::IsSingleReturn() const { return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsReturn(); } +bool HBasicBlock::IsSingleReturnOrReturnVoidAllowingPhis() const { + return (GetFirstInstruction() == GetLastInstruction()) && + (GetLastInstruction()->IsReturn() || GetLastInstruction()->IsReturnVoid()); +} + bool HBasicBlock::IsSingleTryBoundary() const { return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsTryBoundary(); } diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h index 29c78a1e34..6672901781 100644 --- a/compiler/optimizing/nodes.h +++ b/compiler/optimizing/nodes.h @@ -968,6 +968,7 @@ class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> 
{ bool IsSingleGoto() const; bool IsSingleReturn() const; + bool IsSingleReturnOrReturnVoidAllowingPhis() const; bool IsSingleTryBoundary() const; // Returns true if this block emits nothing but a jump. |