Diffstat (limited to 'compiler/optimizing/nodes.cc')
-rw-r--r--  compiler/optimizing/nodes.cc | 247
1 file changed, 212 insertions(+), 35 deletions(-)
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index b82e37cb4e..f2b63ae678 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -98,26 +98,31 @@ void HGraph::VisitBlockForBackEdges(HBasicBlock* block,
}
void HGraph::BuildDominatorTree() {
+ // (1) Simplify the CFG so that catch blocks have only exceptional incoming
+ // edges. This invariant simplifies building SSA form because Phis cannot
+ // collect both normal- and exceptional-flow values at the same time.
+ SimplifyCatchBlocks();
+
ArenaBitVector visited(arena_, blocks_.Size(), false);
- // (1) Find the back edges in the graph doing a DFS traversal.
+ // (2) Find the back edges in the graph doing a DFS traversal.
FindBackEdges(&visited);
- // (2) Remove instructions and phis from blocks not visited during
+ // (3) Remove instructions and phis from blocks not visited during
// the initial DFS as users from other instructions, so that
// users can be safely removed before uses later.
RemoveInstructionsAsUsersFromDeadBlocks(visited);
- // (3) Remove blocks not visited during the initial DFS.
+ // (4) Remove blocks not visited during the initial DFS.
- // Step (4) requires dead blocks to be removed from the
+ // Step (5) requires dead blocks to be removed from the
// predecessors list of live blocks.
RemoveDeadBlocks(visited);
- // (4) Simplify the CFG now, so that we don't need to recompute
+ // (5) Simplify the CFG now, so that we don't need to recompute
// dominators and the reverse post order.
SimplifyCFG();
- // (5) Compute the dominance information and the reverse post order.
+ // (6) Compute the dominance information and the reverse post order.
ComputeDominanceInformation();
}
@@ -261,6 +266,83 @@ void HGraph::SimplifyLoop(HBasicBlock* header) {
info->SetSuspendCheck(first_instruction->AsSuspendCheck());
}
+static bool CheckIfPredecessorAtIsExceptional(const HBasicBlock& block, size_t pred_idx) {
+ HBasicBlock* predecessor = block.GetPredecessors().Get(pred_idx);
+ if (!predecessor->EndsWithTryBoundary()) {
+ // Only edges from HTryBoundary can be exceptional.
+ return false;
+ }
+ HTryBoundary* try_boundary = predecessor->GetLastInstruction()->AsTryBoundary();
+ if (try_boundary->GetNormalFlowSuccessor() == &block) {
+ // This block is the normal-flow successor of `try_boundary`, but it could
+ // also be one of its exception handlers if catch blocks have not been
+ // simplified yet. Predecessors are unordered, so we will consider the first
+ // occurrence to be the normal edge and a possible second occurrence to be
+ // the exceptional edge.
+ return !block.IsFirstIndexOfPredecessor(predecessor, pred_idx);
+ } else {
+ // This is not the normal-flow successor of `try_boundary`, hence it must be
+ // one of its exception handlers.
+ DCHECK(try_boundary->HasExceptionHandler(block));
+ return true;
+ }
+}
+
+void HGraph::SimplifyCatchBlocks() {
+ for (size_t i = 0; i < blocks_.Size(); ++i) {
+ HBasicBlock* catch_block = blocks_.Get(i);
+ if (!catch_block->IsCatchBlock()) {
+ continue;
+ }
+
+ bool exceptional_predecessors_only = true;
+ for (size_t j = 0; j < catch_block->GetPredecessors().Size(); ++j) {
+ if (!CheckIfPredecessorAtIsExceptional(*catch_block, j)) {
+ exceptional_predecessors_only = false;
+ break;
+ }
+ }
+
+ if (!exceptional_predecessors_only) {
+ // Catch block has normal-flow predecessors and needs to be simplified.
+ // Splitting the block before its first instruction moves all its
+ // instructions into `normal_block` and links the two blocks with a Goto.
+ // Afterwards, incoming normal-flow edges are re-linked to `normal_block`,
+ // leaving `catch_block` with the exceptional edges only.
+ // Note that catch blocks with normal-flow predecessors cannot begin with
+ // a MOVE_EXCEPTION instruction, as guaranteed by the verifier.
+ DCHECK(!catch_block->GetFirstInstruction()->IsLoadException());
+ HBasicBlock* normal_block = catch_block->SplitBefore(catch_block->GetFirstInstruction());
+ for (size_t j = 0; j < catch_block->GetPredecessors().Size(); ++j) {
+ if (!CheckIfPredecessorAtIsExceptional(*catch_block, j)) {
+ catch_block->GetPredecessors().Get(j)->ReplaceSuccessor(catch_block, normal_block);
+ --j;
+ }
+ }
+ }
+ }
+}
+
+void HGraph::ComputeTryBlockInformation() {
+ // Iterate in reverse post order to propagate try membership information from
+ // predecessors to their successors.
+ for (HReversePostOrderIterator it(*this); !it.Done(); it.Advance()) {
+ HBasicBlock* block = it.Current();
+ if (block->IsEntryBlock() || block->IsCatchBlock()) {
+ // Catch blocks after simplification have only exceptional predecessors
+ // and hence are never in tries.
+ continue;
+ }
+
+ // Infer try membership from the first predecessor. Having simplified loops,
+ // the first predecessor can never be a back edge and therefore it must have
+ // been visited already and had its try membership set.
+ HBasicBlock* first_predecessor = block->GetPredecessors().Get(0);
+ DCHECK(!block->IsLoopHeader() || !block->GetLoopInformation()->IsBackEdge(*first_predecessor));
+ block->SetTryEntry(first_predecessor->ComputeTryEntryOfSuccessors());
+ }
+}
+
void HGraph::SimplifyCFG() {
// Simplify the CFG for future analysis, and code generation:
// (1): Split critical edges.
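A standalone sketch of the edge re-routing performed by SimplifyCatchBlocks() in the hunk above. It is not part of this change; the toy Block type, the ReplaceSuccessor() helper, and the block names are purely illustrative. It mirrors the predecessor loop, including the index decrement needed because re-linking shrinks the predecessor list while it is being iterated:

#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

struct Block {
  std::string name;
  std::vector<Block*> predecessors;
};

// Toy counterpart of HBasicBlock::ReplaceSuccessor(): move `pred` from the
// predecessor list of `old_succ` to that of `new_succ`.
void ReplaceSuccessor(Block* pred, Block* old_succ, Block* new_succ) {
  auto& preds = old_succ->predecessors;
  preds.erase(std::find(preds.begin(), preds.end(), pred));
  new_succ->predecessors.push_back(pred);
}

int main() {
  Block normal_pred{"fallthrough", {}};
  Block exceptional_pred{"try_boundary", {}};
  Block catch_block{"catch", {&normal_pred, &exceptional_pred}};
  Block normal_block{"catch.split", {}};  // stands in for the SplitBefore() result

  // Mirror of the loop in SimplifyCatchBlocks(): re-route every normal-flow
  // predecessor to `normal_block`; decrement the index because the list
  // shrinks underneath the loop.
  for (size_t j = 0; j < catch_block.predecessors.size(); ++j) {
    bool is_exceptional = (catch_block.predecessors[j] == &exceptional_pred);
    if (!is_exceptional) {
      ReplaceSuccessor(catch_block.predecessors[j], &catch_block, &normal_block);
      --j;
    }
  }

  // Afterwards the catch block keeps only its exceptional predecessor, which is
  // the invariant that step (1) of BuildDominatorTree() relies on.
  std::cout << "catch preds: " << catch_block.predecessors.size()
            << ", normal preds: " << normal_block.predecessors.size() << std::endl;
  return 0;
}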
@@ -268,9 +350,10 @@ void HGraph::SimplifyCFG() {
for (size_t i = 0; i < blocks_.Size(); ++i) {
HBasicBlock* block = blocks_.Get(i);
if (block == nullptr) continue;
- if (block->GetSuccessors().Size() > 1) {
+ if (block->NumberOfNormalSuccessors() > 1) {
for (size_t j = 0; j < block->GetSuccessors().Size(); ++j) {
HBasicBlock* successor = block->GetSuccessors().Get(j);
+ DCHECK(!successor->IsCatchBlock());
if (successor->GetPredecessors().Size() > 1) {
SplitCriticalEdge(block, successor);
--j;
@@ -288,6 +371,11 @@ bool HGraph::AnalyzeNaturalLoops() const {
for (HReversePostOrderIterator it(*this); !it.Done(); it.Advance()) {
HBasicBlock* block = it.Current();
if (block->IsLoopHeader()) {
+ if (block->IsCatchBlock()) {
+ // TODO: Dealing with exceptional back edges could be tricky because
+ // they only approximate the real control flow. Bail out for now.
+ return false;
+ }
HLoopInformation* info = block->GetLoopInformation();
if (!info->Populate()) {
// Abort if the loop is non natural. We currently bailout in such cases.
@@ -917,32 +1005,25 @@ HConstant* HTypeConversion::TryStaticEvaluation() const {
HConstant* HUnaryOperation::TryStaticEvaluation() const {
if (GetInput()->IsIntConstant()) {
- int32_t value = Evaluate(GetInput()->AsIntConstant()->GetValue());
- return GetBlock()->GetGraph()->GetIntConstant(value);
+ return Evaluate(GetInput()->AsIntConstant());
} else if (GetInput()->IsLongConstant()) {
- // TODO: Implement static evaluation of long unary operations.
- //
- // Do not exit with a fatal condition here. Instead, simply
- // return `null' to notify the caller that this instruction
- // cannot (yet) be statically evaluated.
- return nullptr;
+ return Evaluate(GetInput()->AsLongConstant());
}
return nullptr;
}
HConstant* HBinaryOperation::TryStaticEvaluation() const {
- if (GetLeft()->IsIntConstant() && GetRight()->IsIntConstant()) {
- int32_t value = Evaluate(GetLeft()->AsIntConstant()->GetValue(),
- GetRight()->AsIntConstant()->GetValue());
- return GetBlock()->GetGraph()->GetIntConstant(value);
- } else if (GetLeft()->IsLongConstant() && GetRight()->IsLongConstant()) {
- int64_t value = Evaluate(GetLeft()->AsLongConstant()->GetValue(),
- GetRight()->AsLongConstant()->GetValue());
- if (GetResultType() == Primitive::kPrimLong) {
- return GetBlock()->GetGraph()->GetLongConstant(value);
- } else {
- DCHECK_EQ(GetResultType(), Primitive::kPrimInt);
- return GetBlock()->GetGraph()->GetIntConstant(static_cast<int32_t>(value));
+ if (GetLeft()->IsIntConstant()) {
+ if (GetRight()->IsIntConstant()) {
+ return Evaluate(GetLeft()->AsIntConstant(), GetRight()->AsIntConstant());
+ } else if (GetRight()->IsLongConstant()) {
+ return Evaluate(GetLeft()->AsIntConstant(), GetRight()->AsLongConstant());
+ }
+ } else if (GetLeft()->IsLongConstant()) {
+ if (GetRight()->IsIntConstant()) {
+ return Evaluate(GetLeft()->AsLongConstant(), GetRight()->AsIntConstant());
+ } else if (GetRight()->IsLongConstant()) {
+ return Evaluate(GetLeft()->AsLongConstant(), GetRight()->AsLongConstant());
}
}
return nullptr;
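Alongside the TryStaticEvaluation() changes above, a standalone sketch of the overload-based dispatch they rely on. This is not ART code; the ShlFolder type is illustrative, and using a shift (64-bit value, 32-bit distance) as the motivating case for mixed int/long operands is an assumption of the sketch, not something stated by this change:

#include <cstdint>
#include <iostream>

struct ShlFolder {
  // Matching 32-bit operands: only the low 5 bits of the distance matter.
  static int32_t Evaluate(int32_t value, int32_t distance) {
    return value << (distance & 0x1f);
  }
  // Mixed (64-bit value, 32-bit distance) operands: only the low 6 bits matter.
  static int64_t Evaluate(int64_t value, int32_t distance) {
    return value << (distance & 0x3f);
  }
};

int main() {
  std::cout << ShlFolder::Evaluate(1, 20) << "\n";           // 1048576
  std::cout << ShlFolder::Evaluate(int64_t{1}, 40) << "\n";  // 1099511627776
  return 0;
}

The pattern matches the dispatch above: the caller inspects the constant kinds once and picks an overload, so each operation only has to define the operand combinations that are meaningful for it.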
@@ -1083,10 +1164,20 @@ HBasicBlock* HBasicBlock::SplitAfter(HInstruction* cursor) {
return new_block;
}
-bool HBasicBlock::IsExceptionalSuccessor(size_t idx) const {
- return !GetInstructions().IsEmpty()
- && GetLastInstruction()->IsTryBoundary()
- && GetLastInstruction()->AsTryBoundary()->IsExceptionalSuccessor(idx);
+HTryBoundary* HBasicBlock::ComputeTryEntryOfSuccessors() const {
+ if (EndsWithTryBoundary()) {
+ HTryBoundary* try_boundary = GetLastInstruction()->AsTryBoundary();
+ if (try_boundary->IsEntry()) {
+ DCHECK(try_entry_ == nullptr);
+ return try_boundary;
+ } else {
+ DCHECK(try_entry_ != nullptr);
+ DCHECK(try_entry_->HasSameExceptionHandlersAs(*try_boundary));
+ return nullptr;
+ }
+ } else {
+ return try_entry_;
+ }
}
static bool HasOnlyOneInstruction(const HBasicBlock& block) {
@@ -1111,10 +1202,31 @@ bool HBasicBlock::EndsWithIf() const {
return !GetInstructions().IsEmpty() && GetLastInstruction()->IsIf();
}
+bool HBasicBlock::EndsWithTryBoundary() const {
+ return !GetInstructions().IsEmpty() && GetLastInstruction()->IsTryBoundary();
+}
+
bool HBasicBlock::HasSinglePhi() const {
return !GetPhis().IsEmpty() && GetFirstPhi()->GetNext() == nullptr;
}
+bool HTryBoundary::HasSameExceptionHandlersAs(const HTryBoundary& other) const {
+ if (GetBlock()->GetSuccessors().Size() != other.GetBlock()->GetSuccessors().Size()) {
+ return false;
+ }
+
+ // Exception handlers need to be stored in the same order.
+ for (HExceptionHandlerIterator it1(*this), it2(other);
+ !it1.Done();
+ it1.Advance(), it2.Advance()) {
+ DCHECK(!it2.Done());
+ if (it1.Current() != it2.Current()) {
+ return false;
+ }
+ }
+ return true;
+}
+
size_t HInstructionList::CountSize() const {
size_t size = 0;
HInstruction* current = first_instruction_;
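The HasSameExceptionHandlersAs() check added above compares handlers pairwise and in order, so the same handlers listed in a different order do not count as equal. A minimal standalone illustration with plain vectors (the names below are illustrative, not ART types):

#include <iostream>
#include <string>
#include <vector>

// Order-sensitive, element-wise comparison, like the iterator walk above.
bool SameHandlersInOrder(const std::vector<std::string>& a,
                         const std::vector<std::string>& b) {
  return a == b;
}

int main() {
  std::vector<std::string> first = {"java.lang.ArithmeticException", "java.lang.Throwable"};
  std::vector<std::string> second = {"java.lang.Throwable", "java.lang.ArithmeticException"};
  std::cout << std::boolalpha
            << SameHandlersInOrder(first, first) << "\n"    // true
            << SameHandlersInOrder(first, second) << "\n";  // false
  return 0;
}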
@@ -1365,7 +1477,7 @@ void HGraph::DeleteDeadBlock(HBasicBlock* block) {
blocks_.Put(block->GetBlockId(), nullptr);
}
-void HGraph::InlineInto(HGraph* outer_graph, HInvoke* invoke) {
+HInstruction* HGraph::InlineInto(HGraph* outer_graph, HInvoke* invoke) {
DCHECK(HasExitBlock()) << "Unimplemented scenario";
// Update the environments in this graph to have the invoke's environment
// as parent.
@@ -1390,6 +1502,7 @@ void HGraph::InlineInto(HGraph* outer_graph, HInvoke* invoke) {
outer_graph->SetHasBoundsChecks(true);
}
+ HInstruction* return_value = nullptr;
if (GetBlocks().Size() == 3) {
// Simple case of an entry block, a body block, and an exit block.
// Put the body block's instruction into `invoke`'s block.
@@ -1404,7 +1517,8 @@ void HGraph::InlineInto(HGraph* outer_graph, HInvoke* invoke) {
// Replace the invoke with the return value of the inlined graph.
if (last->IsReturn()) {
- invoke->ReplaceWith(last->InputAt(0));
+ return_value = last->InputAt(0);
+ invoke->ReplaceWith(return_value);
} else {
DCHECK(last->IsReturnVoid());
}
@@ -1426,7 +1540,6 @@ void HGraph::InlineInto(HGraph* outer_graph, HInvoke* invoke) {
// Update all predecessors of the exit block (now the `to` block)
// to not `HReturn` but `HGoto` instead.
- HInstruction* return_value = nullptr;
bool returns_void = to->GetPredecessors().Get(0)->GetLastInstruction()->IsReturnVoid();
if (to->GetPredecessors().Size() == 1) {
HBasicBlock* predecessor = to->GetPredecessors().Get(0);
@@ -1560,6 +1673,8 @@ void HGraph::InlineInto(HGraph* outer_graph, HInvoke* invoke) {
// Finally remove the invoke from the caller.
invoke->GetBlock()->RemoveInstruction(invoke);
+
+ return return_value;
}
/*
@@ -1637,14 +1752,76 @@ void HGraph::TransformLoopHeaderForBCE(HBasicBlock* header) {
}
}
+void HInstruction::SetReferenceTypeInfo(ReferenceTypeInfo rti) {
+ if (kIsDebugBuild) {
+ DCHECK_EQ(GetType(), Primitive::kPrimNot);
+ ScopedObjectAccess soa(Thread::Current());
+ DCHECK(rti.IsValid()) << "Invalid RTI for " << DebugName();
+ if (IsBoundType()) {
+ // Having the test here spares us from making the method virtual just for
+ // the sake of a DCHECK.
+ ReferenceTypeInfo upper_bound_rti = AsBoundType()->GetUpperBound();
+ DCHECK(upper_bound_rti.IsSupertypeOf(rti))
+ << " upper_bound_rti: " << upper_bound_rti
+ << " rti: " << rti;
+ DCHECK(!upper_bound_rti.GetTypeHandle()->IsFinal() || rti.IsExact());
+ }
+ }
+ reference_type_info_ = rti;
+}
+
+ReferenceTypeInfo::ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
+
+ReferenceTypeInfo::ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
+ : type_handle_(type_handle), is_exact_(is_exact) {
+ if (kIsDebugBuild) {
+ ScopedObjectAccess soa(Thread::Current());
+ DCHECK(IsValidHandle(type_handle));
+ }
+}
+
std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs) {
ScopedObjectAccess soa(Thread::Current());
os << "["
- << " is_top=" << rhs.IsTop()
- << " type=" << (rhs.IsTop() ? "?" : PrettyClass(rhs.GetTypeHandle().Get()))
+ << " is_valid=" << rhs.IsValid()
+ << " type=" << (!rhs.IsValid() ? "?" : PrettyClass(rhs.GetTypeHandle().Get()))
<< " is_exact=" << rhs.IsExact()
<< " ]";
return os;
}
+bool HInstruction::HasAnyEnvironmentUseBefore(HInstruction* other) {
+ // For now, assume that instructions in different blocks may use the
+ // environment.
+ // TODO: Use the control flow to decide if this is true.
+ if (GetBlock() != other->GetBlock()) {
+ return true;
+ }
+
+ // We know that we are in the same block. Walk from 'this' to 'other',
+ // checking to see if there is any instruction with an environment.
+ HInstruction* current = this;
+ for (; current != other && current != nullptr; current = current->GetNext()) {
+ // This is a conservative check, as the instruction result may not be in
+ // the referenced environment.
+ if (current->HasEnvironment()) {
+ return true;
+ }
+ }
+
+ // We should have been called with 'this' before 'other' in the block.
+ // Just confirm this.
+ DCHECK(current != nullptr);
+ return false;
+}
+
+void HInstruction::RemoveEnvironmentUsers() {
+ for (HUseIterator<HEnvironment*> use_it(GetEnvUses()); !use_it.Done(); use_it.Advance()) {
+ HUseListNode<HEnvironment*>* user_node = use_it.Current();
+ HEnvironment* user = user_node->GetUser();
+ user->SetRawEnvAt(user_node->GetIndex(), nullptr);
+ }
+ env_uses_.Clear();
+}
+
} // namespace art
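A standalone sketch of the forward walk HasAnyEnvironmentUseBefore() performs when both instructions sit in the same block. The Insn type is illustrative, not ART's HInstruction; like the real code, the walk starts at the first instruction, stops before the second, and conservatively reports true as soon as any instruction in between carries an environment:

#include <iostream>

struct Insn {
  bool has_environment;
  Insn* next;
};

bool AnyEnvironmentBetween(Insn* start, Insn* stop) {
  for (Insn* cur = start; cur != stop && cur != nullptr; cur = cur->next) {
    if (cur->has_environment) {
      return true;  // conservative: any environment in the range counts
    }
  }
  return false;
}

int main() {
  Insn c{false, nullptr};
  Insn b{true, &c};   // e.g. an instruction that needs deopt information
  Insn a{false, &b};
  std::cout << std::boolalpha
            << AnyEnvironmentBetween(&a, &b) << "\n"   // false: b is the stop, not inspected
            << AnyEnvironmentBetween(&a, &c) << "\n";  // true: b lies between a and c
  return 0;
}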