Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/ssa_liveness_analysis.cc  34
-rw-r--r--  compiler/optimizing/ssa_liveness_analysis.h   34
2 files changed, 34 insertions(+), 34 deletions(-)
diff --git a/compiler/optimizing/ssa_liveness_analysis.cc b/compiler/optimizing/ssa_liveness_analysis.cc
index a01e107e02..a4d52d7761 100644
--- a/compiler/optimizing/ssa_liveness_analysis.cc
+++ b/compiler/optimizing/ssa_liveness_analysis.cc
@@ -59,6 +59,38 @@ static void AddToListForLinearization(ArenaVector<HBasicBlock*>* worklist, HBasi
   worklist->insert(insert_pos.base(), block);
 }
+static bool IsLinearOrderWellFormed(const HGraph& graph) {
+  for (HBasicBlock* header : graph.GetBlocks()) {
+    if (header == nullptr || !header->IsLoopHeader()) {
+      continue;
+    }
+
+    HLoopInformation* loop = header->GetLoopInformation();
+    size_t num_blocks = loop->GetBlocks().NumSetBits();
+    size_t found_blocks = 0u;
+
+    for (HLinearOrderIterator it(graph); !it.Done(); it.Advance()) {
+      HBasicBlock* current = it.Current();
+      if (loop->Contains(*current)) {
+        found_blocks++;
+        if (found_blocks == 1u && current != header) {
+          // First block is not the header.
+          return false;
+        } else if (found_blocks == num_blocks && !loop->IsBackEdge(*current)) {
+          // Last block is not a back edge.
+          return false;
+        }
+      } else if (found_blocks != 0u && found_blocks != num_blocks) {
+        // Blocks are not adjacent.
+        return false;
+      }
+    }
+    DCHECK_EQ(found_blocks, num_blocks);
+  }
+
+  return true;
+}
+
 void SsaLivenessAnalysis::LinearizeGraph() {
   // Create a reverse post ordering with the following properties:
   // - Blocks in a loop are consecutive,
@@ -100,6 +132,8 @@ void SsaLivenessAnalysis::LinearizeGraph() {
       forward_predecessors[block_id] = number_of_remaining_predecessors - 1;
     }
   } while (!worklist.empty());
+
+  DCHECK(graph_->HasIrreducibleLoops() || IsLinearOrderWellFormed(*graph_));
 }
 void SsaLivenessAnalysis::NumberInstructions() {
diff --git a/compiler/optimizing/ssa_liveness_analysis.h b/compiler/optimizing/ssa_liveness_analysis.h
index 92788fe6b8..9f94c8316b 100644
--- a/compiler/optimizing/ssa_liveness_analysis.h
+++ b/compiler/optimizing/ssa_liveness_analysis.h
@@ -983,38 +983,6 @@ class LiveInterval : public ArenaObject<kArenaAllocSsaLiveness> {
     return false;
   }
-  bool IsLinearOrderWellFormed(const HGraph& graph) {
-    for (HBasicBlock* header : graph.GetBlocks()) {
-      if (header == nullptr || !header->IsLoopHeader()) {
-        continue;
-      }
-
-      HLoopInformation* loop = header->GetLoopInformation();
-      size_t num_blocks = loop->GetBlocks().NumSetBits();
-      size_t found_blocks = 0u;
-
-      for (HLinearOrderIterator it(graph); !it.Done(); it.Advance()) {
-        HBasicBlock* current = it.Current();
-        if (loop->Contains(*current)) {
-          found_blocks++;
-          if (found_blocks == 1u && current != header) {
-            // First block is not the header.
-            return false;
-          } else if (found_blocks == num_blocks && !loop->IsBackEdge(*current)) {
-            // Last block is not a back edge.
-            return false;
-          }
-        } else if (found_blocks != 0u && found_blocks != num_blocks) {
-          // Blocks are not adjacent.
-          return false;
-        }
-      }
-      DCHECK_EQ(found_blocks, num_blocks);
-    }
-
-    return true;
-  }
-
   void AddBackEdgeUses(const HBasicBlock& block_at_use) {
     DCHECK(block_at_use.IsInLoop());
     if (block_at_use.GetGraph()->HasIrreducibleLoops()) {
@@ -1024,8 +992,6 @@ class LiveInterval : public ArenaObject<kArenaAllocSsaLiveness> {
       return;
     }
-    DCHECK(IsLinearOrderWellFormed(*block_at_use.GetGraph()));
-
     // Add synthesized uses at the back edge of loops to help the register allocator.
     // Note that this method is called in decreasing liveness order, to facilitate adding
     // uses at the head of the `first_use_` linked list. Because below
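
Note for readers skimming the diff (not part of the commit): the moved predicate encodes the invariant that, in the linear order, every reducible loop occupies one contiguous run of blocks that begins at the loop header and ends at a back edge. Below is a minimal standalone sketch of that invariant, using a hypothetical Loop struct and integer block ids instead of ART's HLoopInformation and HBasicBlock; it is an illustration under those assumptions, not code from the commit.

#include <cstddef>
#include <unordered_set>
#include <vector>

// Hypothetical stand-ins for ART's block ids and loop information,
// used only to illustrate the invariant checked by IsLinearOrderWellFormed.
struct Loop {
  int header;                          // id of the loop header block
  std::unordered_set<int> blocks;      // ids of all blocks belonging to the loop
  std::unordered_set<int> back_edges;  // ids of blocks that end with a back edge
};

// True if the loop's blocks form one contiguous run in `linear_order`
// that starts at the header and ends at a back edge.
bool IsContiguousInLinearOrder(const Loop& loop, const std::vector<int>& linear_order) {
  size_t num_blocks = loop.blocks.size();
  size_t found_blocks = 0u;
  for (int block : linear_order) {
    if (loop.blocks.count(block) != 0u) {
      ++found_blocks;
      if (found_blocks == 1u && block != loop.header) {
        return false;  // First loop block in the order is not the header.
      }
      if (found_blocks == num_blocks && loop.back_edges.count(block) == 0u) {
        return false;  // Last loop block in the order is not a back edge.
      }
    } else if (found_blocks != 0u && found_blocks != num_blocks) {
      return false;  // A non-loop block interrupts the loop's run of blocks.
    }
  }
  return found_blocks == num_blocks;
}

With irreducible loops this invariant need not hold, which is why the new DCHECK in LinearizeGraph() is guarded by graph_->HasIrreducibleLoops(), mirroring the early return kept in AddBackEdgeUses().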