/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dead_code_elimination.h"

#include "android-base/logging.h"
#include "base/array_ref.h"
#include "base/bit_vector-inl.h"
#include "base/scoped_arena_allocator.h"
#include "base/scoped_arena_containers.h"
#include "base/stl_util.h"
#include "optimizing/nodes.h"
#include "ssa_phi_elimination.h"

namespace art {

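// Performs a depth-first search from the entry block and marks every block
// that can still be reached. Successors that are provably never taken are
// skipped: an HIf whose condition is a constant contributes only its live
// branch, and an HPackedSwitch with a constant input contributes only the
// matching case (or the default block).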
static void MarkReachableBlocks(HGraph* graph, ArenaBitVector* visited) {
  // Use local allocator for allocating memory.
  ScopedArenaAllocator allocator(graph->GetArenaStack());

  ScopedArenaVector<HBasicBlock*> worklist(allocator.Adapter(kArenaAllocDCE));
  constexpr size_t kDefaultWorklistSize = 8;
  worklist.reserve(kDefaultWorklistSize);
  visited->SetBit(graph->GetEntryBlock()->GetBlockId());
  worklist.push_back(graph->GetEntryBlock());

  while (!worklist.empty()) {
    HBasicBlock* block = worklist.back();
    worklist.pop_back();
    int block_id = block->GetBlockId();
    DCHECK(visited->IsBitSet(block_id));

    ArrayRef<HBasicBlock* const> live_successors(block->GetSuccessors());
    HInstruction* last_instruction = block->GetLastInstruction();
    if (last_instruction->IsIf()) {
      HIf* if_instruction = last_instruction->AsIf();
      HInstruction* condition = if_instruction->InputAt(0);
      if (condition->IsIntConstant()) {
        if (condition->AsIntConstant()->IsTrue()) {
          live_successors = live_successors.SubArray(0u, 1u);
          DCHECK_EQ(live_successors[0], if_instruction->IfTrueSuccessor());
        } else {
          DCHECK(condition->AsIntConstant()->IsFalse()) << condition->AsIntConstant()->GetValue();
          live_successors = live_successors.SubArray(1u, 1u);
          DCHECK_EQ(live_successors[0], if_instruction->IfFalseSuccessor());
        }
      }
    } else if (last_instruction->IsPackedSwitch()) {
      HPackedSwitch* switch_instruction = last_instruction->AsPackedSwitch();
      HInstruction* switch_input = switch_instruction->InputAt(0);
      if (switch_input->IsIntConstant()) {
        int32_t switch_value = switch_input->AsIntConstant()->GetValue();
        int32_t start_value = switch_instruction->GetStartValue();
        // Note: Though the spec forbids packed-switch values to wrap around, we leave
        // that task to the verifier and use unsigned arithmetic with its "modulo 2^32"
        // semantics to check if the value is in range, wrapped or not.
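        // For example, with start_value = 1 and 3 entries (covering 1, 2, 3),
        // switch_value = 0 yields switch_index = 0xFFFFFFFF, which is >= 3, so the
        // default successor is selected; switch_value = 2 yields switch_index = 1.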
        uint32_t switch_index =
            static_cast<uint32_t>(switch_value) - static_cast<uint32_t>(start_value);
        if (switch_index < switch_instruction->GetNumEntries()) {
          live_successors = live_successors.SubArray(switch_index, 1u);
          DCHECK_EQ(live_successors[0], block->GetSuccessors()[switch_index]);
        } else {
          live_successors = live_successors.SubArray(switch_instruction->GetNumEntries(), 1u);
          DCHECK_EQ(live_successors[0], switch_instruction->GetDefaultBlock());
        }
      }
    }

    for (HBasicBlock* successor : live_successors) {
      // Add only those successors that have not been visited yet.
      if (!visited->IsBitSet(successor->GetBlockId())) {
        visited->SetBit(successor->GetBlockId());
        worklist.push_back(successor);
      }
    }
  }
}

void HDeadCodeElimination::MaybeRecordDeadBlock(HBasicBlock* block) {
  if (stats_ != nullptr) {
    stats_->RecordStat(MethodCompilationStat::kRemovedDeadInstruction,
                       block->GetPhis().CountSize() + block->GetInstructions().CountSize());
  }
}

void HDeadCodeElimination::MaybeRecordSimplifyIf() {
  if (stats_ != nullptr) {
    stats_->RecordStat(MethodCompilationStat::kSimplifyIf);
  }
}

static bool HasInput(HCondition* instruction, HInstruction* input) {
  return (instruction->InputAt(0) == input) ||
         (instruction->InputAt(1) == input);
}

static bool HasEquality(IfCondition condition) {
  switch (condition) {
    case kCondEQ:
    case kCondLE:
    case kCondGE:
    case kCondBE:
    case kCondAE:
      return true;
    case kCondNE:
    case kCondLT:
    case kCondGT:
    case kCondB:
    case kCondA:
      return false;
  }
}

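// Attempts to statically evaluate `condition` applied to `left` and `right`
// and returns the resulting constant, or null if the outcome is unknown.
// Note that a non-floating-point condition whose two inputs are the same
// instruction folds even when that input is not a constant, e.g. (x <= x) is
// always true; floating-point types are excluded (e.g. because NaN != NaN).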
static HConstant* Evaluate(HCondition* condition, HInstruction* left, HInstruction* right) {
  if (left == right && !DataType::IsFloatingPointType(left->GetType())) {
    return condition->GetBlock()->GetGraph()->GetIntConstant(
        HasEquality(condition->GetCondition()) ? 1 : 0);
  }

  if (!left->IsConstant() || !right->IsConstant()) {
    return nullptr;
  }

  if (left->IsIntConstant()) {
    return condition->Evaluate(left->AsIntConstant(), right->AsIntConstant());
  } else if (left->IsNullConstant()) {
    return condition->Evaluate(left->AsNullConstant(), right->AsNullConstant());
  } else if (left->IsLongConstant()) {
    return condition->Evaluate(left->AsLongConstant(), right->AsLongConstant());
  } else if (left->IsFloatConstant()) {
    return condition->Evaluate(left->AsFloatConstant(), right->AsFloatConstant());
  } else {
    DCHECK(left->IsDoubleConstant());
    return condition->Evaluate(left->AsDoubleConstant(), right->AsDoubleConstant());
  }
}

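// Detects the pattern "if (obj == null) { ...throws... } else { ... }" (or its
// mirrored form) guarding block `throws`, and removes HNullCheck instructions
// on `obj` in blocks dominated by the test (excluding the test block and the
// throwing block), replacing them with an HBoundType known to be non-null.
// Returns true if anything was rewritten.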
static bool RemoveNonNullControlDependences(HBasicBlock* block, HBasicBlock* throws) {
  // Test for an if as last statement.
  if (!block->EndsWithIf()) {
    return false;
  }
  HIf* ifs = block->GetLastInstruction()->AsIf();
  // Find either:
  //   if obj == null
  //     throws
  //   else
  //     not_throws
  // or:
  //   if obj != null
  //     not_throws
  //   else
  //     throws
  HInstruction* cond = ifs->InputAt(0);
  HBasicBlock* not_throws = nullptr;
  if (throws == ifs->IfTrueSuccessor() && cond->IsEqual()) {
    not_throws = ifs->IfFalseSuccessor();
  } else if (throws == ifs->IfFalseSuccessor() && cond->IsNotEqual()) {
    not_throws = ifs->IfTrueSuccessor();
  } else {
    return false;
  }
  DCHECK(cond->IsEqual() || cond->IsNotEqual());
  HInstruction* obj = cond->InputAt(1);
  if (obj->IsNullConstant()) {
    obj = cond->InputAt(0);
  } else if (!cond->InputAt(0)->IsNullConstant()) {
    return false;
  }
  // Scan all uses of obj and find null check under control dependence.
  HBoundType* bound = nullptr;
  const HUseList<HInstruction*>& uses = obj->GetUses();
  for (auto it = uses.begin(), end = uses.end(); it != end;) {
    HInstruction* user = it->GetUser();
    ++it;  // increment before possibly replacing
    if (user->IsNullCheck()) {
      HBasicBlock* user_block = user->GetBlock();
      if (user_block != block &&
          user_block != throws &&
          block->Dominates(user_block)) {
        if (bound == nullptr) {
          ReferenceTypeInfo ti = obj->GetReferenceTypeInfo();
          bound = new (obj->GetBlock()->GetGraph()->GetAllocator()) HBoundType(obj);
          bound->SetUpperBound(ti, /*can_be_null*/ false);
          bound->SetReferenceTypeInfo(ti);
          bound->SetCanBeNull(false);
          not_throws->InsertInstructionBefore(bound, not_throws->GetFirstInstruction());
        }
        user->ReplaceWith(bound);
        user_block->RemoveInstruction(user);
      }
    }
  }
  return bound != nullptr;
}

// Simplify the pattern:
//
//        B1
//       /  \
//      |   instr_1
//      |   ...
//      |   instr_n
//      |   foo()  // always throws
//      |   instr_n+2
//      |   ...
//      |   instr_n+m
//      \   goto B2
//       \ /
//        B2
//
// Into:
//
//        B1
//       /  \
//      |   instr_1
//      |   ...
//      |   instr_n
//      |   foo()
//      |   goto Exit
//      |   |
//      B2  Exit
//
// Rationale:
// Removal of the never taken edge to B2 may expose
// other optimization opportunities, such as code sinking.
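//
// For example (hypothetical Java source, where `alwaysThrows()` stands for any
// call known to unconditionally throw):
//
//   if (flag) {
//     alwaysThrows();
//     log("unreachable");  // never executed; removed as a dead block afterwards
//   }
//   done();
//
// The branch is rewired to jump to the exit right after the throwing call.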
bool HDeadCodeElimination::SimplifyAlwaysThrows() {
  HBasicBlock* exit = graph_->GetExitBlock();
  if (!graph_->HasAlwaysThrowingInvokes() || exit == nullptr) {
    return false;
  }

  bool rerun_dominance_and_loop_analysis = false;

  // Order does not matter, just pick one.
  for (HBasicBlock* block : graph_->GetReversePostOrder()) {
    if (block->GetTryCatchInformation() != nullptr) {
      // We don't want to perform the simplify-always-throws optimization for throws inside of
      // tries, since those throws might not go to the exit block. We detect that by checking the
      // block's TryCatchInformation.
      //
      // As a special case, `catch_block` is the first block of the catch and it has
      // TryCatchInformation. Other blocks in the catch don't have TryCatchInformation (as long as
      // they are not part of an outer try). Knowing whether a `catch_block` is part of an outer
      // try is possible by checking its successors, but other restrictions of the
      // simplify-always-throws optimization will block `catch_block` anyway (e.g. only one
      // predecessor), so it is not worth the effort.

      // TODO(solanes): Maybe we can do a `goto catch` if inside of a try catch instead of going to
      // the exit. If we do so, we have to take into account that we should go to the nearest valid
      // catch, i.e. one that would accept our exception type.
      continue;
    }

    if (block->GetLastInstruction()->IsGoto() &&
        block->GetPhis().IsEmpty() &&
        block->GetPredecessors().size() == 1u) {
      HBasicBlock* pred = block->GetSinglePredecessor();
      HBasicBlock* succ = block->GetSingleSuccessor();
      // Ensure no computations are merged through throwing block. This does not prevent the
      // optimization per se, but would require an elaborate clean up of the SSA graph.
      if (succ != exit &&
          !block->Dominates(pred) &&
          pred->Dominates(succ) &&
          succ->GetPredecessors().size() > 1u &&
          succ->GetPhis().IsEmpty()) {
        // We iterate to find the first instruction that always throws. If two instructions always
        // throw, the first one will throw and the second one will never be reached.
        HInstruction* throwing_instruction = nullptr;
        for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
          if (it.Current()->AlwaysThrows()) {
            throwing_instruction = it.Current();
            break;
          }
        }

        if (throwing_instruction == nullptr) {
          // No always-throwing instruction found. Continue with the rest of the blocks.
          continue;
        }

        // We split the block at the throwing instruction, and the instructions after the throwing
        // instruction will be disconnected from the graph once `block` points to the exit.
        // `RemoveDeadBlocks` will take care of removing this new block and its instructions.
        // Even though `SplitBefore` doesn't guarantee the graph to remain in SSA form, it is fine
        // since we do not break it.
        HBasicBlock* new_block = block->SplitBefore(throwing_instruction->GetNext(),
                                                    /* require_graph_not_in_ssa_form= */ false);
        DCHECK_EQ(block->GetSingleSuccessor(), new_block);
        block->ReplaceSuccessor(new_block, exit);

        rerun_dominance_and_loop_analysis = true;
        MaybeRecordStat(stats_, MethodCompilationStat::kSimplifyThrowingInvoke);
        // Perform a quick follow up optimization on object != null control dependences
        // that is much cheaper to perform now than in a later phase.
        if (RemoveNonNullControlDependences(pred, block)) {
          MaybeRecordStat(stats_, MethodCompilationStat::kRemovedNullCheck);
        }
      }
    }
  }

  // We need to re-analyze the graph in order to run DCE afterwards.
  if (rerun_dominance_and_loop_analysis) {
    graph_->ClearLoopInformation();
    graph_->ClearDominanceInformation();
    graph_->BuildDominatorTree();
    return true;
  }
  return false;
}

// Simplify the pattern:
//
// B1    B2    ...
// goto  goto  goto
//  \    |    /
//   \   |   /
//        B3
//  i1 = phi(input, input)
//  (i2 = condition on i1)
//  if i1 (or i2)
//    /     \
//   /       \
// B4         B5
//
// Into:
//
// B1    B2    ...
//  |    |      |
// B4    B5     B?
//
// Note that individual edges can be redirected (for example B2->B3
// can be redirected as B2->B5) without applying this optimization
// to other incoming edges.
//
// This simplification cannot be applied to catch blocks, because
// exception handler edges do not represent normal control flow.
// Though in theory this could still apply to normal control flow
// going directly to a catch block, we cannot support it at the
// moment because the catch Phi's inputs do not correspond to the
// catch block's predecessors, so we cannot identify which
// predecessor corresponds to a given statically evaluated input.
//
// We do not apply this optimization to loop headers as this could
// create irreducible loops. We rely on the suspend check in the
// loop header to prevent the pattern match.
//
// Note that we rely on the dead code elimination to get rid of B3.
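//
// As a hypothetical source-level illustration, code such as
//
//   boolean b = (obj == null) ? false : obj.isReady();
//   if (b) { ... } else { ... }
//
// tends to produce a block like B3 whose phi merges a constant with the call
// result; the predecessor feeding the constant can then branch directly to
// the matching successor of the if.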
bool HDeadCodeElimination::SimplifyIfs() {
  bool simplified_one_or_more_ifs = false;
  bool rerun_dominance_and_loop_analysis = false;

  for (HBasicBlock* block : graph_->GetReversePostOrder()) {
    HInstruction* last = block->GetLastInstruction();
    HInstruction* first = block->GetFirstInstruction();
    if (!block->IsCatchBlock() &&
        last->IsIf() &&
        block->HasSinglePhi() &&
        block->GetFirstPhi()->HasOnlyOneNonEnvironmentUse()) {
      bool has_only_phi_and_if = (last == first) && (last->InputAt(0) == block->GetFirstPhi());
      bool has_only_phi_condition_and_if =
          !has_only_phi_and_if &&
          first->IsCondition() &&
          HasInput(first->AsCondition(), block->GetFirstPhi()) &&
          (first->GetNext() == last) &&
          (last->InputAt(0) == first) &&
          first->HasOnlyOneNonEnvironmentUse();

      if (has_only_phi_and_if || has_only_phi_condition_and_if) {
        DCHECK(!block->IsLoopHeader());
        HPhi* phi = block->GetFirstPhi()->AsPhi();
        bool phi_input_is_left = (first->InputAt(0) == phi);

        // Walk over all inputs of the phi and update the control flow of
        // predecessors feeding constants to the phi.
        // Note that phi->InputCount() may change inside the loop.
        for (size_t i = 0; i < phi->InputCount();) {
          HInstruction* input = phi->InputAt(i);
          HInstruction* value_to_check = nullptr;
          if (has_only_phi_and_if) {
            if (input->IsIntConstant()) {
              value_to_check = input;
            }
          } else {
            DCHECK(has_only_phi_condition_and_if);
            if (phi_input_is_left) {
              value_to_check = Evaluate(first->AsCondition(), input, first->InputAt(1));
            } else {
              value_to_check = Evaluate(first->AsCondition(), first->InputAt(0), input);
            }
          }
          if (value_to_check == nullptr) {
            // Could not evaluate to a constant, continue iterating over the inputs.
            ++i;
          } else {
            HBasicBlock* predecessor_to_update = block->GetPredecessors()[i];
            HBasicBlock* successor_to_update = nullptr;
            if (value_to_check->AsIntConstant()->IsTrue()) {
              successor_to_update = last->AsIf()->IfTrueSuccessor();
            } else {
              DCHECK(value_to_check->AsIntConstant()->IsFalse())
                  << value_to_check->AsIntConstant()->GetValue();
              successor_to_update = last->AsIf()->IfFalseSuccessor();
            }
            predecessor_to_update->ReplaceSuccessor(block, successor_to_update);
            phi->RemoveInputAt(i);
            simplified_one_or_more_ifs = true;
            if (block->IsInLoop()) {
              rerun_dominance_and_loop_analysis = true;
            }
            // For simplicity, don't create a dead block, let the dead code elimination
            // pass deal with it.
            if (phi->InputCount() == 1) {
              break;
            }
          }
        }
        if (block->GetPredecessors().size() == 1) {
          phi->ReplaceWith(phi->InputAt(0));
          block->RemovePhi(phi);
          if (has_only_phi_condition_and_if) {
            // Evaluate here (and not wait for a constant folding pass) to open
            // more opportunities for DCE.
            HInstruction* result = first->AsCondition()->TryStaticEvaluation();
            if (result != nullptr) {
              first->ReplaceWith(result);
              block->RemoveInstruction(first);
            }
          }
        }
        if (simplified_one_or_more_ifs) {
          MaybeRecordSimplifyIf();
        }
      }
    }
  }
  // We need to re-analyze the graph in order to run DCE afterwards.
  if (simplified_one_or_more_ifs) {
    if (rerun_dominance_and_loop_analysis) {
      graph_->ClearLoopInformation();
      graph_->ClearDominanceInformation();
      graph_->BuildDominatorTree();
    } else {
      graph_->ClearDominanceInformation();
      // We have introduced critical edges, remove them.
      graph_->SimplifyCFG();
      graph_->ComputeDominanceInformation();
      graph_->ComputeTryBlockInformation();
    }
  }

  return simplified_one_or_more_ifs;
}

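// Collapses straight-line chains of blocks: every block that ends with an
// unconditional HGoto is merged with its single successor, provided that
// successor is not the exit block and has no other predecessors.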
void HDeadCodeElimination::ConnectSuccessiveBlocks() {
  // Order does not matter. Skip the entry block by starting at index 1 in reverse post order.
  for (size_t i = 1u, size = graph_->GetReversePostOrder().size(); i != size; ++i) {
    HBasicBlock* block = graph_->GetReversePostOrder()[i];
    DCHECK(!block->IsEntryBlock());
    while (block->GetLastInstruction()->IsGoto()) {
      HBasicBlock* successor = block->GetSingleSuccessor();
      if (successor->IsExitBlock() || successor->GetPredecessors().size() != 1u) {
        break;
      }
      DCHECK_LT(i, IndexOfElement(graph_->GetReversePostOrder(), successor));
      block->MergeWith(successor);
      --size;
      DCHECK_EQ(size, graph_->GetReversePostOrder().size());
      DCHECK_EQ(block, graph_->GetReversePostOrder()[i]);
      // Reiterate on this block in case it can be merged with its new successor.
    }
  }
}

bool HDeadCodeElimination::RemoveDeadBlocks() {
  // Use local allocator for allocating memory.
  ScopedArenaAllocator allocator(graph_->GetArenaStack());

  // Classify blocks as reachable/unreachable.
  ArenaBitVector live_blocks(&allocator, graph_->GetBlocks().size(), false, kArenaAllocDCE);
  live_blocks.ClearAllBits();

  MarkReachableBlocks(graph_, &live_blocks);
  bool removed_one_or_more_blocks = false;
  bool rerun_dominance_and_loop_analysis = false;

  // Remove all dead blocks. Iterate in post order because removal needs the
  // block's chain of dominators and nested loops need to be updated from the
  // inside out.
  for (HBasicBlock* block : graph_->GetPostOrder()) {
    int id = block->GetBlockId();
    if (!live_blocks.IsBitSet(id)) {
      MaybeRecordDeadBlock(block);
      block->DisconnectAndDelete();
      removed_one_or_more_blocks = true;
      if (block->IsInLoop()) {
        rerun_dominance_and_loop_analysis = true;
      }
    }
  }

  // If we removed at least one block, we need to recompute the full
  // dominator tree and try block membership.
  if (removed_one_or_more_blocks) {
    if (rerun_dominance_and_loop_analysis) {
      graph_->ClearLoopInformation();
      graph_->ClearDominanceInformation();
      graph_->BuildDominatorTree();
    } else {
      graph_->ClearDominanceInformation();
      graph_->ComputeDominanceInformation();
      graph_->ComputeTryBlockInformation();
    }
  }
  return removed_one_or_more_blocks;
}

void HDeadCodeElimination::RemoveDeadInstructions() {
  // Process basic blocks in post-order in the dominator tree, so that
  // a dead instruction depending on another dead instruction is removed.
  for (HBasicBlock* block : graph_->GetPostOrder()) {
    // Traverse this block's instructions in backward order and remove
    // the unused ones.
    HBackwardInstructionIterator i(block->GetInstructions());
    // Skip the first iteration, as the last instruction of a block is
    // a branching instruction.
    DCHECK(i.Current()->IsControlFlow());
    for (i.Advance(); !i.Done(); i.Advance()) {
      HInstruction* inst = i.Current();
      DCHECK(!inst->IsControlFlow());
      if (inst->IsDeadAndRemovable()) {
        block->RemoveInstruction(inst);
        MaybeRecordStat(stats_, MethodCompilationStat::kRemovedDeadInstruction);
      }
    }
  }
}

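// Entry point of the pass: simplify the CFG (merge Goto chains, redirect
// always-throwing paths and statically known ifs), remove unreachable blocks,
// then clean up redundant phis and dead instructions.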
bool HDeadCodeElimination::Run() {
  // Do not eliminate dead blocks if the graph has irreducible loops. We could
  // support it, but that would require changes in our loop representation to handle
  // multiple entry points. We decided it was not worth the complexity.
  if (!graph_->HasIrreducibleLoops()) {
    // Simplify graph to generate more dead block patterns.
    ConnectSuccessiveBlocks();
    bool did_any_simplification = false;
    did_any_simplification |= SimplifyAlwaysThrows();
    did_any_simplification |= SimplifyIfs();
    did_any_simplification |= RemoveDeadBlocks();
    if (did_any_simplification) {
      // Connect successive blocks created by dead branches.
      ConnectSuccessiveBlocks();
    }
  }
  SsaRedundantPhiElimination(graph_).Run();
  RemoveDeadInstructions();
  return true;
}

}  // namespace art