Roland Levillain | 72bceff | 2014-09-15 18:29:00 +0100 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2014 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "dead_code_elimination.h" |
| 18 | |
Santiago Aboy Solanes | 78f3d3a | 2022-07-15 14:30:05 +0100 | [diff] [blame] | 19 | #include "android-base/logging.h" |
David Brazdil | d9c9037 | 2016-09-14 16:53:55 +0100 | [diff] [blame] | 20 | #include "base/array_ref.h" |
Roland Levillain | 72bceff | 2014-09-15 18:29:00 +0100 | [diff] [blame] | 21 | #include "base/bit_vector-inl.h" |
Vladimir Marko | 009d166 | 2017-10-10 13:21:15 +0100 | [diff] [blame] | 22 | #include "base/scoped_arena_allocator.h" |
| 23 | #include "base/scoped_arena_containers.h" |
Vladimir Marko | 2c45bc9 | 2016-10-25 16:54:12 +0100 | [diff] [blame] | 24 | #include "base/stl_util.h" |
Santiago Aboy Solanes | 78f3d3a | 2022-07-15 14:30:05 +0100 | [diff] [blame] | 25 | #include "optimizing/nodes.h" |
David Brazdil | 84daae5 | 2015-05-18 12:06:52 +0100 | [diff] [blame] | 26 | #include "ssa_phi_elimination.h" |
Roland Levillain | 72bceff | 2014-09-15 18:29:00 +0100 | [diff] [blame] | 27 | |
Vladimir Marko | 0a51605 | 2019-10-14 13:00:44 +0000 | [diff] [blame] | 28 | namespace art { |
Roland Levillain | 72bceff | 2014-09-15 18:29:00 +0100 | [diff] [blame] | 29 | |
Vladimir Marko | 211c211 | 2015-09-24 16:52:33 +0100 | [diff] [blame] | 30 | static void MarkReachableBlocks(HGraph* graph, ArenaBitVector* visited) { |
Vladimir Marko | 009d166 | 2017-10-10 13:21:15 +0100 | [diff] [blame] | 31 | // Use local allocator for allocating memory. |
| 32 | ScopedArenaAllocator allocator(graph->GetArenaStack()); |
| 33 | |
| 34 | ScopedArenaVector<HBasicBlock*> worklist(allocator.Adapter(kArenaAllocDCE)); |
Vladimir Marko | 211c211 | 2015-09-24 16:52:33 +0100 | [diff] [blame] | 35 | constexpr size_t kDefaultWorlistSize = 8; |
| 36 | worklist.reserve(kDefaultWorlistSize); |
| 37 | visited->SetBit(graph->GetEntryBlock()->GetBlockId()); |
| 38 | worklist.push_back(graph->GetEntryBlock()); |
David Brazdil | 2d7352b | 2015-04-20 14:52:42 +0100 | [diff] [blame] | 39 | |
Vladimir Marko | 211c211 | 2015-09-24 16:52:33 +0100 | [diff] [blame] | 40 | while (!worklist.empty()) { |
| 41 | HBasicBlock* block = worklist.back(); |
| 42 | worklist.pop_back(); |
| 43 | int block_id = block->GetBlockId(); |
| 44 | DCHECK(visited->IsBitSet(block_id)); |
| 45 | |
| 46 | ArrayRef<HBasicBlock* const> live_successors(block->GetSuccessors()); |
| 47 | HInstruction* last_instruction = block->GetLastInstruction(); |
| 48 | if (last_instruction->IsIf()) { |
| 49 | HIf* if_instruction = last_instruction->AsIf(); |
| 50 | HInstruction* condition = if_instruction->InputAt(0); |
| 51 | if (condition->IsIntConstant()) { |
Roland Levillain | 1a65388 | 2016-03-18 18:05:57 +0000 | [diff] [blame] | 52 | if (condition->AsIntConstant()->IsTrue()) { |
Vladimir Marko | 211c211 | 2015-09-24 16:52:33 +0100 | [diff] [blame] | 53 | live_successors = live_successors.SubArray(0u, 1u); |
| 54 | DCHECK_EQ(live_successors[0], if_instruction->IfTrueSuccessor()); |
| 55 | } else { |
Roland Levillain | 1a65388 | 2016-03-18 18:05:57 +0000 | [diff] [blame] | 56 | DCHECK(condition->AsIntConstant()->IsFalse()) << condition->AsIntConstant()->GetValue(); |
Vladimir Marko | 211c211 | 2015-09-24 16:52:33 +0100 | [diff] [blame] | 57 | live_successors = live_successors.SubArray(1u, 1u); |
| 58 | DCHECK_EQ(live_successors[0], if_instruction->IfFalseSuccessor()); |
| 59 | } |
Mark Mendell | fe57faa | 2015-09-18 09:26:15 -0400 | [diff] [blame] | 60 | } |
Vladimir Marko | 211c211 | 2015-09-24 16:52:33 +0100 | [diff] [blame] | 61 | } else if (last_instruction->IsPackedSwitch()) { |
| 62 | HPackedSwitch* switch_instruction = last_instruction->AsPackedSwitch(); |
| 63 | HInstruction* switch_input = switch_instruction->InputAt(0); |
| 64 | if (switch_input->IsIntConstant()) { |
| 65 | int32_t switch_value = switch_input->AsIntConstant()->GetValue(); |
| 66 | int32_t start_value = switch_instruction->GetStartValue(); |
Vladimir Marko | 430c4f5 | 2015-09-25 17:10:15 +0100 | [diff] [blame] | 67 | // Note: Though the spec forbids packed-switch values to wrap around, we leave |
| 68 | // that task to the verifier and use unsigned arithmetic with it's "modulo 2^32" |
| 69 | // semantics to check if the value is in range, wrapped or not. |
| 70 | uint32_t switch_index = |
| 71 | static_cast<uint32_t>(switch_value) - static_cast<uint32_t>(start_value); |
Vladimir Marko | 211c211 | 2015-09-24 16:52:33 +0100 | [diff] [blame] | 72 | if (switch_index < switch_instruction->GetNumEntries()) { |
| 73 | live_successors = live_successors.SubArray(switch_index, 1u); |
Vladimir Marko | ec7802a | 2015-10-01 20:57:57 +0100 | [diff] [blame] | 74 | DCHECK_EQ(live_successors[0], block->GetSuccessors()[switch_index]); |
Vladimir Marko | 211c211 | 2015-09-24 16:52:33 +0100 | [diff] [blame] | 75 | } else { |
| 76 | live_successors = live_successors.SubArray(switch_instruction->GetNumEntries(), 1u); |
| 77 | DCHECK_EQ(live_successors[0], switch_instruction->GetDefaultBlock()); |
| 78 | } |
Mark Mendell | fe57faa | 2015-09-18 09:26:15 -0400 | [diff] [blame] | 79 | } |
| 80 | } |
Vladimir Marko | 211c211 | 2015-09-24 16:52:33 +0100 | [diff] [blame] | 81 | |
| 82 | for (HBasicBlock* successor : live_successors) { |
| 83 | // Add only those successors that have not been visited yet. |
| 84 | if (!visited->IsBitSet(successor->GetBlockId())) { |
| 85 | visited->SetBit(successor->GetBlockId()); |
| 86 | worklist.push_back(successor); |
| 87 | } |
David Brazdil | 2d7352b | 2015-04-20 14:52:42 +0100 | [diff] [blame] | 88 | } |
| 89 | } |
| 90 | } |
| 91 | |
| 92 | void HDeadCodeElimination::MaybeRecordDeadBlock(HBasicBlock* block) { |
| 93 | if (stats_ != nullptr) { |
| 94 | stats_->RecordStat(MethodCompilationStat::kRemovedDeadInstruction, |
| 95 | block->GetPhis().CountSize() + block->GetInstructions().CountSize()); |
| 96 | } |
| 97 | } |
| 98 | |
Nicolas Geoffray | dac9b19 | 2016-07-15 10:46:17 +0100 | [diff] [blame] | 99 | void HDeadCodeElimination::MaybeRecordSimplifyIf() { |
| 100 | if (stats_ != nullptr) { |
| 101 | stats_->RecordStat(MethodCompilationStat::kSimplifyIf); |
Nicolas Geoffray | 09aa147 | 2016-01-19 10:52:54 +0000 | [diff] [blame] | 102 | } |
Nicolas Geoffray | dac9b19 | 2016-07-15 10:46:17 +0100 | [diff] [blame] | 103 | } |
| 104 | |
| 105 | static bool HasInput(HCondition* instruction, HInstruction* input) { |
| 106 | return (instruction->InputAt(0) == input) || |
| 107 | (instruction->InputAt(1) == input); |
| 108 | } |
| 109 | |
| 110 | static bool HasEquality(IfCondition condition) { |
| 111 | switch (condition) { |
| 112 | case kCondEQ: |
| 113 | case kCondLE: |
| 114 | case kCondGE: |
| 115 | case kCondBE: |
| 116 | case kCondAE: |
| 117 | return true; |
| 118 | case kCondNE: |
| 119 | case kCondLT: |
| 120 | case kCondGT: |
| 121 | case kCondB: |
| 122 | case kCondA: |
| 123 | return false; |
| 124 | } |
| 125 | } |
| 126 | |
// Statically evaluates `condition` applied to (`left`, `right`). Returns the
// resulting constant if the outcome is known at compile time, or nullptr if
// it cannot be decided here.
static HConstant* Evaluate(HCondition* condition, HInstruction* left, HInstruction* right) {
  // Identical SSA values compare by the equality-acceptance of the condition
  // alone. Floating point is excluded: a NaN operand compares unequal even
  // to itself, so `left == right` as SSA values does not decide the result.
  if (left == right && !DataType::IsFloatingPointType(left->GetType())) {
    return condition->GetBlock()->GetGraph()->GetIntConstant(
        HasEquality(condition->GetCondition()) ? 1 : 0);
  }

  // Beyond the trivial case above, both operands must be constants.
  if (!left->IsConstant() || !right->IsConstant()) {
    return nullptr;
  }

  // Dispatch on the constant kind of `left`; `right` is expected to be of
  // the matching kind (see the trailing DCHECK for the final branch).
  if (left->IsIntConstant()) {
    return condition->Evaluate(left->AsIntConstant(), right->AsIntConstant());
  } else if (left->IsNullConstant()) {
    return condition->Evaluate(left->AsNullConstant(), right->AsNullConstant());
  } else if (left->IsLongConstant()) {
    return condition->Evaluate(left->AsLongConstant(), right->AsLongConstant());
  } else if (left->IsFloatConstant()) {
    return condition->Evaluate(left->AsFloatConstant(), right->AsFloatConstant());
  } else {
    DCHECK(left->IsDoubleConstant());
    return condition->Evaluate(left->AsDoubleConstant(), right->AsDoubleConstant());
  }
}
| 150 | |
// If `block` ends with an `if (obj == null)` (or `obj != null`) whose
// null-side successor is `throws`, then `obj` is known to be non-null on the
// other successor. Replaces null checks of `obj` in blocks dominated by
// `block` with a shared non-null HBoundType inserted at the start of the
// non-throwing successor. Returns true if at least one null check was
// removed.
static bool RemoveNonNullControlDependences(HBasicBlock* block, HBasicBlock* throws) {
  // Test for an if as last statement.
  if (!block->EndsWithIf()) {
    return false;
  }
  HIf* ifs = block->GetLastInstruction()->AsIf();
  // Find either:
  //   if obj == null
  //     throws
  //   else
  //     not_throws
  // or:
  //   if obj != null
  //     not_throws
  //   else
  //     throws
  HInstruction* cond = ifs->InputAt(0);
  HBasicBlock* not_throws = nullptr;
  if (throws == ifs->IfTrueSuccessor() && cond->IsEqual()) {
    not_throws = ifs->IfFalseSuccessor();
  } else if (throws == ifs->IfFalseSuccessor() && cond->IsNotEqual()) {
    not_throws = ifs->IfTrueSuccessor();
  } else {
    return false;
  }
  DCHECK(cond->IsEqual() || cond->IsNotEqual());
  // One side of the comparison must be the null constant; the other side is
  // the object whose null checks we may remove.
  HInstruction* obj = cond->InputAt(1);
  if (obj->IsNullConstant()) {
    obj = cond->InputAt(0);
  } else if (!cond->InputAt(0)->IsNullConstant()) {
    return false;
  }
  // Scan all uses of obj and find null check under control dependence.
  HBoundType* bound = nullptr;
  const HUseList<HInstruction*>& uses = obj->GetUses();
  for (auto it = uses.begin(), end = uses.end(); it != end;) {
    HInstruction* user = it->GetUser();
    ++it;  // increment before possibly replacing (removal invalidates the use entry)
    if (user->IsNullCheck()) {
      HBasicBlock* user_block = user->GetBlock();
      // Skip null checks in `block` itself and in the throwing successor;
      // only blocks strictly under `block`'s control dependence qualify.
      if (user_block != block &&
          user_block != throws &&
          block->Dominates(user_block)) {
        if (bound == nullptr) {
          // Lazily create a single HBoundType carrying the non-null fact,
          // placed at the start of the non-throwing successor so it is
          // available to all replaced checks.
          ReferenceTypeInfo ti = obj->GetReferenceTypeInfo();
          bound = new (obj->GetBlock()->GetGraph()->GetAllocator()) HBoundType(obj);
          bound->SetUpperBound(ti, /*can_be_null*/ false);
          bound->SetReferenceTypeInfo(ti);
          bound->SetCanBeNull(false);
          not_throws->InsertInstructionBefore(bound, not_throws->GetFirstInstruction());
        }
        user->ReplaceWith(bound);
        user_block->RemoveInstruction(user);
      }
    }
  }
  // True iff at least one null check was replaced (bound is created lazily).
  return bound != nullptr;
}
| 209 | |
Nicolas Geoffray | dac9b19 | 2016-07-15 10:46:17 +0100 | [diff] [blame] | 210 | // Simplify the pattern: |
| 211 | // |
Aart Bik | a8b8e9b | 2018-01-09 11:01:02 -0800 | [diff] [blame] | 212 | // B1 |
| 213 | // / \ |
Santiago Aboy Solanes | cef72a6 | 2022-04-06 14:13:18 +0000 | [diff] [blame] | 214 | // | instr_1 |
| 215 | // | ... |
| 216 | // | instr_n |
Aart Bik | a8b8e9b | 2018-01-09 11:01:02 -0800 | [diff] [blame] | 217 | // | foo() // always throws |
Santiago Aboy Solanes | 78f3d3a | 2022-07-15 14:30:05 +0100 | [diff] [blame] | 218 | // | instr_n+2 |
| 219 | // | ... |
| 220 | // | instr_n+m |
Aart Bik | a8b8e9b | 2018-01-09 11:01:02 -0800 | [diff] [blame] | 221 | // \ goto B2 |
| 222 | // \ / |
| 223 | // B2 |
| 224 | // |
| 225 | // Into: |
| 226 | // |
| 227 | // B1 |
| 228 | // / \ |
Santiago Aboy Solanes | cef72a6 | 2022-04-06 14:13:18 +0000 | [diff] [blame] | 229 | // | instr_1 |
| 230 | // | ... |
| 231 | // | instr_n |
Aart Bik | a8b8e9b | 2018-01-09 11:01:02 -0800 | [diff] [blame] | 232 | // | foo() |
| 233 | // | goto Exit |
| 234 | // | | |
| 235 | // B2 Exit |
| 236 | // |
| 237 | // Rationale: |
| 238 | // Removal of the never taken edge to B2 may expose |
| 239 | // other optimization opportunities, such as code sinking. |
// See the pattern diagram above: redirects a block containing an
// always-throwing instruction straight to the exit block, letting later
// dead-block removal delete the never-taken fall-through code.
bool HDeadCodeElimination::SimplifyAlwaysThrows() {
  HBasicBlock* exit = graph_->GetExitBlock();
  // Bail out early: nothing to do without an always-throwing invoke in the
  // graph, or without an exit block to redirect to.
  if (!graph_->HasAlwaysThrowingInvokes() || exit == nullptr) {
    return false;
  }

  bool rerun_dominance_and_loop_analysis = false;

  // Order does not matter, just pick one.
  for (HBasicBlock* block : graph_->GetReversePostOrder()) {
    if (block->GetTryCatchInformation() != nullptr) {
      // We don't want to perform the simplify always throws optimizations for throws inside of
      // tries since those throws might not go to the exit block. We do that by checking the
      // TryCatchInformation of the blocks.
      //
      // As a special case the `catch_block` is the first block of the catch and it has
      // TryCatchInformation. Other blocks in the catch don't have try catch information (as long as
      // they are not part of an outer try). Knowing if a `catch_block` is part of an outer try is
      // possible by checking its successors, but other restrictions of the simplify always throws
      // optimization will block `catch_block` nevertheless (e.g. only one predecessor) so it is not
      // worth the effort.

      // TODO(solanes): Maybe we can do a `goto catch` if inside of a try catch instead of going to
      // the exit. If we do so, we have to take into account that we should go to the nearest valid
      // catch i.e. one that would accept our exception type.
      continue;
    }

    // Candidate blocks end in a plain Goto, have no phis, and have exactly
    // one predecessor.
    if (block->GetLastInstruction()->IsGoto() &&
        block->GetPhis().IsEmpty() &&
        block->GetPredecessors().size() == 1u) {
      HBasicBlock* pred = block->GetSinglePredecessor();
      HBasicBlock* succ = block->GetSingleSuccessor();
      // Ensure no computations are merged through throwing block. This does not prevent the
      // optimization per se, but would require an elaborate clean up of the SSA graph.
      if (succ != exit &&
          !block->Dominates(pred) &&
          pred->Dominates(succ) &&
          succ->GetPredecessors().size() > 1u &&
          succ->GetPhis().IsEmpty()) {
        // We iterate to find the first instruction that always throws. If two instructions always
        // throw, the first one will throw and the second one will never be reached.
        HInstruction* throwing_instruction = nullptr;
        for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
          if (it.Current()->AlwaysThrows()) {
            throwing_instruction = it.Current();
            break;
          }
        }

        if (throwing_instruction == nullptr) {
          // No always-throwing instruction found. Continue with the rest of the blocks.
          continue;
        }

        // We split the block at the throwing instruction, and the instructions after the throwing
        // instructions will be disconnected from the graph after `block` points to the exit.
        // `RemoveDeadBlocks` will take care of removing this new block and its instructions.
        // Even though `SplitBefore` doesn't guarantee the graph to remain in SSA form, it is fine
        // since we do not break it.
        HBasicBlock* new_block = block->SplitBefore(throwing_instruction->GetNext(),
                                                    /* require_graph_not_in_ssa_form= */ false);
        DCHECK_EQ(block->GetSingleSuccessor(), new_block);
        block->ReplaceSuccessor(new_block, exit);

        rerun_dominance_and_loop_analysis = true;
        MaybeRecordStat(stats_, MethodCompilationStat::kSimplifyThrowingInvoke);
        // Perform a quick follow up optimization on object != null control dependences
        // that is much cheaper to perform now than in a later phase.
        if (RemoveNonNullControlDependences(pred, block)) {
          MaybeRecordStat(stats_, MethodCompilationStat::kRemovedNullCheck);
        }
      }
    }
  }

  // We need to re-analyze the graph in order to run DCE afterwards.
  if (rerun_dominance_and_loop_analysis) {
    graph_->ClearLoopInformation();
    graph_->ClearDominanceInformation();
    graph_->BuildDominatorTree();
    return true;
  }
  return false;
}
| 325 | |
| 326 | // Simplify the pattern: |
| 327 | // |
Nicolas Geoffray | dac9b19 | 2016-07-15 10:46:17 +0100 | [diff] [blame] | 328 | // B1 B2 ... |
| 329 | // goto goto goto |
| 330 | // \ | / |
| 331 | // \ | / |
| 332 | // B3 |
| 333 | // i1 = phi(input, input) |
| 334 | // (i2 = condition on i1) |
| 335 | // if i1 (or i2) |
| 336 | // / \ |
| 337 | // / \ |
| 338 | // B4 B5 |
| 339 | // |
| 340 | // Into: |
| 341 | // |
| 342 | // B1 B2 ... |
| 343 | // | | | |
| 344 | // B4 B5 B? |
| 345 | // |
Vladimir Marko | 606c8f0 | 2016-11-03 13:01:28 +0000 | [diff] [blame] | 346 | // Note that individual edges can be redirected (for example B2->B3 |
| 347 | // can be redirected as B2->B5) without applying this optimization |
| 348 | // to other incoming edges. |
| 349 | // |
| 350 | // This simplification cannot be applied to catch blocks, because |
| 351 | // exception handler edges do not represent normal control flow. |
| 352 | // Though in theory this could still apply to normal control flow |
| 353 | // going directly to a catch block, we cannot support it at the |
| 354 | // moment because the catch Phi's inputs do not correspond to the |
| 355 | // catch block's predecessors, so we cannot identify which |
| 356 | // predecessor corresponds to a given statically evaluated input. |
| 357 | // |
| 358 | // We do not apply this optimization to loop headers as this could |
| 359 | // create irreducible loops. We rely on the suspend check in the |
| 360 | // loop header to prevent the pattern match. |
Nicolas Geoffray | dac9b19 | 2016-07-15 10:46:17 +0100 | [diff] [blame] | 361 | // |
| 362 | // Note that we rely on the dead code elimination to get rid of B3. |
// See the pattern diagram above: redirects predecessor edges around a block
// that only contains a phi (optionally a condition on it) and an if, for
// those phi inputs whose branch outcome can be evaluated statically.
bool HDeadCodeElimination::SimplifyIfs() {
  bool simplified_one_or_more_ifs = false;
  bool rerun_dominance_and_loop_analysis = false;

  for (HBasicBlock* block : graph_->GetReversePostOrder()) {
    HInstruction* last = block->GetLastInstruction();
    HInstruction* first = block->GetFirstInstruction();
    // Catch blocks are excluded: their phi inputs do not correspond to
    // predecessor edges (see the function comment above).
    if (!block->IsCatchBlock() &&
        last->IsIf() &&
        block->HasSinglePhi() &&
        block->GetFirstPhi()->HasOnlyOneNonEnvironmentUse()) {
      // Pattern A: the block is exactly [phi; if(phi)].
      bool has_only_phi_and_if = (last == first) && (last->InputAt(0) == block->GetFirstPhi());
      // Pattern B: the block is exactly [phi; cond(..phi..); if(cond)].
      bool has_only_phi_condition_and_if =
          !has_only_phi_and_if &&
          first->IsCondition() &&
          HasInput(first->AsCondition(), block->GetFirstPhi()) &&
          (first->GetNext() == last) &&
          (last->InputAt(0) == first) &&
          first->HasOnlyOneNonEnvironmentUse();

      if (has_only_phi_and_if || has_only_phi_condition_and_if) {
        DCHECK(!block->IsLoopHeader());
        HPhi* phi = block->GetFirstPhi()->AsPhi();
        // Remember on which side of the condition the phi sits, so that the
        // other (fixed) operand is used during evaluation below.
        bool phi_input_is_left = (first->InputAt(0) == phi);

        // Walk over all inputs of the phis and update the control flow of
        // predecessors feeding constants to the phi.
        // Note that phi->InputCount() may change inside the loop.
        for (size_t i = 0; i < phi->InputCount();) {
          HInstruction* input = phi->InputAt(i);
          HInstruction* value_to_check = nullptr;
          if (has_only_phi_and_if) {
            if (input->IsIntConstant()) {
              value_to_check = input;
            }
          } else {
            DCHECK(has_only_phi_condition_and_if);
            if (phi_input_is_left) {
              value_to_check = Evaluate(first->AsCondition(), input, first->InputAt(1));
            } else {
              value_to_check = Evaluate(first->AsCondition(), first->InputAt(0), input);
            }
          }
          if (value_to_check == nullptr) {
            // Could not evaluate to a constant, continue iterating over the inputs.
            ++i;
          } else {
            // The branch outcome for this predecessor is known: route the
            // predecessor directly to the corresponding if-successor and
            // drop its phi input. Note: no ++i, the next input shifts into
            // slot i after RemoveInputAt.
            HBasicBlock* predecessor_to_update = block->GetPredecessors()[i];
            HBasicBlock* successor_to_update = nullptr;
            if (value_to_check->AsIntConstant()->IsTrue()) {
              successor_to_update = last->AsIf()->IfTrueSuccessor();
            } else {
              DCHECK(value_to_check->AsIntConstant()->IsFalse())
                  << value_to_check->AsIntConstant()->GetValue();
              successor_to_update = last->AsIf()->IfFalseSuccessor();
            }
            predecessor_to_update->ReplaceSuccessor(block, successor_to_update);
            phi->RemoveInputAt(i);
            simplified_one_or_more_ifs = true;
            if (block->IsInLoop()) {
              rerun_dominance_and_loop_analysis = true;
            }
            // For simplicity, don't create a dead block, let the dead code elimination
            // pass deal with it.
            if (phi->InputCount() == 1) {
              break;
            }
          }
        }
        // With a single remaining predecessor the phi is redundant; replace
        // it and, for pattern B, try to fold the condition right away.
        if (block->GetPredecessors().size() == 1) {
          phi->ReplaceWith(phi->InputAt(0));
          block->RemovePhi(phi);
          if (has_only_phi_condition_and_if) {
            // Evaluate here (and not wait for a constant folding pass) to open
            // more opportunities for DCE.
            HInstruction* result = first->AsCondition()->TryStaticEvaluation();
            if (result != nullptr) {
              first->ReplaceWith(result);
              block->RemoveInstruction(first);
            }
          }
        }
        if (simplified_one_or_more_ifs) {
          MaybeRecordSimplifyIf();
        }
      }
    }
  }
  // We need to re-analyze the graph in order to run DCE afterwards.
  if (simplified_one_or_more_ifs) {
    if (rerun_dominance_and_loop_analysis) {
      graph_->ClearLoopInformation();
      graph_->ClearDominanceInformation();
      graph_->BuildDominatorTree();
    } else {
      graph_->ClearDominanceInformation();
      // We have introduced critical edges, remove them.
      graph_->SimplifyCFG();
      graph_->ComputeDominanceInformation();
      graph_->ComputeTryBlockInformation();
    }
  }

  return simplified_one_or_more_ifs;
}
| 468 | |
// Collapses trivial Goto chains: repeatedly merges a block with its single
// successor when that successor is not the exit block and has this block as
// its only predecessor.
void HDeadCodeElimination::ConnectSuccessiveBlocks() {
  // Order does not matter. Skip the entry block by starting at index 1 in reverse post order.
  for (size_t i = 1u, size = graph_->GetReversePostOrder().size(); i != size; ++i) {
    HBasicBlock* block = graph_->GetReversePostOrder()[i];
    DCHECK(!block->IsEntryBlock());
    while (block->GetLastInstruction()->IsGoto()) {
      HBasicBlock* successor = block->GetSingleSuccessor();
      if (successor->IsExitBlock() || successor->GetPredecessors().size() != 1u) {
        break;
      }
      // The successor comes after `block` in reverse post order, so merging
      // removes an element we have not visited yet; shrink `size` to match
      // (checked by the DCHECKs below).
      DCHECK_LT(i, IndexOfElement(graph_->GetReversePostOrder(), successor));
      block->MergeWith(successor);
      --size;
      DCHECK_EQ(size, graph_->GetReversePostOrder().size());
      DCHECK_EQ(block, graph_->GetReversePostOrder()[i]);
      // Reiterate on this block in case it can be merged with its new successor.
    }
  }
}
| 488 | |
// Removes all blocks not reachable from the entry block (taking statically
// dead branch edges into account, see MarkReachableBlocks). Returns true if
// at least one block was removed.
bool HDeadCodeElimination::RemoveDeadBlocks() {
  // Use local allocator for allocating memory.
  ScopedArenaAllocator allocator(graph_->GetArenaStack());

  // Classify blocks as reachable/unreachable.
  ArenaBitVector live_blocks(&allocator, graph_->GetBlocks().size(), false, kArenaAllocDCE);
  live_blocks.ClearAllBits();

  MarkReachableBlocks(graph_, &live_blocks);
  bool removed_one_or_more_blocks = false;
  bool rerun_dominance_and_loop_analysis = false;

  // Remove all dead blocks. Iterate in post order because removal needs the
  // block's chain of dominators and nested loops need to be updated from the
  // inside out.
  for (HBasicBlock* block : graph_->GetPostOrder()) {
    int id = block->GetBlockId();
    if (!live_blocks.IsBitSet(id)) {
      MaybeRecordDeadBlock(block);
      block->DisconnectAndDelete();
      removed_one_or_more_blocks = true;
      // Removing a block inside a loop invalidates the loop information,
      // which then requires the full (more expensive) recomputation below.
      if (block->IsInLoop()) {
        rerun_dominance_and_loop_analysis = true;
      }
    }
  }

  // If we removed at least one block, we need to recompute the full
  // dominator tree and try block membership.
  if (removed_one_or_more_blocks) {
    if (rerun_dominance_and_loop_analysis) {
      graph_->ClearLoopInformation();
      graph_->ClearDominanceInformation();
      graph_->BuildDominatorTree();
    } else {
      graph_->ClearDominanceInformation();
      graph_->ComputeDominanceInformation();
      graph_->ComputeTryBlockInformation();
    }
  }
  return removed_one_or_more_blocks;
}
| 531 | |
// Removes individual instructions that are dead and safe to remove (as
// reported by IsDeadAndRemovable), e.g. unused and side-effect free values.
void HDeadCodeElimination::RemoveDeadInstructions() {
  // Process basic blocks in post-order in the dominator tree, so that
  // a dead instruction depending on another dead instruction is removed.
  for (HBasicBlock* block : graph_->GetPostOrder()) {
    // Traverse this block's instructions in backward order and remove
    // the unused ones.
    HBackwardInstructionIterator i(block->GetInstructions());
    // Skip the first iteration, as the last instruction of a block is
    // a branching instruction.
    DCHECK(i.Current()->IsControlFlow());
    for (i.Advance(); !i.Done(); i.Advance()) {
      HInstruction* inst = i.Current();
      DCHECK(!inst->IsControlFlow());
      if (inst->IsDeadAndRemovable()) {
        block->RemoveInstruction(inst);
        MaybeRecordStat(stats_, MethodCompilationStat::kRemovedDeadInstruction);
      }
    }
  }
}
| 552 | |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 553 | bool HDeadCodeElimination::Run() { |
Nicolas Geoffray | dac9b19 | 2016-07-15 10:46:17 +0100 | [diff] [blame] | 554 | // Do not eliminate dead blocks if the graph has irreducible loops. We could |
| 555 | // support it, but that would require changes in our loop representation to handle |
| 556 | // multiple entry points. We decided it was not worth the complexity. |
| 557 | if (!graph_->HasIrreducibleLoops()) { |
| 558 | // Simplify graph to generate more dead block patterns. |
| 559 | ConnectSuccessiveBlocks(); |
| 560 | bool did_any_simplification = false; |
Aart Bik | a8b8e9b | 2018-01-09 11:01:02 -0800 | [diff] [blame] | 561 | did_any_simplification |= SimplifyAlwaysThrows(); |
Nicolas Geoffray | dac9b19 | 2016-07-15 10:46:17 +0100 | [diff] [blame] | 562 | did_any_simplification |= SimplifyIfs(); |
| 563 | did_any_simplification |= RemoveDeadBlocks(); |
| 564 | if (did_any_simplification) { |
| 565 | // Connect successive blocks created by dead branches. |
| 566 | ConnectSuccessiveBlocks(); |
| 567 | } |
| 568 | } |
David Brazdil | 84daae5 | 2015-05-18 12:06:52 +0100 | [diff] [blame] | 569 | SsaRedundantPhiElimination(graph_).Run(); |
David Brazdil | 2d7352b | 2015-04-20 14:52:42 +0100 | [diff] [blame] | 570 | RemoveDeadInstructions(); |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 571 | return true; |
David Brazdil | 2d7352b | 2015-04-20 14:52:42 +0100 | [diff] [blame] | 572 | } |
| 573 | |
Roland Levillain | 72bceff | 2014-09-15 18:29:00 +0100 | [diff] [blame] | 574 | } // namespace art |