Roland Levillain | 72bceff | 2014-09-15 18:29:00 +0100 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2014 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "dead_code_elimination.h" |
| 18 | |
Santiago Aboy Solanes | 78f3d3a | 2022-07-15 14:30:05 +0100 | [diff] [blame] | 19 | #include "android-base/logging.h" |
David Brazdil | d9c9037 | 2016-09-14 16:53:55 +0100 | [diff] [blame] | 20 | #include "base/array_ref.h" |
Roland Levillain | 72bceff | 2014-09-15 18:29:00 +0100 | [diff] [blame] | 21 | #include "base/bit_vector-inl.h" |
Santiago Aboy Solanes | fa55aa0 | 2022-11-29 11:42:20 +0000 | [diff] [blame] | 22 | #include "base/logging.h" |
Vladimir Marko | 009d166 | 2017-10-10 13:21:15 +0100 | [diff] [blame] | 23 | #include "base/scoped_arena_allocator.h" |
| 24 | #include "base/scoped_arena_containers.h" |
Vladimir Marko | 2c45bc9 | 2016-10-25 16:54:12 +0100 | [diff] [blame] | 25 | #include "base/stl_util.h" |
Santiago Aboy Solanes | 78f3d3a | 2022-07-15 14:30:05 +0100 | [diff] [blame] | 26 | #include "optimizing/nodes.h" |
David Brazdil | 84daae5 | 2015-05-18 12:06:52 +0100 | [diff] [blame] | 27 | #include "ssa_phi_elimination.h" |
Roland Levillain | 72bceff | 2014-09-15 18:29:00 +0100 | [diff] [blame] | 28 | |
VladimĂr Marko | 434d968 | 2022-11-04 14:04:17 +0000 | [diff] [blame] | 29 | namespace art HIDDEN { |
Roland Levillain | 72bceff | 2014-09-15 18:29:00 +0100 | [diff] [blame] | 30 | |
Vladimir Marko | 211c211 | 2015-09-24 16:52:33 +0100 | [diff] [blame] | 31 | static void MarkReachableBlocks(HGraph* graph, ArenaBitVector* visited) { |
Vladimir Marko | 009d166 | 2017-10-10 13:21:15 +0100 | [diff] [blame] | 32 | // Use local allocator for allocating memory. |
| 33 | ScopedArenaAllocator allocator(graph->GetArenaStack()); |
| 34 | |
| 35 | ScopedArenaVector<HBasicBlock*> worklist(allocator.Adapter(kArenaAllocDCE)); |
Vladimir Marko | 211c211 | 2015-09-24 16:52:33 +0100 | [diff] [blame] | 36 | constexpr size_t kDefaultWorlistSize = 8; |
| 37 | worklist.reserve(kDefaultWorlistSize); |
| 38 | visited->SetBit(graph->GetEntryBlock()->GetBlockId()); |
| 39 | worklist.push_back(graph->GetEntryBlock()); |
David Brazdil | 2d7352b | 2015-04-20 14:52:42 +0100 | [diff] [blame] | 40 | |
Vladimir Marko | 211c211 | 2015-09-24 16:52:33 +0100 | [diff] [blame] | 41 | while (!worklist.empty()) { |
| 42 | HBasicBlock* block = worklist.back(); |
| 43 | worklist.pop_back(); |
| 44 | int block_id = block->GetBlockId(); |
| 45 | DCHECK(visited->IsBitSet(block_id)); |
| 46 | |
| 47 | ArrayRef<HBasicBlock* const> live_successors(block->GetSuccessors()); |
| 48 | HInstruction* last_instruction = block->GetLastInstruction(); |
| 49 | if (last_instruction->IsIf()) { |
| 50 | HIf* if_instruction = last_instruction->AsIf(); |
| 51 | HInstruction* condition = if_instruction->InputAt(0); |
| 52 | if (condition->IsIntConstant()) { |
Roland Levillain | 1a65388 | 2016-03-18 18:05:57 +0000 | [diff] [blame] | 53 | if (condition->AsIntConstant()->IsTrue()) { |
Vladimir Marko | 211c211 | 2015-09-24 16:52:33 +0100 | [diff] [blame] | 54 | live_successors = live_successors.SubArray(0u, 1u); |
| 55 | DCHECK_EQ(live_successors[0], if_instruction->IfTrueSuccessor()); |
| 56 | } else { |
Roland Levillain | 1a65388 | 2016-03-18 18:05:57 +0000 | [diff] [blame] | 57 | DCHECK(condition->AsIntConstant()->IsFalse()) << condition->AsIntConstant()->GetValue(); |
Vladimir Marko | 211c211 | 2015-09-24 16:52:33 +0100 | [diff] [blame] | 58 | live_successors = live_successors.SubArray(1u, 1u); |
| 59 | DCHECK_EQ(live_successors[0], if_instruction->IfFalseSuccessor()); |
| 60 | } |
Mark Mendell | fe57faa | 2015-09-18 09:26:15 -0400 | [diff] [blame] | 61 | } |
Vladimir Marko | 211c211 | 2015-09-24 16:52:33 +0100 | [diff] [blame] | 62 | } else if (last_instruction->IsPackedSwitch()) { |
| 63 | HPackedSwitch* switch_instruction = last_instruction->AsPackedSwitch(); |
| 64 | HInstruction* switch_input = switch_instruction->InputAt(0); |
| 65 | if (switch_input->IsIntConstant()) { |
| 66 | int32_t switch_value = switch_input->AsIntConstant()->GetValue(); |
| 67 | int32_t start_value = switch_instruction->GetStartValue(); |
Vladimir Marko | 430c4f5 | 2015-09-25 17:10:15 +0100 | [diff] [blame] | 68 | // Note: Though the spec forbids packed-switch values to wrap around, we leave |
| 69 | // that task to the verifier and use unsigned arithmetic with it's "modulo 2^32" |
| 70 | // semantics to check if the value is in range, wrapped or not. |
| 71 | uint32_t switch_index = |
| 72 | static_cast<uint32_t>(switch_value) - static_cast<uint32_t>(start_value); |
Vladimir Marko | 211c211 | 2015-09-24 16:52:33 +0100 | [diff] [blame] | 73 | if (switch_index < switch_instruction->GetNumEntries()) { |
| 74 | live_successors = live_successors.SubArray(switch_index, 1u); |
Vladimir Marko | ec7802a | 2015-10-01 20:57:57 +0100 | [diff] [blame] | 75 | DCHECK_EQ(live_successors[0], block->GetSuccessors()[switch_index]); |
Vladimir Marko | 211c211 | 2015-09-24 16:52:33 +0100 | [diff] [blame] | 76 | } else { |
| 77 | live_successors = live_successors.SubArray(switch_instruction->GetNumEntries(), 1u); |
| 78 | DCHECK_EQ(live_successors[0], switch_instruction->GetDefaultBlock()); |
| 79 | } |
Mark Mendell | fe57faa | 2015-09-18 09:26:15 -0400 | [diff] [blame] | 80 | } |
| 81 | } |
Vladimir Marko | 211c211 | 2015-09-24 16:52:33 +0100 | [diff] [blame] | 82 | |
| 83 | for (HBasicBlock* successor : live_successors) { |
| 84 | // Add only those successors that have not been visited yet. |
| 85 | if (!visited->IsBitSet(successor->GetBlockId())) { |
| 86 | visited->SetBit(successor->GetBlockId()); |
| 87 | worklist.push_back(successor); |
| 88 | } |
David Brazdil | 2d7352b | 2015-04-20 14:52:42 +0100 | [diff] [blame] | 89 | } |
| 90 | } |
| 91 | } |
| 92 | |
| 93 | void HDeadCodeElimination::MaybeRecordDeadBlock(HBasicBlock* block) { |
| 94 | if (stats_ != nullptr) { |
| 95 | stats_->RecordStat(MethodCompilationStat::kRemovedDeadInstruction, |
| 96 | block->GetPhis().CountSize() + block->GetInstructions().CountSize()); |
| 97 | } |
| 98 | } |
| 99 | |
Nicolas Geoffray | dac9b19 | 2016-07-15 10:46:17 +0100 | [diff] [blame] | 100 | void HDeadCodeElimination::MaybeRecordSimplifyIf() { |
| 101 | if (stats_ != nullptr) { |
| 102 | stats_->RecordStat(MethodCompilationStat::kSimplifyIf); |
Nicolas Geoffray | 09aa147 | 2016-01-19 10:52:54 +0000 | [diff] [blame] | 103 | } |
Nicolas Geoffray | dac9b19 | 2016-07-15 10:46:17 +0100 | [diff] [blame] | 104 | } |
| 105 | |
| 106 | static bool HasInput(HCondition* instruction, HInstruction* input) { |
| 107 | return (instruction->InputAt(0) == input) || |
| 108 | (instruction->InputAt(1) == input); |
| 109 | } |
| 110 | |
| 111 | static bool HasEquality(IfCondition condition) { |
| 112 | switch (condition) { |
| 113 | case kCondEQ: |
| 114 | case kCondLE: |
| 115 | case kCondGE: |
| 116 | case kCondBE: |
| 117 | case kCondAE: |
| 118 | return true; |
| 119 | case kCondNE: |
| 120 | case kCondLT: |
| 121 | case kCondGT: |
| 122 | case kCondB: |
| 123 | case kCondA: |
| 124 | return false; |
| 125 | } |
| 126 | } |
| 127 | |
| 128 | static HConstant* Evaluate(HCondition* condition, HInstruction* left, HInstruction* right) { |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 129 | if (left == right && !DataType::IsFloatingPointType(left->GetType())) { |
Nicolas Geoffray | dac9b19 | 2016-07-15 10:46:17 +0100 | [diff] [blame] | 130 | return condition->GetBlock()->GetGraph()->GetIntConstant( |
| 131 | HasEquality(condition->GetCondition()) ? 1 : 0); |
| 132 | } |
| 133 | |
| 134 | if (!left->IsConstant() || !right->IsConstant()) { |
| 135 | return nullptr; |
| 136 | } |
| 137 | |
| 138 | if (left->IsIntConstant()) { |
| 139 | return condition->Evaluate(left->AsIntConstant(), right->AsIntConstant()); |
| 140 | } else if (left->IsNullConstant()) { |
| 141 | return condition->Evaluate(left->AsNullConstant(), right->AsNullConstant()); |
| 142 | } else if (left->IsLongConstant()) { |
| 143 | return condition->Evaluate(left->AsLongConstant(), right->AsLongConstant()); |
| 144 | } else if (left->IsFloatConstant()) { |
| 145 | return condition->Evaluate(left->AsFloatConstant(), right->AsFloatConstant()); |
| 146 | } else { |
| 147 | DCHECK(left->IsDoubleConstant()); |
| 148 | return condition->Evaluate(left->AsDoubleConstant(), right->AsDoubleConstant()); |
| 149 | } |
| 150 | } |
| 151 | |
// Removes null checks that are control-dependent on an explicit null test of
// the same object: when `block` ends with `if (obj == null) -> throws` (or the
// negated form), `obj` is provably non-null in the non-throwing successor, so
// HNullCheck instructions dominated by `block` can be replaced by an HBoundType
// that asserts non-nullness. Returns true if at least one check was removed.
static bool RemoveNonNullControlDependences(HBasicBlock* block, HBasicBlock* throws) {
  // Test for an if as last statement.
  if (!block->EndsWithIf()) {
    return false;
  }
  HIf* ifs = block->GetLastInstruction()->AsIf();
  // Find either:
  //   if obj == null
  //     throws
  //   else
  //     not_throws
  // or:
  //   if obj != null
  //     not_throws
  //   else
  //     throws
  HInstruction* cond = ifs->InputAt(0);
  HBasicBlock* not_throws = nullptr;
  if (throws == ifs->IfTrueSuccessor() && cond->IsEqual()) {
    not_throws = ifs->IfFalseSuccessor();
  } else if (throws == ifs->IfFalseSuccessor() && cond->IsNotEqual()) {
    not_throws = ifs->IfTrueSuccessor();
  } else {
    return false;
  }
  DCHECK(cond->IsEqual() || cond->IsNotEqual());
  // Identify which operand of the comparison is the object: the other operand
  // must be the null constant, or the pattern does not apply.
  HInstruction* obj = cond->InputAt(1);
  if (obj->IsNullConstant()) {
    obj = cond->InputAt(0);
  } else if (!cond->InputAt(0)->IsNullConstant()) {
    return false;
  }
  // Scan all uses of obj and find null check under control dependence.
  // The HBoundType is created lazily on the first hit and shared by all
  // replaced null checks.
  HBoundType* bound = nullptr;
  const HUseList<HInstruction*>& uses = obj->GetUses();
  for (auto it = uses.begin(), end = uses.end(); it != end;) {
    HInstruction* user = it->GetUser();
    ++it;  // increment before possibly replacing
    if (user->IsNullCheck()) {
      HBasicBlock* user_block = user->GetBlock();
      // Exclude the test block itself and the throwing block; domination
      // guarantees the null test has already executed (and not thrown).
      if (user_block != block &&
          user_block != throws &&
          block->Dominates(user_block)) {
        if (bound == nullptr) {
          ReferenceTypeInfo ti = obj->GetReferenceTypeInfo();
          bound = new (obj->GetBlock()->GetGraph()->GetAllocator()) HBoundType(obj);
          bound->SetUpperBound(ti, /*can_be_null*/ false);
          bound->SetReferenceTypeInfo(ti);
          bound->SetCanBeNull(false);
          // Inserted at the head of the non-throwing successor, so it
          // dominates every dominated use that gets rewritten below.
          not_throws->InsertInstructionBefore(bound, not_throws->GetFirstInstruction());
        }
        user->ReplaceWith(bound);
        user_block->RemoveInstruction(user);
      }
    }
  }
  return bound != nullptr;
}
| 210 | |
Nicolas Geoffray | dac9b19 | 2016-07-15 10:46:17 +0100 | [diff] [blame] | 211 | // Simplify the pattern: |
| 212 | // |
Aart Bik | a8b8e9b | 2018-01-09 11:01:02 -0800 | [diff] [blame] | 213 | // B1 |
| 214 | // / \ |
Santiago Aboy Solanes | cef72a6 | 2022-04-06 14:13:18 +0000 | [diff] [blame] | 215 | // | instr_1 |
| 216 | // | ... |
| 217 | // | instr_n |
Aart Bik | a8b8e9b | 2018-01-09 11:01:02 -0800 | [diff] [blame] | 218 | // | foo() // always throws |
Santiago Aboy Solanes | 78f3d3a | 2022-07-15 14:30:05 +0100 | [diff] [blame] | 219 | // | instr_n+2 |
| 220 | // | ... |
| 221 | // | instr_n+m |
Aart Bik | a8b8e9b | 2018-01-09 11:01:02 -0800 | [diff] [blame] | 222 | // \ goto B2 |
| 223 | // \ / |
| 224 | // B2 |
| 225 | // |
| 226 | // Into: |
| 227 | // |
| 228 | // B1 |
| 229 | // / \ |
Santiago Aboy Solanes | cef72a6 | 2022-04-06 14:13:18 +0000 | [diff] [blame] | 230 | // | instr_1 |
| 231 | // | ... |
| 232 | // | instr_n |
Aart Bik | a8b8e9b | 2018-01-09 11:01:02 -0800 | [diff] [blame] | 233 | // | foo() |
| 234 | // | goto Exit |
| 235 | // | | |
| 236 | // B2 Exit |
| 237 | // |
| 238 | // Rationale: |
Santiago Aboy Solanes | a3bd09c | 2022-07-29 14:09:28 +0100 | [diff] [blame] | 239 | // Removal of the never taken edge to B2 may expose other optimization opportunities, such as code |
| 240 | // sinking. |
| 241 | // |
| 242 | // Note: The example above is a simple one that uses a `goto` but we could end the block with an If, |
| 243 | // for example. |
// See the pattern description in the comment above: redirects blocks that
// contain an always-throwing invoke straight to the exit block, detaching the
// unreachable tail of the block. Returns true (after rebuilding the dominator
// tree) if any block was rewritten.
bool HDeadCodeElimination::SimplifyAlwaysThrows() {
  HBasicBlock* exit = graph_->GetExitBlock();
  // Nothing to do without always-throwing invokes; an absent exit block
  // (e.g. infinite-loop-only methods) gives us nowhere to redirect to.
  if (!graph_->HasAlwaysThrowingInvokes() || exit == nullptr) {
    return false;
  }

  bool rerun_dominance_and_loop_analysis = false;

  // Order does not matter, just pick one.
  for (HBasicBlock* block : graph_->GetReversePostOrder()) {
    if (block->IsTryBlock()) {
      // We don't want to perform the simplify always throws optimizations for throws inside of
      // tries since those throws might not go to the exit block.
      continue;
    }

    // We iterate to find the first instruction that always throws. If two instructions always
    // throw, the first one will throw and the second one will never be reached.
    HInstruction* throwing_invoke = nullptr;
    for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
      if (it.Current()->IsInvoke() && it.Current()->AsInvoke()->AlwaysThrows()) {
        throwing_invoke = it.Current();
        break;
      }
    }

    if (throwing_invoke == nullptr) {
      // No always-throwing instruction found. Continue with the rest of the blocks.
      continue;
    }

    // If we are already pointing at the exit block we could still remove the instructions
    // between the always throwing instruction, and the exit block. If we have no other
    // instructions, just continue since there's nothing to do.
    if (block->GetSuccessors().size() == 1 &&
        block->GetSingleSuccessor() == exit &&
        block->GetLastInstruction()->GetPrevious() == throwing_invoke) {
      continue;
    }

    // We split the block at the throwing instruction, and the instructions after the throwing
    // instructions will be disconnected from the graph after `block` points to the exit.
    // `RemoveDeadBlocks` will take care of removing this new block and its instructions.
    // Even though `SplitBefore` doesn't guarantee the graph to remain in SSA form, it is fine
    // since we do not break it.
    HBasicBlock* new_block = block->SplitBefore(throwing_invoke->GetNext(),
                                                /* require_graph_not_in_ssa_form= */ false);
    DCHECK_EQ(block->GetSingleSuccessor(), new_block);
    block->ReplaceSuccessor(new_block, exit);

    rerun_dominance_and_loop_analysis = true;
    MaybeRecordStat(stats_, MethodCompilationStat::kSimplifyThrowingInvoke);
    // Perform a quick follow up optimization on object != null control dependences
    // that is much cheaper to perform now than in a later phase.
    // If there are multiple predecessors, none may end with a HIf as required in
    // RemoveNonNullControlDependences because we split critical edges.
    if (block->GetPredecessors().size() == 1u &&
        RemoveNonNullControlDependences(block->GetSinglePredecessor(), block)) {
      MaybeRecordStat(stats_, MethodCompilationStat::kRemovedNullCheck);
    }
  }

  // We need to re-analyze the graph in order to run DCE afterwards.
  if (rerun_dominance_and_loop_analysis) {
    graph_->ClearLoopInformation();
    graph_->ClearDominanceInformation();
    graph_->BuildDominatorTree();
    return true;
  }
  return false;
}
| 315 | |
| 316 | // Simplify the pattern: |
| 317 | // |
Nicolas Geoffray | dac9b19 | 2016-07-15 10:46:17 +0100 | [diff] [blame] | 318 | // B1 B2 ... |
| 319 | // goto goto goto |
| 320 | // \ | / |
| 321 | // \ | / |
| 322 | // B3 |
| 323 | // i1 = phi(input, input) |
| 324 | // (i2 = condition on i1) |
| 325 | // if i1 (or i2) |
| 326 | // / \ |
| 327 | // / \ |
| 328 | // B4 B5 |
| 329 | // |
| 330 | // Into: |
| 331 | // |
| 332 | // B1 B2 ... |
| 333 | // | | | |
| 334 | // B4 B5 B? |
| 335 | // |
Vladimir Marko | 606c8f0 | 2016-11-03 13:01:28 +0000 | [diff] [blame] | 336 | // Note that individual edges can be redirected (for example B2->B3 |
| 337 | // can be redirected as B2->B5) without applying this optimization |
| 338 | // to other incoming edges. |
| 339 | // |
| 340 | // This simplification cannot be applied to catch blocks, because |
| 341 | // exception handler edges do not represent normal control flow. |
| 342 | // Though in theory this could still apply to normal control flow |
| 343 | // going directly to a catch block, we cannot support it at the |
| 344 | // moment because the catch Phi's inputs do not correspond to the |
| 345 | // catch block's predecessors, so we cannot identify which |
| 346 | // predecessor corresponds to a given statically evaluated input. |
| 347 | // |
| 348 | // We do not apply this optimization to loop headers as this could |
| 349 | // create irreducible loops. We rely on the suspend check in the |
| 350 | // loop header to prevent the pattern match. |
Nicolas Geoffray | dac9b19 | 2016-07-15 10:46:17 +0100 | [diff] [blame] | 351 | // |
| 352 | // Note that we rely on the dead code elimination to get rid of B3. |
// See the pattern description in the comment above: redirects predecessors
// that feed a statically-decidable value into a phi-fed `if` directly to the
// branch target, bypassing the diamond-join block. Returns true if any `if`
// was simplified (after the CFG/dominance information has been repaired).
bool HDeadCodeElimination::SimplifyIfs() {
  bool simplified_one_or_more_ifs = false;
  bool rerun_dominance_and_loop_analysis = false;

  for (HBasicBlock* block : graph_->GetReversePostOrder()) {
    HInstruction* last = block->GetLastInstruction();
    HInstruction* first = block->GetFirstInstruction();
    // Match a block that is only: a single phi plus `if (phi)`, or a single
    // phi, a condition on that phi, and `if (condition)`. Catch blocks are
    // excluded because their phi inputs do not correspond to predecessors.
    if (!block->IsCatchBlock() &&
        last->IsIf() &&
        block->HasSinglePhi() &&
        block->GetFirstPhi()->HasOnlyOneNonEnvironmentUse()) {
      bool has_only_phi_and_if = (last == first) && (last->InputAt(0) == block->GetFirstPhi());
      bool has_only_phi_condition_and_if =
          !has_only_phi_and_if &&
          first->IsCondition() &&
          HasInput(first->AsCondition(), block->GetFirstPhi()) &&
          (first->GetNext() == last) &&
          (last->InputAt(0) == first) &&
          first->HasOnlyOneNonEnvironmentUse();

      if (has_only_phi_and_if || has_only_phi_condition_and_if) {
        DCHECK(!block->IsLoopHeader());
        HPhi* phi = block->GetFirstPhi()->AsPhi();
        bool phi_input_is_left = (first->InputAt(0) == phi);

        // Walk over all inputs of the phis and update the control flow of
        // predecessors feeding constants to the phi.
        // Note that phi->InputCount() may change inside the loop.
        // Invariant: phi input index i corresponds to predecessor i, and
        // RemoveInputAt(i) keeps the remaining indices aligned.
        for (size_t i = 0; i < phi->InputCount();) {
          HInstruction* input = phi->InputAt(i);
          HInstruction* value_to_check = nullptr;
          if (has_only_phi_and_if) {
            if (input->IsIntConstant()) {
              value_to_check = input;
            }
          } else {
            DCHECK(has_only_phi_condition_and_if);
            // Evaluate the condition with this phi input substituted in.
            if (phi_input_is_left) {
              value_to_check = Evaluate(first->AsCondition(), input, first->InputAt(1));
            } else {
              value_to_check = Evaluate(first->AsCondition(), first->InputAt(0), input);
            }
          }
          if (value_to_check == nullptr) {
            // Could not evaluate to a constant, continue iterating over the inputs.
            ++i;
          } else {
            // The branch outcome is known for this predecessor: hook it up
            // directly to the taken successor.
            HBasicBlock* predecessor_to_update = block->GetPredecessors()[i];
            HBasicBlock* successor_to_update = nullptr;
            if (value_to_check->AsIntConstant()->IsTrue()) {
              successor_to_update = last->AsIf()->IfTrueSuccessor();
            } else {
              DCHECK(value_to_check->AsIntConstant()->IsFalse())
                  << value_to_check->AsIntConstant()->GetValue();
              successor_to_update = last->AsIf()->IfFalseSuccessor();
            }
            predecessor_to_update->ReplaceSuccessor(block, successor_to_update);
            phi->RemoveInputAt(i);
            simplified_one_or_more_ifs = true;
            if (block->IsInLoop()) {
              rerun_dominance_and_loop_analysis = true;
            }
            // For simplicity, don't create a dead block, let the dead code elimination
            // pass deal with it.
            if (phi->InputCount() == 1) {
              break;
            }
          }
        }
        if (block->GetPredecessors().size() == 1) {
          // Only one predecessor left: the phi is trivial and can be folded.
          phi->ReplaceWith(phi->InputAt(0));
          block->RemovePhi(phi);
          if (has_only_phi_condition_and_if) {
            // Evaluate here (and not wait for a constant folding pass) to open
            // more opportunities for DCE.
            HInstruction* result = first->AsCondition()->TryStaticEvaluation();
            if (result != nullptr) {
              first->ReplaceWith(result);
              block->RemoveInstruction(first);
            }
          }
        }
        // NOTE(review): this records a stat whenever the cumulative flag is
        // set, even for later matched blocks that were not themselves
        // simplified — presumably an accepted imprecision; verify intent.
        if (simplified_one_or_more_ifs) {
          MaybeRecordSimplifyIf();
        }
      }
    }
  }
  // We need to re-analyze the graph in order to run DCE afterwards.
  if (simplified_one_or_more_ifs) {
    if (rerun_dominance_and_loop_analysis) {
      graph_->ClearLoopInformation();
      graph_->ClearDominanceInformation();
      graph_->BuildDominatorTree();
    } else {
      graph_->ClearDominanceInformation();
      // We have introduced critical edges, remove them.
      graph_->SimplifyCFG();
      graph_->ComputeDominanceInformation();
      graph_->ComputeTryBlockInformation();
    }
  }

  return simplified_one_or_more_ifs;
}
| 458 | |
// Merges each block ending in a Goto with its successor whenever that
// successor has no other predecessor (and is not the exit block), collapsing
// chains of trivially-connected blocks.
void HDeadCodeElimination::ConnectSuccessiveBlocks() {
  // Order does not matter. Skip the entry block by starting at index 1 in reverse post order.
  // Note: MergeWith() shrinks the reverse-post-order list in place, so `size`
  // is decremented to match and the DCHECKs below verify the bookkeeping.
  for (size_t i = 1u, size = graph_->GetReversePostOrder().size(); i != size; ++i) {
    HBasicBlock* block = graph_->GetReversePostOrder()[i];
    DCHECK(!block->IsEntryBlock());
    while (block->GetLastInstruction()->IsGoto()) {
      HBasicBlock* successor = block->GetSingleSuccessor();
      if (successor->IsExitBlock() || successor->GetPredecessors().size() != 1u) {
        break;
      }
      // The successor must appear after `block` in reverse post order, so the
      // outer loop never revisits a merged-away block.
      DCHECK_LT(i, IndexOfElement(graph_->GetReversePostOrder(), successor));
      block->MergeWith(successor);
      --size;
      DCHECK_EQ(size, graph_->GetReversePostOrder().size());
      DCHECK_EQ(block, graph_->GetReversePostOrder()[i]);
      // Reiterate on this block in case it can be merged with its new successor.
    }
  }
}
| 478 | |
// Per-try bookkeeping used by RemoveUnneededTries: the set of blocks covered
// by a try, plus any other try entries that denote the same try (same
// exception handlers) and must be updated together.
struct HDeadCodeElimination::TryBelongingInformation {
  explicit TryBelongingInformation(ScopedArenaAllocator* allocator)
      : blocks_in_try(allocator->Adapter(kArenaAllocDCE)),
        coalesced_try_entries(allocator->Adapter(kArenaAllocDCE)) {}

  // Which blocks belong in the try.
  ScopedArenaSet<HBasicBlock*> blocks_in_try;
  // Which other try entries are referencing this same try.
  ScopedArenaSet<HBasicBlock*> coalesced_try_entries;
};
| 489 | |
| 490 | bool HDeadCodeElimination::CanPerformTryRemoval(const TryBelongingInformation& try_belonging_info) { |
| 491 | for (HBasicBlock* block : try_belonging_info.blocks_in_try) { |
| 492 | for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) { |
| 493 | if (it.Current()->CanThrow()) { |
| 494 | return false; |
| 495 | } |
| 496 | } |
| 497 | } |
| 498 | return true; |
| 499 | } |
| 500 | |
// Detaches all exception-handler edges from `block` and replaces its
// terminating TryBoundary with a plain Goto. Sets `*any_block_in_loop` if
// `block` or any detached handler is inside a loop, so the caller knows loop
// information must be recomputed.
void HDeadCodeElimination::DisconnectHandlersAndUpdateTryBoundary(
    HBasicBlock* block,
    /* out */ bool* any_block_in_loop) {
  if (block->IsInLoop()) {
    *any_block_in_loop = true;
  }

  // Disconnect the handlers.
  // Successor 0 is the normal-flow successor; every further successor of a
  // TryBoundary block is a catch handler.
  while (block->GetSuccessors().size() > 1) {
    HBasicBlock* handler = block->GetSuccessors()[1];
    DCHECK(handler->IsCatchBlock());
    block->RemoveSuccessor(handler);
    handler->RemovePredecessor(block);
    if (handler->IsInLoop()) {
      *any_block_in_loop = true;
    }
  }

  // Change TryBoundary to Goto.
  DCHECK(block->EndsWithTryBoundary());
  HInstruction* last = block->GetLastInstruction();
  block->RemoveInstruction(last);
  block->AddInstruction(new (graph_->GetAllocator()) HGoto(last->GetDexPc()));
  DCHECK_EQ(block->GetSuccessors().size(), 1u);
}
| 526 | |
// Removes the try rooted at `try_entry` (together with any coalesced entries
// denoting the same try): turns every try boundary into a Goto, detaches the
// handlers, and clears the try/catch information of the covered blocks.
// Sets `*any_block_in_loop` when any affected block is inside a loop.
void HDeadCodeElimination::RemoveTry(HBasicBlock* try_entry,
                                     const TryBelongingInformation& try_belonging_info,
                                     /* out */ bool* any_block_in_loop) {
  // Update all try entries.
  DCHECK(try_entry->EndsWithTryBoundary());
  DCHECK(try_entry->GetLastInstruction()->AsTryBoundary()->IsEntry());
  DisconnectHandlersAndUpdateTryBoundary(try_entry, any_block_in_loop);

  for (HBasicBlock* other_try_entry : try_belonging_info.coalesced_try_entries) {
    DCHECK(other_try_entry->EndsWithTryBoundary());
    DCHECK(other_try_entry->GetLastInstruction()->AsTryBoundary()->IsEntry());
    DisconnectHandlersAndUpdateTryBoundary(other_try_entry, any_block_in_loop);
  }

  // Update the blocks in the try.
  for (HBasicBlock* block : try_belonging_info.blocks_in_try) {
    // Update the try catch information since now the try doesn't exist.
    block->SetTryCatchInformation(nullptr);
    if (block->IsInLoop()) {
      *any_block_in_loop = true;
    }

    if (block->EndsWithTryBoundary()) {
      // Try exits.
      DCHECK(!block->GetLastInstruction()->AsTryBoundary()->IsEntry());
      DisconnectHandlersAndUpdateTryBoundary(block, any_block_in_loop);

      if (block->GetSingleSuccessor()->IsExitBlock()) {
        // `block` used to be a single exit TryBoundary that got turned into a Goto. It
        // is now pointing to the exit which we don't allow. To fix it, we disconnect
        // `block` from its predecessor and RemoveDeadBlocks will remove it from the
        // graph.
        DCHECK(block->IsSingleGoto());
        HBasicBlock* predecessor = block->GetSinglePredecessor();
        predecessor->ReplaceSuccessor(block, graph_->GetExitBlock());

        if (!block->GetDominatedBlocks().empty()) {
          // Update domination tree if `block` dominates a block to keep the graph consistent.
          DCHECK_EQ(block->GetDominatedBlocks().size(), 1u);
          DCHECK_EQ(graph_->GetExitBlock()->GetDominator(), block);
          predecessor->AddDominatedBlock(graph_->GetExitBlock());
          graph_->GetExitBlock()->SetDominator(predecessor);
          block->RemoveDominatedBlock(graph_->GetExitBlock());
        }
      }
    }
  }
}
| 575 | |
| 576 | bool HDeadCodeElimination::RemoveUnneededTries() { |
| 577 | if (!graph_->HasTryCatch()) { |
| 578 | return false; |
| 579 | } |
| 580 | |
| 581 | // Use local allocator for allocating memory. |
| 582 | ScopedArenaAllocator allocator(graph_->GetArenaStack()); |
| 583 | |
| 584 | // Collect which blocks are part of which try. |
| 585 | std::unordered_map<HBasicBlock*, TryBelongingInformation> tries; |
| 586 | for (HBasicBlock* block : graph_->GetReversePostOrderSkipEntryBlock()) { |
| 587 | if (block->IsTryBlock()) { |
| 588 | HBasicBlock* key = block->GetTryCatchInformation()->GetTryEntry().GetBlock(); |
| 589 | auto it = tries.find(key); |
| 590 | if (it == tries.end()) { |
| 591 | it = tries.insert({key, TryBelongingInformation(&allocator)}).first; |
| 592 | } |
| 593 | it->second.blocks_in_try.insert(block); |
| 594 | } |
| 595 | } |
| 596 | |
| 597 | // Deduplicate the tries which have different try entries but they are really the same try. |
| 598 | for (auto it = tries.begin(); it != tries.end(); it++) { |
| 599 | DCHECK(it->first->EndsWithTryBoundary()); |
| 600 | HTryBoundary* try_boundary = it->first->GetLastInstruction()->AsTryBoundary(); |
| 601 | for (auto other_it = next(it); other_it != tries.end(); /*other_it++ in the loop*/) { |
| 602 | DCHECK(other_it->first->EndsWithTryBoundary()); |
| 603 | HTryBoundary* other_try_boundary = other_it->first->GetLastInstruction()->AsTryBoundary(); |
| 604 | if (try_boundary->HasSameExceptionHandlersAs(*other_try_boundary)) { |
| 605 | // Merge the entries as they are really the same one. |
| 606 | // Block merging. |
| 607 | it->second.blocks_in_try.insert(other_it->second.blocks_in_try.begin(), |
| 608 | other_it->second.blocks_in_try.end()); |
| 609 | |
| 610 | // Add the coalesced try entry to update it too. |
| 611 | it->second.coalesced_try_entries.insert(other_it->first); |
| 612 | |
| 613 | // Erase the other entry. |
| 614 | other_it = tries.erase(other_it); |
| 615 | } else { |
| 616 | other_it++; |
| 617 | } |
| 618 | } |
| 619 | } |
| 620 | |
Santiago Aboy Solanes | b2d364d | 2022-11-10 10:26:31 +0000 | [diff] [blame] | 621 | size_t removed_tries = 0; |
Santiago Aboy Solanes | fa55aa0 | 2022-11-29 11:42:20 +0000 | [diff] [blame] | 622 | bool any_block_in_loop = false; |
Santiago Aboy Solanes | b2d364d | 2022-11-10 10:26:31 +0000 | [diff] [blame] | 623 | |
| 624 | // Check which tries contain throwing instructions. |
| 625 | for (const auto& entry : tries) { |
| 626 | if (CanPerformTryRemoval(entry.second)) { |
| 627 | ++removed_tries; |
Santiago Aboy Solanes | fa55aa0 | 2022-11-29 11:42:20 +0000 | [diff] [blame] | 628 | RemoveTry(entry.first, entry.second, &any_block_in_loop); |
Santiago Aboy Solanes | b2d364d | 2022-11-10 10:26:31 +0000 | [diff] [blame] | 629 | } |
| 630 | } |
| 631 | |
Santiago Aboy Solanes | b2d364d | 2022-11-10 10:26:31 +0000 | [diff] [blame] | 632 | if (removed_tries != 0) { |
| 633 | // We want to: |
| 634 | // 1) Update the dominance information |
| 635 | // 2) Remove catch block subtrees, if they are now unreachable. |
| 636 | // If we run the dominance recomputation without removing the code, those catch blocks will |
| 637 | // not be part of the post order and won't be removed. If we don't run the dominance |
| 638 | // recomputation, we risk RemoveDeadBlocks not running it and leaving the graph in an |
Santiago Aboy Solanes | fa55aa0 | 2022-11-29 11:42:20 +0000 | [diff] [blame] | 639 | // inconsistent state. So, what we can do is run RemoveDeadBlocks and force a recomputation. |
Santiago Aboy Solanes | b2d364d | 2022-11-10 10:26:31 +0000 | [diff] [blame] | 640 | // Note that we are not guaranteed to remove a catch block if we have nested try blocks: |
| 641 | // |
| 642 | // try { |
| 643 | // ... nothing can throw. TryBoundary A ... |
| 644 | // try { |
| 645 | // ... can throw. TryBoundary B... |
| 646 | // } catch (Error e) {} |
| 647 | // } catch (Exception e) {} |
| 648 | // |
| 649 | // In the example above, we can remove the TryBoundary A but the Exception catch cannot be |
| 650 | // removed as the TryBoundary B might still throw into that catch. TryBoundary A and B don't get |
| 651 | // coalesced since they have different catch handlers. |
| 652 | |
Santiago Aboy Solanes | fa55aa0 | 2022-11-29 11:42:20 +0000 | [diff] [blame] | 653 | RemoveDeadBlocks(/* force_recomputation= */ true, any_block_in_loop); |
Santiago Aboy Solanes | b2d364d | 2022-11-10 10:26:31 +0000 | [diff] [blame] | 654 | MaybeRecordStat(stats_, MethodCompilationStat::kRemovedTry, removed_tries); |
| 655 | return true; |
| 656 | } else { |
| 657 | return false; |
| 658 | } |
| 659 | } |
| 660 | |
Santiago Aboy Solanes | fa55aa0 | 2022-11-29 11:42:20 +0000 | [diff] [blame] | 661 | bool HDeadCodeElimination::RemoveDeadBlocks(bool force_recomputation, |
| 662 | bool force_loop_recomputation) { |
| 663 | DCHECK_IMPLIES(force_loop_recomputation, force_recomputation); |
| 664 | |
Vladimir Marko | 009d166 | 2017-10-10 13:21:15 +0100 | [diff] [blame] | 665 | // Use local allocator for allocating memory. |
| 666 | ScopedArenaAllocator allocator(graph_->GetArenaStack()); |
| 667 | |
David Brazdil | 2d7352b | 2015-04-20 14:52:42 +0100 | [diff] [blame] | 668 | // Classify blocks as reachable/unreachable. |
Vladimir Marko | 009d166 | 2017-10-10 13:21:15 +0100 | [diff] [blame] | 669 | ArenaBitVector live_blocks(&allocator, graph_->GetBlocks().size(), false, kArenaAllocDCE); |
| 670 | live_blocks.ClearAllBits(); |
David Brazdil | a4b8c21 | 2015-05-07 09:59:30 +0100 | [diff] [blame] | 671 | |
Vladimir Marko | 211c211 | 2015-09-24 16:52:33 +0100 | [diff] [blame] | 672 | MarkReachableBlocks(graph_, &live_blocks); |
Nicolas Geoffray | 1f82ecc | 2015-06-24 12:20:24 +0100 | [diff] [blame] | 673 | bool removed_one_or_more_blocks = false; |
Nicolas Geoffray | 15bd228 | 2016-01-05 15:55:41 +0000 | [diff] [blame] | 674 | bool rerun_dominance_and_loop_analysis = false; |
David Brazdil | 2d7352b | 2015-04-20 14:52:42 +0100 | [diff] [blame] | 675 | |
David Brazdil | a4b8c21 | 2015-05-07 09:59:30 +0100 | [diff] [blame] | 676 | // Remove all dead blocks. Iterate in post order because removal needs the |
| 677 | // block's chain of dominators and nested loops need to be updated from the |
| 678 | // inside out. |
Vladimir Marko | 2c45bc9 | 2016-10-25 16:54:12 +0100 | [diff] [blame] | 679 | for (HBasicBlock* block : graph_->GetPostOrder()) { |
David Brazdil | a4b8c21 | 2015-05-07 09:59:30 +0100 | [diff] [blame] | 680 | int id = block->GetBlockId(); |
Nicolas Geoffray | 15bd228 | 2016-01-05 15:55:41 +0000 | [diff] [blame] | 681 | if (!live_blocks.IsBitSet(id)) { |
David Brazdil | 69a2804 | 2015-04-29 17:16:07 +0100 | [diff] [blame] | 682 | MaybeRecordDeadBlock(block); |
| 683 | block->DisconnectAndDelete(); |
Nicolas Geoffray | 1f82ecc | 2015-06-24 12:20:24 +0100 | [diff] [blame] | 684 | removed_one_or_more_blocks = true; |
Nicolas Geoffray | 15bd228 | 2016-01-05 15:55:41 +0000 | [diff] [blame] | 685 | if (block->IsInLoop()) { |
| 686 | rerun_dominance_and_loop_analysis = true; |
| 687 | } |
David Brazdil | 2d7352b | 2015-04-20 14:52:42 +0100 | [diff] [blame] | 688 | } |
David Brazdil | 2d7352b | 2015-04-20 14:52:42 +0100 | [diff] [blame] | 689 | } |
| 690 | |
Nicolas Geoffray | 1f82ecc | 2015-06-24 12:20:24 +0100 | [diff] [blame] | 691 | // If we removed at least one block, we need to recompute the full |
David Brazdil | 8a7c0fe | 2015-11-02 20:24:55 +0000 | [diff] [blame] | 692 | // dominator tree and try block membership. |
Santiago Aboy Solanes | fa55aa0 | 2022-11-29 11:42:20 +0000 | [diff] [blame] | 693 | if (removed_one_or_more_blocks || force_recomputation) { |
| 694 | if (rerun_dominance_and_loop_analysis || force_loop_recomputation) { |
Nicolas Geoffray | 15bd228 | 2016-01-05 15:55:41 +0000 | [diff] [blame] | 695 | graph_->ClearLoopInformation(); |
| 696 | graph_->ClearDominanceInformation(); |
| 697 | graph_->BuildDominatorTree(); |
| 698 | } else { |
| 699 | graph_->ClearDominanceInformation(); |
| 700 | graph_->ComputeDominanceInformation(); |
| 701 | graph_->ComputeTryBlockInformation(); |
| 702 | } |
Nicolas Geoffray | 1f82ecc | 2015-06-24 12:20:24 +0100 | [diff] [blame] | 703 | } |
Nicolas Geoffray | dac9b19 | 2016-07-15 10:46:17 +0100 | [diff] [blame] | 704 | return removed_one_or_more_blocks; |
David Brazdil | 2d7352b | 2015-04-20 14:52:42 +0100 | [diff] [blame] | 705 | } |
| 706 | |
| 707 | void HDeadCodeElimination::RemoveDeadInstructions() { |
Roland Levillain | 72bceff | 2014-09-15 18:29:00 +0100 | [diff] [blame] | 708 | // Process basic blocks in post-order in the dominator tree, so that |
David Brazdil | 2d7352b | 2015-04-20 14:52:42 +0100 | [diff] [blame] | 709 | // a dead instruction depending on another dead instruction is removed. |
Vladimir Marko | 2c45bc9 | 2016-10-25 16:54:12 +0100 | [diff] [blame] | 710 | for (HBasicBlock* block : graph_->GetPostOrder()) { |
Roland Levillain | 72bceff | 2014-09-15 18:29:00 +0100 | [diff] [blame] | 711 | // Traverse this block's instructions in backward order and remove |
| 712 | // the unused ones. |
| 713 | HBackwardInstructionIterator i(block->GetInstructions()); |
| 714 | // Skip the first iteration, as the last instruction of a block is |
| 715 | // a branching instruction. |
| 716 | DCHECK(i.Current()->IsControlFlow()); |
| 717 | for (i.Advance(); !i.Done(); i.Advance()) { |
| 718 | HInstruction* inst = i.Current(); |
| 719 | DCHECK(!inst->IsControlFlow()); |
Aart Bik | 482095d | 2016-10-10 15:39:10 -0700 | [diff] [blame] | 720 | if (inst->IsDeadAndRemovable()) { |
Roland Levillain | 72bceff | 2014-09-15 18:29:00 +0100 | [diff] [blame] | 721 | block->RemoveInstruction(inst); |
Igor Murashkin | 1e065a5 | 2017-08-09 13:20:34 -0700 | [diff] [blame] | 722 | MaybeRecordStat(stats_, MethodCompilationStat::kRemovedDeadInstruction); |
Roland Levillain | 72bceff | 2014-09-15 18:29:00 +0100 | [diff] [blame] | 723 | } |
| 724 | } |
| 725 | } |
| 726 | } |
| 727 | |
Santiago Aboy Solanes | 74da668 | 2022-12-16 19:28:47 +0000 | [diff] [blame] | 728 | void HDeadCodeElimination::UpdateGraphFlags() { |
| 729 | bool has_monitor_operations = false; |
| 730 | bool has_simd = false; |
| 731 | bool has_bounds_checks = false; |
| 732 | bool has_always_throwing_invokes = false; |
| 733 | |
| 734 | for (HBasicBlock* block : graph_->GetReversePostOrder()) { |
| 735 | for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) { |
| 736 | HInstruction* instruction = it.Current(); |
| 737 | if (instruction->IsMonitorOperation()) { |
| 738 | has_monitor_operations = true; |
| 739 | } else if (instruction->IsVecOperation()) { |
| 740 | has_simd = true; |
| 741 | } else if (instruction->IsBoundsCheck()) { |
| 742 | has_bounds_checks = true; |
| 743 | } else if (instruction->IsInvoke() && instruction->AsInvoke()->AlwaysThrows()) { |
| 744 | has_always_throwing_invokes = true; |
| 745 | } |
| 746 | } |
| 747 | } |
| 748 | |
| 749 | graph_->SetHasMonitorOperations(has_monitor_operations); |
| 750 | graph_->SetHasSIMD(has_simd); |
| 751 | graph_->SetHasBoundsChecks(has_bounds_checks); |
| 752 | graph_->SetHasAlwaysThrowingInvokes(has_always_throwing_invokes); |
| 753 | } |
| 754 | |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 755 | bool HDeadCodeElimination::Run() { |
Nicolas Geoffray | dac9b19 | 2016-07-15 10:46:17 +0100 | [diff] [blame] | 756 | // Do not eliminate dead blocks if the graph has irreducible loops. We could |
| 757 | // support it, but that would require changes in our loop representation to handle |
| 758 | // multiple entry points. We decided it was not worth the complexity. |
| 759 | if (!graph_->HasIrreducibleLoops()) { |
| 760 | // Simplify graph to generate more dead block patterns. |
| 761 | ConnectSuccessiveBlocks(); |
| 762 | bool did_any_simplification = false; |
Aart Bik | a8b8e9b | 2018-01-09 11:01:02 -0800 | [diff] [blame] | 763 | did_any_simplification |= SimplifyAlwaysThrows(); |
Nicolas Geoffray | dac9b19 | 2016-07-15 10:46:17 +0100 | [diff] [blame] | 764 | did_any_simplification |= SimplifyIfs(); |
| 765 | did_any_simplification |= RemoveDeadBlocks(); |
Santiago Aboy Solanes | b2d364d | 2022-11-10 10:26:31 +0000 | [diff] [blame] | 766 | // We call RemoveDeadBlocks before RemoveUnneededTries to remove the dead blocks from the |
| 767 | // previous optimizations. Otherwise, we might detect that a try has throwing instructions but |
| 768 | // they are actually dead code. RemoveUnneededTryBoundary will call RemoveDeadBlocks again if |
| 769 | // needed. |
| 770 | did_any_simplification |= RemoveUnneededTries(); |
Nicolas Geoffray | dac9b19 | 2016-07-15 10:46:17 +0100 | [diff] [blame] | 771 | if (did_any_simplification) { |
| 772 | // Connect successive blocks created by dead branches. |
| 773 | ConnectSuccessiveBlocks(); |
| 774 | } |
| 775 | } |
David Brazdil | 84daae5 | 2015-05-18 12:06:52 +0100 | [diff] [blame] | 776 | SsaRedundantPhiElimination(graph_).Run(); |
David Brazdil | 2d7352b | 2015-04-20 14:52:42 +0100 | [diff] [blame] | 777 | RemoveDeadInstructions(); |
Santiago Aboy Solanes | 74da668 | 2022-12-16 19:28:47 +0000 | [diff] [blame] | 778 | UpdateGraphFlags(); |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 779 | return true; |
David Brazdil | 2d7352b | 2015-04-20 14:52:42 +0100 | [diff] [blame] | 780 | } |
| 781 | |
Roland Levillain | 72bceff | 2014-09-15 18:29:00 +0100 | [diff] [blame] | 782 | } // namespace art |