Nicolas Geoffray | b813ca1 | 2017-02-16 22:08:29 +0000 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2017 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "code_sinking.h" |
| 18 | |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 19 | #include "base/arena_bit_vector.h" |
| 20 | #include "base/bit_vector-inl.h" |
| 21 | #include "base/scoped_arena_allocator.h" |
| 22 | #include "base/scoped_arena_containers.h" |
Nicolas Geoffray | b813ca1 | 2017-02-16 22:08:29 +0000 | [diff] [blame] | 23 | #include "common_dominator.h" |
| 24 | #include "nodes.h" |
| 25 | |
| 26 | namespace art { |
| 27 | |
| 28 | void CodeSinking::Run() { |
| 29 | HBasicBlock* exit = graph_->GetExitBlock(); |
| 30 | if (exit == nullptr) { |
| 31 | // Infinite loop, just bail. |
| 32 | return; |
| 33 | } |
| 34 | // TODO(ngeoffray): we do not profile branches yet, so use throw instructions |
| 35 | // as an indicator of an uncommon branch. |
| 36 | for (HBasicBlock* exit_predecessor : exit->GetPredecessors()) { |
Aart Bik | a8b8e9b | 2018-01-09 11:01:02 -0800 | [diff] [blame^] | 37 | HInstruction* last = exit_predecessor->GetLastInstruction(); |
| 38 | // Any predecessor of the exit that does not return, throws an exception. |
| 39 | if (!last->IsReturn() && !last->IsReturnVoid()) { |
Nicolas Geoffray | b813ca1 | 2017-02-16 22:08:29 +0000 | [diff] [blame] | 40 | SinkCodeToUncommonBranch(exit_predecessor); |
| 41 | } |
| 42 | } |
| 43 | } |
| 44 | |
| 45 | static bool IsInterestingInstruction(HInstruction* instruction) { |
| 46 | // Instructions from the entry graph (for example constants) are never interesting to move. |
| 47 | if (instruction->GetBlock() == instruction->GetBlock()->GetGraph()->GetEntryBlock()) { |
| 48 | return false; |
| 49 | } |
| 50 | // We want to move moveable instructions that cannot throw, as well as |
| 51 | // heap stores and allocations. |
| 52 | |
| 53 | // Volatile stores cannot be moved. |
| 54 | if (instruction->IsInstanceFieldSet()) { |
| 55 | if (instruction->AsInstanceFieldSet()->IsVolatile()) { |
| 56 | return false; |
| 57 | } |
| 58 | } |
| 59 | |
| 60 | // Check allocations first, as they can throw, but it is safe to move them. |
| 61 | if (instruction->IsNewInstance() || instruction->IsNewArray()) { |
| 62 | return true; |
| 63 | } |
| 64 | |
Igor Murashkin | 79d8fa7 | 2017-04-18 09:37:23 -0700 | [diff] [blame] | 65 | // Check it is safe to move ConstructorFence. |
| 66 | // (Safe to move ConstructorFence for only protecting the new-instance but not for finals.) |
| 67 | if (instruction->IsConstructorFence()) { |
| 68 | HConstructorFence* ctor_fence = instruction->AsConstructorFence(); |
| 69 | |
| 70 | // A fence with "0" inputs is dead and should've been removed in a prior pass. |
| 71 | DCHECK_NE(0u, ctor_fence->InputCount()); |
| 72 | |
Igor Murashkin | dd018df | 2017-08-09 10:38:31 -0700 | [diff] [blame] | 73 | // TODO: this should be simplified to 'return true' since it's |
| 74 | // potentially pessimizing any code sinking for inlined constructors with final fields. |
| 75 | // TODO: double check that if the final field assignments are not moved, |
| 76 | // then the fence is not moved either. |
| 77 | |
Igor Murashkin | 79d8fa7 | 2017-04-18 09:37:23 -0700 | [diff] [blame] | 78 | return ctor_fence->GetAssociatedAllocation() != nullptr; |
| 79 | } |
| 80 | |
Nicolas Geoffray | b813ca1 | 2017-02-16 22:08:29 +0000 | [diff] [blame] | 81 | // All other instructions that can throw cannot be moved. |
| 82 | if (instruction->CanThrow()) { |
| 83 | return false; |
| 84 | } |
| 85 | |
| 86 | // We can only store on local allocations. Other heap references can |
| 87 | // be escaping. Note that allocations can escape too, but we only move |
| 88 | // allocations if their users can move to, or are in the list of |
| 89 | // post dominated blocks. |
| 90 | if (instruction->IsInstanceFieldSet()) { |
| 91 | if (!instruction->InputAt(0)->IsNewInstance()) { |
| 92 | return false; |
| 93 | } |
| 94 | } |
| 95 | |
| 96 | if (instruction->IsArraySet()) { |
| 97 | if (!instruction->InputAt(0)->IsNewArray()) { |
| 98 | return false; |
| 99 | } |
| 100 | } |
| 101 | |
| 102 | // Heap accesses cannot go pass instructions that have memory side effects, which |
| 103 | // we are not tracking here. Note that the load/store elimination optimization |
| 104 | // runs before this optimization, and should have removed interesting ones. |
| 105 | // In theory, we could handle loads of local allocations, but this is currently |
| 106 | // hard to test, as LSE removes them. |
| 107 | if (instruction->IsStaticFieldGet() || |
| 108 | instruction->IsInstanceFieldGet() || |
| 109 | instruction->IsArrayGet()) { |
| 110 | return false; |
| 111 | } |
| 112 | |
| 113 | if (instruction->IsInstanceFieldSet() || |
| 114 | instruction->IsArraySet() || |
| 115 | instruction->CanBeMoved()) { |
| 116 | return true; |
| 117 | } |
| 118 | return false; |
| 119 | } |
| 120 | |
| 121 | static void AddInstruction(HInstruction* instruction, |
| 122 | const ArenaBitVector& processed_instructions, |
| 123 | const ArenaBitVector& discard_blocks, |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 124 | ScopedArenaVector<HInstruction*>* worklist) { |
Nicolas Geoffray | b813ca1 | 2017-02-16 22:08:29 +0000 | [diff] [blame] | 125 | // Add to the work list if the instruction is not in the list of blocks |
| 126 | // to discard, hasn't been already processed and is of interest. |
| 127 | if (!discard_blocks.IsBitSet(instruction->GetBlock()->GetBlockId()) && |
| 128 | !processed_instructions.IsBitSet(instruction->GetId()) && |
| 129 | IsInterestingInstruction(instruction)) { |
| 130 | worklist->push_back(instruction); |
| 131 | } |
| 132 | } |
| 133 | |
| 134 | static void AddInputs(HInstruction* instruction, |
| 135 | const ArenaBitVector& processed_instructions, |
| 136 | const ArenaBitVector& discard_blocks, |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 137 | ScopedArenaVector<HInstruction*>* worklist) { |
Nicolas Geoffray | b813ca1 | 2017-02-16 22:08:29 +0000 | [diff] [blame] | 138 | for (HInstruction* input : instruction->GetInputs()) { |
| 139 | AddInstruction(input, processed_instructions, discard_blocks, worklist); |
| 140 | } |
| 141 | } |
| 142 | |
| 143 | static void AddInputs(HBasicBlock* block, |
| 144 | const ArenaBitVector& processed_instructions, |
| 145 | const ArenaBitVector& discard_blocks, |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 146 | ScopedArenaVector<HInstruction*>* worklist) { |
Nicolas Geoffray | b813ca1 | 2017-02-16 22:08:29 +0000 | [diff] [blame] | 147 | for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) { |
| 148 | AddInputs(it.Current(), processed_instructions, discard_blocks, worklist); |
| 149 | } |
| 150 | for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) { |
| 151 | AddInputs(it.Current(), processed_instructions, discard_blocks, worklist); |
| 152 | } |
| 153 | } |
| 154 | |
| 155 | static bool ShouldFilterUse(HInstruction* instruction, |
| 156 | HInstruction* user, |
| 157 | const ArenaBitVector& post_dominated) { |
| 158 | if (instruction->IsNewInstance()) { |
Igor Murashkin | 79d8fa7 | 2017-04-18 09:37:23 -0700 | [diff] [blame] | 159 | return (user->IsInstanceFieldSet() || user->IsConstructorFence()) && |
Nicolas Geoffray | b813ca1 | 2017-02-16 22:08:29 +0000 | [diff] [blame] | 160 | (user->InputAt(0) == instruction) && |
| 161 | !post_dominated.IsBitSet(user->GetBlock()->GetBlockId()); |
| 162 | } else if (instruction->IsNewArray()) { |
Igor Murashkin | 79d8fa7 | 2017-04-18 09:37:23 -0700 | [diff] [blame] | 163 | return (user->IsArraySet() || user->IsConstructorFence()) && |
Nicolas Geoffray | b813ca1 | 2017-02-16 22:08:29 +0000 | [diff] [blame] | 164 | (user->InputAt(0) == instruction) && |
| 165 | !post_dominated.IsBitSet(user->GetBlock()->GetBlockId()); |
| 166 | } |
| 167 | return false; |
| 168 | } |
| 169 | |
| 170 | |
| 171 | // Find the ideal position for moving `instruction`. If `filter` is true, |
| 172 | // we filter out store instructions to that instruction, which are processed |
| 173 | // first in the step (3) of the sinking algorithm. |
| 174 | // This method is tailored to the sinking algorithm, unlike |
| 175 | // the generic HInstruction::MoveBeforeFirstUserAndOutOfLoops. |
static HInstruction* FindIdealPosition(HInstruction* instruction,
                                       const ArenaBitVector& post_dominated,
                                       bool filter = false) {
  DCHECK(!instruction->IsPhi());  // Makes no sense for Phi.

  // Find the target block: the common dominator of all (unfiltered) users.
  CommonDominator finder(/* start_block */ nullptr);
  for (const HUseListNode<HInstruction*>& use : instruction->GetUses()) {
    HInstruction* user = use.GetUser();
    if (!(filter && ShouldFilterUse(instruction, user, post_dominated))) {
      HBasicBlock* block = user->GetBlock();
      if (user->IsPhi()) {
        // Special case phis by taking the incoming block for regular ones,
        // or the dominator for catch phis.
        block = user->AsPhi()->IsCatchPhi()
            ? block->GetDominator()
            : block->GetPredecessors()[use.GetIndex()];
      }
      finder.Update(block);
    }
  }
  // Environment users constrain the position as well; they are never filtered
  // (the DCHECK below verifies the filter would not have applied to them).
  for (const HUseListNode<HEnvironment*>& use : instruction->GetEnvUses()) {
    DCHECK(!use.GetUser()->GetHolder()->IsPhi());
    DCHECK(!filter || !ShouldFilterUse(instruction, use.GetUser()->GetHolder(), post_dominated));
    finder.Update(use.GetUser()->GetHolder()->GetBlock());
  }
  HBasicBlock* target_block = finder.Get();
  if (target_block == nullptr) {
    // No user we can go next to? Likely a LSE or DCE limitation.
    return nullptr;
  }

  // Move to the first dominator not in a loop, if we can (staying within the
  // post-dominated region so we never move back onto the common path).
  while (target_block->IsInLoop()) {
    if (!post_dominated.IsBitSet(target_block->GetDominator()->GetBlockId())) {
      break;
    }
    target_block = target_block->GetDominator();
    DCHECK(target_block != nullptr);
  }

  // Find insertion position. No need to filter anymore, as we have found a
  // target block. Pick the earliest user inside `target_block`.
  HInstruction* insert_pos = nullptr;
  for (const HUseListNode<HInstruction*>& use : instruction->GetUses()) {
    if (use.GetUser()->GetBlock() == target_block &&
        (insert_pos == nullptr || use.GetUser()->StrictlyDominates(insert_pos))) {
      insert_pos = use.GetUser();
    }
  }
  for (const HUseListNode<HEnvironment*>& use : instruction->GetEnvUses()) {
    HInstruction* user = use.GetUser()->GetHolder();
    if (user->GetBlock() == target_block &&
        (insert_pos == nullptr || user->StrictlyDominates(insert_pos))) {
      insert_pos = user;
    }
  }
  if (insert_pos == nullptr) {
    // No user in `target_block`, insert before the control flow instruction.
    insert_pos = target_block->GetLastInstruction();
    DCHECK(insert_pos->IsControlFlow());
    // Avoid splitting HCondition from HIf to prevent unnecessary materialization.
    if (insert_pos->IsIf()) {
      HInstruction* if_input = insert_pos->AsIf()->InputAt(0);
      if (if_input == insert_pos->GetPrevious()) {
        insert_pos = if_input;
      }
    }
  }
  DCHECK(!insert_pos->IsPhi());
  return insert_pos;
}
| 248 | |
| 249 | |
// Sinks interesting instructions into the set of blocks post dominated by
// `end_block` (the uncommon branch), so the common path does not pay for work
// only needed on the uncommon path.
void CodeSinking::SinkCodeToUncommonBranch(HBasicBlock* end_block) {
  // Local allocator to discard data structures created below at the end of this optimization.
  ScopedArenaAllocator allocator(graph_->GetArenaStack());

  size_t number_of_instructions = graph_->GetCurrentInstructionId();
  ScopedArenaVector<HInstruction*> worklist(allocator.Adapter(kArenaAllocMisc));
  // Instructions the worklist algorithm has already decided upon.
  ArenaBitVector processed_instructions(&allocator, number_of_instructions, /* expandable */ false);
  processed_instructions.ClearAllBits();
  // Blocks found to be post dominated by `end_block` in step (1).
  ArenaBitVector post_dominated(&allocator, graph_->GetBlocks().size(), /* expandable */ false);
  post_dominated.ClearAllBits();
  // Sinking candidates found in step (2); `move_in_order` keeps them in the
  // order they were discovered so they can be sorted before step (3).
  ArenaBitVector instructions_that_can_move(
      &allocator, number_of_instructions, /* expandable */ false);
  instructions_that_can_move.ClearAllBits();
  ScopedArenaVector<HInstruction*> move_in_order(allocator.Adapter(kArenaAllocMisc));

  // Step (1): Visit post order to get a subset of blocks post dominated by `end_block`.
  // TODO(ngeoffray): Getting the full set of post-dominated should be done by
  // computing the post dominator tree, but that could be too time consuming. Also,
  // we should start the analysis from blocks dominated by an uncommon branch, but we
  // don't profile branches yet.
  bool found_block = false;
  for (HBasicBlock* block : graph_->GetPostOrder()) {
    if (block == end_block) {
      found_block = true;
      post_dominated.SetBit(block->GetBlockId());
    } else if (found_block) {
      bool is_post_dominated = true;
      if (block->GetSuccessors().empty()) {
        // We currently bail for loops.
        is_post_dominated = false;
      } else {
        // A block is post dominated if all of its successors are (post order
        // guarantees the successors were visited before this block).
        for (HBasicBlock* successor : block->GetSuccessors()) {
          if (!post_dominated.IsBitSet(successor->GetBlockId())) {
            is_post_dominated = false;
            break;
          }
        }
      }
      if (is_post_dominated) {
        post_dominated.SetBit(block->GetBlockId());
      }
    }
  }

  // Now that we have found a subset of post-dominated blocks, add to the worklist all inputs
  // of instructions in these blocks that are not themselves in these blocks.
  // Also find the common dominator of the found post dominated blocks, to help filtering
  // out un-movable uses in step (2).
  CommonDominator finder(end_block);
  for (size_t i = 0, e = graph_->GetBlocks().size(); i < e; ++i) {
    if (post_dominated.IsBitSet(i)) {
      finder.Update(graph_->GetBlocks()[i]);
      AddInputs(graph_->GetBlocks()[i], processed_instructions, post_dominated, &worklist);
    }
  }
  HBasicBlock* common_dominator = finder.Get();

  // Step (2): iterate over the worklist to find sinking candidates.
  while (!worklist.empty()) {
    HInstruction* instruction = worklist.back();
    if (processed_instructions.IsBitSet(instruction->GetId())) {
      // The instruction has already been processed, continue. This happens
      // when the instruction is the input/user of multiple instructions.
      worklist.pop_back();
      continue;
    }
    bool all_users_in_post_dominated_blocks = true;
    bool can_move = true;
    // Check users of the instruction.
    for (const HUseListNode<HInstruction*>& use : instruction->GetUses()) {
      HInstruction* user = use.GetUser();
      if (!post_dominated.IsBitSet(user->GetBlock()->GetBlockId()) &&
          !instructions_that_can_move.IsBitSet(user->GetId())) {
        all_users_in_post_dominated_blocks = false;
        // If we've already processed this user, or the user cannot be moved, or
        // is not dominating the post dominated blocks, bail.
        // TODO(ngeoffray): The domination check is an approximation. We should
        // instead check if the dominated blocks post dominate the user's block,
        // but we do not have post dominance information here.
        if (processed_instructions.IsBitSet(user->GetId()) ||
            !IsInterestingInstruction(user) ||
            !user->GetBlock()->Dominates(common_dominator)) {
          can_move = false;
          break;
        }
      }
    }

    // Check environment users of the instruction. Some of these users require
    // the instruction not to move.
    if (all_users_in_post_dominated_blocks) {
      for (const HUseListNode<HEnvironment*>& use : instruction->GetEnvUses()) {
        HEnvironment* environment = use.GetUser();
        HInstruction* user = environment->GetHolder();
        if (!post_dominated.IsBitSet(user->GetBlock()->GetBlockId())) {
          // Debuggable graphs, deoptimizations, throws into catch blocks, and
          // OSR suspend checks all need the environment value to stay live at
          // its original position.
          if (graph_->IsDebuggable() ||
              user->IsDeoptimize() ||
              user->CanThrowIntoCatchBlock() ||
              (user->IsSuspendCheck() && graph_->IsCompilingOsr())) {
            can_move = false;
            break;
          }
        }
      }
    }
    if (!can_move) {
      // Instruction cannot be moved, mark it as processed and remove it from the work
      // list.
      processed_instructions.SetBit(instruction->GetId());
      worklist.pop_back();
    } else if (all_users_in_post_dominated_blocks) {
      // Instruction is a candidate for being sunk. Mark it as such, remove it from the
      // work list, and add its inputs to the work list.
      instructions_that_can_move.SetBit(instruction->GetId());
      move_in_order.push_back(instruction);
      processed_instructions.SetBit(instruction->GetId());
      worklist.pop_back();
      AddInputs(instruction, processed_instructions, post_dominated, &worklist);
      // Drop the environment uses not in the list of post-dominated blocks. This is
      // to help step (3) of this optimization, when we start moving instructions
      // closer to their use.
      for (const HUseListNode<HEnvironment*>& use : instruction->GetEnvUses()) {
        HEnvironment* environment = use.GetUser();
        HInstruction* user = environment->GetHolder();
        if (!post_dominated.IsBitSet(user->GetBlock()->GetBlockId())) {
          environment->RemoveAsUserOfInput(use.GetIndex());
          environment->SetRawEnvAt(use.GetIndex(), nullptr);
        }
      }
    } else {
      // The information we have on the users was not enough to decide whether the
      // instruction could be moved.
      // Add the users to the work list, and keep the instruction in the work list
      // to process it again once all users have been processed.
      for (const HUseListNode<HInstruction*>& use : instruction->GetUses()) {
        AddInstruction(use.GetUser(), processed_instructions, post_dominated, &worklist);
      }
    }
  }

  // Make sure we process instructions in dominated order. This is required for heap
  // stores.
  std::sort(move_in_order.begin(), move_in_order.end(), [](HInstruction* a, HInstruction* b) {
    return b->StrictlyDominates(a);
  });

  // Step (3): Try to move sinking candidates.
  for (HInstruction* instruction : move_in_order) {
    HInstruction* position = nullptr;
    if (instruction->IsArraySet()
            || instruction->IsInstanceFieldSet()
            || instruction->IsConstructorFence()) {
      if (!instructions_that_can_move.IsBitSet(instruction->InputAt(0)->GetId())) {
        // A store can trivially move, but it can safely do so only if the heap
        // location it stores to can also move.
        // TODO(ngeoffray): Handle allocation/store cycles by pruning these instructions
        // from the set and all their inputs.
        continue;
      }
      // Find the position of the instruction we're storing into, filtering out this
      // store and all other stores to that instruction.
      position = FindIdealPosition(instruction->InputAt(0), post_dominated, /* filter */ true);

      // The position needs to be dominated by the store, in order for the store to move there.
      if (position == nullptr || !instruction->GetBlock()->Dominates(position->GetBlock())) {
        continue;
      }
    } else {
      // Find the ideal position within the post dominated blocks.
      position = FindIdealPosition(instruction, post_dominated);
      if (position == nullptr) {
        continue;
      }
    }
    // Bail if we could not find a position in the post dominated blocks (for example,
    // if there are multiple users whose common dominator is not in the list of
    // post dominated blocks).
    if (!post_dominated.IsBitSet(position->GetBlock()->GetBlockId())) {
      continue;
    }
    MaybeRecordStat(stats_, MethodCompilationStat::kInstructionSunk);
    instruction->MoveBefore(position, /* ensure_safety */ false);
  }
}
| 434 | |
| 435 | } // namespace art |