diff options
author | 2019-07-04 15:31:42 +0100 | |
---|---|---|
committer | 2019-11-25 13:54:49 +0000 | |
commit | aedc9bc5ebdacadc9efe4465173e1b7ca7edc226 (patch) | |
tree | 5889afe345495e831f6c390f215fc0ca611b630d /compiler/optimizing | |
parent | 3c036885d2d5e6129c1d6b5933be2fce13fbdc79 (diff) |
Ensure we can always enter OSR code
When the loop header is not the target of a back-edge,
we used to never enter the OSR code even if it's been compiled.
Test: testrunner.py --host --jit -t 570-checker-osr-locals
(it used to get stuck, you can kill the dalvikvm to check that
the weirdLoop was OSR-compiled)
Bug: 136743846
Change-Id: Iae55463eff92adccf9adec842e04f8ff6d9d8568
Diffstat (limited to 'compiler/optimizing')
-rw-r--r-- | compiler/optimizing/block_builder.cc | 46 | ||||
-rw-r--r-- | compiler/optimizing/block_builder.h | 5 |
2 files changed, 51 insertions, 0 deletions
diff --git a/compiler/optimizing/block_builder.cc b/compiler/optimizing/block_builder.cc index a5f78cafe0..e1f061ae70 100644 --- a/compiler/optimizing/block_builder.cc +++ b/compiler/optimizing/block_builder.cc @@ -398,6 +398,48 @@ void HBasicBlockBuilder::InsertTryBoundaryBlocks() { } } +void HBasicBlockBuilder::InsertSynthesizedLoopsForOsr() { + ArenaSet<uint32_t> targets(allocator_->Adapter(kArenaAllocGraphBuilder)); + // Collect basic blocks that are targets of a negative branch. + for (const DexInstructionPcPair& pair : code_item_accessor_) { + const uint32_t dex_pc = pair.DexPc(); + const Instruction& instruction = pair.Inst(); + if (instruction.IsBranch()) { + uint32_t target_dex_pc = dex_pc + instruction.GetTargetOffset(); + if (target_dex_pc < dex_pc) { + HBasicBlock* block = GetBlockAt(target_dex_pc); + CHECK_NE(kNoDexPc, block->GetDexPc()); + targets.insert(block->GetBlockId()); + } + } else if (instruction.IsSwitch()) { + DexSwitchTable table(instruction, dex_pc); + for (DexSwitchTableIterator s_it(table); !s_it.Done(); s_it.Advance()) { + uint32_t target_dex_pc = dex_pc + s_it.CurrentTargetOffset(); + if (target_dex_pc < dex_pc) { + HBasicBlock* block = GetBlockAt(target_dex_pc); + CHECK_NE(kNoDexPc, block->GetDexPc()); + targets.insert(block->GetBlockId()); + } + } + } + } + + // Insert synthesized loops before the collected blocks. + for (uint32_t block_id : targets) { + HBasicBlock* block = graph_->GetBlocks()[block_id]; + HBasicBlock* loop_block = new (allocator_) HBasicBlock(graph_, block->GetDexPc()); + graph_->AddBlock(loop_block); + while (!block->GetPredecessors().empty()) { + block->GetPredecessors()[0]->ReplaceSuccessor(block, loop_block); + } + loop_block->AddSuccessor(loop_block); + loop_block->AddSuccessor(block); + // We loop on false - we know this won't be optimized later on as the loop + // is marked irreducible, which disables loop optimizations. 
+ loop_block->AddInstruction(new (allocator_) HIf(graph_->GetIntConstant(0), kNoDexPc)); + } +} + bool HBasicBlockBuilder::Build() { DCHECK(code_item_accessor_.HasCodeItem()); DCHECK(graph_->GetBlocks().empty()); @@ -413,6 +455,10 @@ bool HBasicBlockBuilder::Build() { ConnectBasicBlocks(); InsertTryBoundaryBlocks(); + if (graph_->IsCompilingOsr()) { + InsertSynthesizedLoopsForOsr(); + } + return true; } diff --git a/compiler/optimizing/block_builder.h b/compiler/optimizing/block_builder.h index 2c1f034d80..42a3f327e7 100644 --- a/compiler/optimizing/block_builder.h +++ b/compiler/optimizing/block_builder.h @@ -59,6 +59,11 @@ class HBasicBlockBuilder : public ValueObject { void ConnectBasicBlocks(); void InsertTryBoundaryBlocks(); + // To ensure branches with negative offsets can always OSR jump to compiled + // code, we insert synthesized loops before each block that is the target of a + // negative branch. + void InsertSynthesizedLoopsForOsr(); + // Helper method which decides whether `catch_block` may have live normal // predecessors and thus whether a synthetic catch block needs to be created // to avoid mixing normal and exceptional predecessors. |