author Vladimir Marko <vmarko@google.com> 2019-07-04 15:31:42 +0100
committer Nicolas Geoffray <ngeoffray@google.com> 2019-11-25 13:54:49 +0000
commit aedc9bc5ebdacadc9efe4465173e1b7ca7edc226
tree   5889afe345495e831f6c390f215fc0ca611b630d /compiler/optimizing/block_builder.cc
parent 3c036885d2d5e6129c1d6b5933be2fce13fbdc79
Ensure we can always enter OSR code
When the loop header is not the target of a back-edge, we used to never
enter the OSR code even if it had been compiled.

Test: testrunner.py --host --jit -t 570-checker-osr-locals
      (it used to get stuck; you can kill the dalvikvm to check that
      weirdLoop was OSR-compiled)
Bug: 136743846
Change-Id: Iae55463eff92adccf9adec842e04f8ff6d9d8568
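For context: on-stack replacement (OSR) lets a method that is hot inside a
loop jump from the interpreter into JIT-compiled code at a backward branch,
instead of waiting for the next invocation. The interpreter requests the
switch at the dex pc targeted by the backward branch, so the compiled code
must expose an entry point (a loop header) at exactly that pc. The following
C++ analogue of such a dex-level loop is only an illustration; SumUpTo and
the head label are invented for this sketch, not taken from the test:

// Hypothetical C++ analogue of a dex-level loop; names are illustrative.
int SumUpTo(int n) {
  int sum = 0;
  int i = 0;
head:              // backward-branch target: the dex pc OSR wants to enter at
  if (i < n) {
    sum += i;
    ++i;
    goto head;     // backward branch (negative dex offset); the interpreter
                   // checks for an OSR entry point when taking it
  }
  return sum;
}

If the optimizing compiler's graph has no loop header at head's dex pc, that
check never succeeds and the method stays interpreted forever, which is the
hang the Test: line above refers to.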
Diffstat (limited to 'compiler/optimizing/block_builder.cc')
-rw-r--r--  compiler/optimizing/block_builder.cc  46
1 file changed, 46 insertions(+), 0 deletions(-)
diff --git a/compiler/optimizing/block_builder.cc b/compiler/optimizing/block_builder.cc
index a5f78cafe0..e1f061ae70 100644
--- a/compiler/optimizing/block_builder.cc
+++ b/compiler/optimizing/block_builder.cc
@@ -398,6 +398,48 @@ void HBasicBlockBuilder::InsertTryBoundaryBlocks() {
}
}
+void HBasicBlockBuilder::InsertSynthesizedLoopsForOsr() {
+ ArenaSet<uint32_t> targets(allocator_->Adapter(kArenaAllocGraphBuilder));
+ // Collect basic blocks that are targets of a negative branch.
+ for (const DexInstructionPcPair& pair : code_item_accessor_) {
+ const uint32_t dex_pc = pair.DexPc();
+ const Instruction& instruction = pair.Inst();
+ if (instruction.IsBranch()) {
+ uint32_t target_dex_pc = dex_pc + instruction.GetTargetOffset();
+ if (target_dex_pc < dex_pc) {
+ HBasicBlock* block = GetBlockAt(target_dex_pc);
+ CHECK_NE(kNoDexPc, block->GetDexPc());
+ targets.insert(block->GetBlockId());
+ }
+ } else if (instruction.IsSwitch()) {
+ DexSwitchTable table(instruction, dex_pc);
+ for (DexSwitchTableIterator s_it(table); !s_it.Done(); s_it.Advance()) {
+ uint32_t target_dex_pc = dex_pc + s_it.CurrentTargetOffset();
+ if (target_dex_pc < dex_pc) {
+ HBasicBlock* block = GetBlockAt(target_dex_pc);
+ CHECK_NE(kNoDexPc, block->GetDexPc());
+ targets.insert(block->GetBlockId());
+ }
+ }
+ }
+ }
+
+ // Insert synthesized loops before the collected blocks.
+ for (uint32_t block_id : targets) {
+ HBasicBlock* block = graph_->GetBlocks()[block_id];
+ HBasicBlock* loop_block = new (allocator_) HBasicBlock(graph_, block->GetDexPc());
+ graph_->AddBlock(loop_block);
+ while (!block->GetPredecessors().empty()) {
+ block->GetPredecessors()[0]->ReplaceSuccessor(block, loop_block);
+ }
+ loop_block->AddSuccessor(loop_block);
+ loop_block->AddSuccessor(block);
+ // We loop on false - we know this won't be optimized later on as the loop
+ // is marked irreducible, which disables loop optimizations.
+ loop_block->AddInstruction(new (allocator_) HIf(graph_->GetIntConstant(0), kNoDexPc));
+ }
+}
+
bool HBasicBlockBuilder::Build() {
DCHECK(code_item_accessor_.HasCodeItem());
DCHECK(graph_->GetBlocks().empty());
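The second half of the new function redirects every predecessor of each
collected block into a freshly created block that branches back to itself on
true and falls through to the original block on false. Below is a minimal
standalone sketch of that rewiring, assuming a deliberately simplified CFG;
Block, ReplaceSuccessor, and InsertSyntheticLoop are inventions of this
sketch, not ART's HGraph API:

// Simplified CFG model (an assumption of this sketch, not ART's HGraph).
#include <algorithm>
#include <memory>
#include <vector>

struct Block {
  std::vector<Block*> preds;
  std::vector<Block*> succs;  // succs[0] = "true" edge, succs[1] = "false" edge
};

// Make `pred` branch to `repl` instead of `orig`, keeping the
// predecessor lists consistent with the successor lists.
void ReplaceSuccessor(Block* pred, Block* orig, Block* repl) {
  std::replace(pred->succs.begin(), pred->succs.end(), orig, repl);
  orig->preds.erase(std::find(orig->preds.begin(), orig->preds.end(), pred));
  repl->preds.push_back(pred);
}

// Mirrors the loop over `targets` in the patch: steal all predecessors of
// `block`, then make the new block loop on itself (true edge) and fall
// through to `block` (false edge).
Block* InsertSyntheticLoop(Block* block, std::vector<std::unique_ptr<Block>>* arena) {
  arena->push_back(std::make_unique<Block>());
  Block* loop_block = arena->back().get();
  while (!block->preds.empty()) {
    ReplaceSuccessor(block->preds[0], block, loop_block);
  }
  loop_block->succs.push_back(loop_block);  // true successor: self (back edge)
  loop_block->preds.push_back(loop_block);
  loop_block->succs.push_back(block);       // false successor: original block
  block->preds.push_back(loop_block);
  return loop_block;
}

Because the patch guards the synthetic back edge with an HIf on the constant
0, the self-loop is never taken at run time; it exists purely so that the
graph has a loop header, and therefore an OSR entry point, at the block's
dex pc.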
@@ -413,6 +455,10 @@ bool HBasicBlockBuilder::Build() {
ConnectBasicBlocks();
InsertTryBoundaryBlocks();
+ if (graph_->IsCompilingOsr()) {
+ InsertSynthesizedLoopsForOsr();
+ }
+
return true;
}
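The collection loop in the first hunk walks the dex instruction stream and
records every block whose dex pc is the target of a branch or switch arm
with a negative offset. As a rough standalone model of that scan (the Insn
encoding below is invented for this sketch; real dex decoding goes through
DexInstructionPcPair and DexSwitchTable as shown in the diff):

#include <cstdint>
#include <set>
#include <vector>

// Invented minimal "instruction" for this sketch: a pc plus an optional
// branch offset, where 0 means "not a branch".
struct Insn {
  uint32_t pc;
  int32_t branch_offset;
};

// Collect the pcs that are targets of a backward branch, i.e. the places
// where the patch would synthesize a loop so OSR can enter.
std::set<uint32_t> CollectBackwardTargets(const std::vector<Insn>& code) {
  std::set<uint32_t> targets;
  for (const Insn& insn : code) {
    if (insn.branch_offset != 0) {
      uint32_t target = insn.pc + insn.branch_offset;
      if (target < insn.pc) {  // negative offset => backward branch
        targets.insert(target);
      }
    }
  }
  return targets;
}

The second hunk then makes the whole pass conditional on
graph_->IsCompilingOsr(), so non-OSR compilations are unaffected.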