Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/block_builder.cc  46
-rw-r--r--  compiler/optimizing/block_builder.h     5
2 files changed, 51 insertions, 0 deletions
diff --git a/compiler/optimizing/block_builder.cc b/compiler/optimizing/block_builder.cc
index a5f78cafe0..e1f061ae70 100644
--- a/compiler/optimizing/block_builder.cc
+++ b/compiler/optimizing/block_builder.cc
@@ -398,6 +398,48 @@ void HBasicBlockBuilder::InsertTryBoundaryBlocks() {
}
}
+void HBasicBlockBuilder::InsertSynthesizedLoopsForOsr() {
+  ArenaSet<uint32_t> targets(allocator_->Adapter(kArenaAllocGraphBuilder));
+  // Collect basic blocks that are targets of a negative branch.
+  for (const DexInstructionPcPair& pair : code_item_accessor_) {
+    const uint32_t dex_pc = pair.DexPc();
+    const Instruction& instruction = pair.Inst();
+    if (instruction.IsBranch()) {
+      uint32_t target_dex_pc = dex_pc + instruction.GetTargetOffset();
+      if (target_dex_pc < dex_pc) {
+        HBasicBlock* block = GetBlockAt(target_dex_pc);
+        CHECK_NE(kNoDexPc, block->GetDexPc());
+        targets.insert(block->GetBlockId());
+      }
+    } else if (instruction.IsSwitch()) {
+      DexSwitchTable table(instruction, dex_pc);
+      for (DexSwitchTableIterator s_it(table); !s_it.Done(); s_it.Advance()) {
+        uint32_t target_dex_pc = dex_pc + s_it.CurrentTargetOffset();
+        if (target_dex_pc < dex_pc) {
+          HBasicBlock* block = GetBlockAt(target_dex_pc);
+          CHECK_NE(kNoDexPc, block->GetDexPc());
+          targets.insert(block->GetBlockId());
+        }
+      }
+    }
+  }
+
+  // Insert synthesized loops before the collected blocks.
+  for (uint32_t block_id : targets) {
+    HBasicBlock* block = graph_->GetBlocks()[block_id];
+    HBasicBlock* loop_block = new (allocator_) HBasicBlock(graph_, block->GetDexPc());
+    graph_->AddBlock(loop_block);
+    while (!block->GetPredecessors().empty()) {
+      block->GetPredecessors()[0]->ReplaceSuccessor(block, loop_block);
+    }
+    loop_block->AddSuccessor(loop_block);
+    loop_block->AddSuccessor(block);
+    // We loop on false - we know this won't be optimized later on as the loop
+    // is marked irreducible, which disables loop optimizations.
+    loop_block->AddInstruction(new (allocator_) HIf(graph_->GetIntConstant(0), kNoDexPc));
+  }
+}
+
bool HBasicBlockBuilder::Build() {
DCHECK(code_item_accessor_.HasCodeItem());
DCHECK(graph_->GetBlocks().empty());
@@ -413,6 +455,10 @@ bool HBasicBlockBuilder::Build() {
  ConnectBasicBlocks();
  InsertTryBoundaryBlocks();
+  if (graph_->IsCompilingOsr()) {
+    InsertSynthesizedLoopsForOsr();
+  }
+
  return true;
}
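
The first loop of the new pass walks the method's dex instructions and records every block that a later instruction branches back to. As a rough, self-contained illustration of that collection step, the sketch below applies the same test (target pc below the branch pc) to a flat list of hypothetical Instruction records; the real code iterates a CodeItemDataAccessor, stores block ids rather than pcs, and also decodes switch payload tables.

#include <cstdint>
#include <set>
#include <vector>

// Hypothetical stand-in for the dex instruction accessor used by ART.
struct Instruction {
  uint32_t dex_pc;        // Address of the instruction in the code item.
  bool is_branch;         // True for goto/if-* style instructions.
  int32_t target_offset;  // Branch offset relative to dex_pc (may be negative).
};

// Collect the dex pcs that are targets of a backward (negative-offset) branch.
std::set<uint32_t> CollectBackwardBranchTargets(const std::vector<Instruction>& code) {
  std::set<uint32_t> targets;
  for (const Instruction& insn : code) {
    if (!insn.is_branch) {
      continue;
    }
    const uint32_t target_dex_pc =
        static_cast<uint32_t>(static_cast<int32_t>(insn.dex_pc) + insn.target_offset);
    // A target below the branching pc is a back edge; its target block heads a
    // loop and is therefore a candidate OSR entry point.
    if (target_dex_pc < insn.dex_pc) {
      targets.insert(target_dex_pc);
    }
  }
  return targets;
}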
diff --git a/compiler/optimizing/block_builder.h b/compiler/optimizing/block_builder.h
index 2c1f034d80..42a3f327e7 100644
--- a/compiler/optimizing/block_builder.h
+++ b/compiler/optimizing/block_builder.h
@@ -59,6 +59,11 @@ class HBasicBlockBuilder : public ValueObject {
  void ConnectBasicBlocks();
  void InsertTryBoundaryBlocks();
+  // To ensure that branches with negative offsets can always OSR-jump into
+  // compiled code, we insert a synthesized loop before each block that is the
+  // target of a negative branch.
+  void InsertSynthesizedLoopsForOsr();
+
  // Helper method which decides whether `catch_block` may have live normal
  // predecessors and thus whether a synthetic catch block needs to be created
  // to avoid mixing normal and exceptional predecessors.
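
To make the effect of the pass on the graph concrete, here is a minimal, self-contained model of the CFG surgery performed for each collected block: a fresh block is created, every former predecessor of the target is redirected to it, and its two successors are itself and the original block. The Block and Cfg types below are hypothetical stand-ins for HBasicBlock and HGraph; this is only a sketch of the transformation's shape, not ART's implementation.

#include <algorithm>
#include <cstdint>
#include <memory>
#include <set>
#include <vector>

// Hypothetical, minimal CFG types standing in for HBasicBlock/HGraph.
struct Block {
  uint32_t id = 0;
  uint32_t dex_pc = 0;
  std::vector<Block*> predecessors;
  std::vector<Block*> successors;
};

struct Cfg {
  std::vector<std::unique_ptr<Block>> blocks;

  Block* AddBlock(uint32_t dex_pc) {
    blocks.push_back(std::make_unique<Block>());
    Block* b = blocks.back().get();
    b->id = static_cast<uint32_t>(blocks.size() - 1);
    b->dex_pc = dex_pc;
    return b;
  }

  static void AddEdge(Block* from, Block* to) {
    from->successors.push_back(to);
    to->predecessors.push_back(from);
  }

  // Re-point the edge from -> old_to so that it targets new_to instead,
  // in the spirit of HBasicBlock::ReplaceSuccessor().
  static void ReplaceSuccessor(Block* from, Block* old_to, Block* new_to) {
    std::replace(from->successors.begin(), from->successors.end(), old_to, new_to);
    old_to->predecessors.erase(
        std::remove(old_to->predecessors.begin(), old_to->predecessors.end(), from),
        old_to->predecessors.end());
    new_to->predecessors.push_back(from);
  }
};

// Insert a self-looping block in front of every block in `targets`, modelling
// the second loop of InsertSynthesizedLoopsForOsr().
void InsertSynthesizedLoops(Cfg* cfg, const std::set<uint32_t>& targets) {
  for (uint32_t block_id : targets) {
    Block* block = cfg->blocks[block_id].get();
    Block* loop_block = cfg->AddBlock(block->dex_pc);
    // Steal all incoming edges: every former predecessor of `block` now
    // branches to `loop_block` instead.
    while (!block->predecessors.empty()) {
      Cfg::ReplaceSuccessor(block->predecessors[0], block, loop_block);
    }
    // Mirroring the order in the diff above: the first successor is the self
    // edge (never taken at runtime, since the guarding HIf condition is the
    // constant false), the second is the edge to the original block.
    Cfg::AddEdge(loop_block, loop_block);
    Cfg::AddEdge(loop_block, block);
  }
}

As the comment in the block_builder.cc change notes, the resulting loop is marked irreducible, so later loop optimizations leave the synthetic block in place; every backward-branch target therefore keeps a loop through which OSR can enter compiled code.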