path: root/compiler/optimizing/block_builder.h
author Vladimir Marko <vmarko@google.com> 2019-07-04 15:31:42 +0100
committer Nicolas Geoffray <ngeoffray@google.com> 2019-11-25 13:54:49 +0000
commit aedc9bc5ebdacadc9efe4465173e1b7ca7edc226 (patch)
tree 5889afe345495e831f6c390f215fc0ca611b630d /compiler/optimizing/block_builder.h
parent 3c036885d2d5e6129c1d6b5933be2fce13fbdc79 (diff)
Ensure we can always enter OSR code
When the loop header is not the target of a back-edge, we used to never
enter the OSR code even if it's been compiled.

Test: testrunner.py --host --jit -t 570-checker-osr-locals
      (it used to get stuck; you can kill the dalvikvm to check that the
      weirdLoop was OSR-compiled)
Bug: 136743846
Change-Id: Iae55463eff92adccf9adec842e04f8ff6d9d8568
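To make the failure mode concrete, below is a hypothetical, self-contained C++ sketch (not ART code; all names are illustrative) of a runtime that only attempts an OSR transition when a backward branch lands on a dex pc that has compiled OSR entry code. If the loop header is never the target of such a back-edge, the lookup never fires for it, so the compiled OSR code is never entered; that is the behavior this change addresses.

#include <cstdint>
#include <unordered_set>

// Hypothetical table of dex pcs for which compiled OSR entry code exists.
struct OsrTable {
  std::unordered_set<uint32_t> entry_points;
  bool HasEntryAt(uint32_t dex_pc) const {
    return entry_points.count(dex_pc) != 0;
  }
};

// Hypothetical hook invoked when the interpreter takes a branch: only backward
// branches (negative offsets) are candidates for transferring to OSR code.
bool ShouldEnterOsr(const OsrTable& table, uint32_t branch_pc, int32_t offset) {
  if (offset >= 0) {
    return false;  // Forward branch: OSR entry is never attempted here.
  }
  uint32_t target_pc = branch_pc + offset;
  // If the loop header is not a back-edge target, target_pc never equals the
  // header's pc, so a compiled entry attached to the header is never used.
  return table.HasEntryAt(target_pc);
}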
Diffstat (limited to 'compiler/optimizing/block_builder.h')
-rw-r--r--  compiler/optimizing/block_builder.h  5
1 file changed, 5 insertions, 0 deletions
diff --git a/compiler/optimizing/block_builder.h b/compiler/optimizing/block_builder.h
index 2c1f034d80..42a3f327e7 100644
--- a/compiler/optimizing/block_builder.h
+++ b/compiler/optimizing/block_builder.h
@@ -59,6 +59,11 @@ class HBasicBlockBuilder : public ValueObject {
void ConnectBasicBlocks();
void InsertTryBoundaryBlocks();
+ // To ensure branches with negative offsets can always OSR jump to compiled
+ // code, we insert synthesized loops before each block that is the target of a
+ // negative branch.
+ void InsertSynthesizedLoopsForOsr();
+
// Helper method which decides whether `catch_block` may have live normal
// predecessors and thus whether a synthetic catch block needs to be created
// to avoid mixing normal and exceptional predecessors.