-rw-r--r--  runtime/jit/jit.h              4
-rw-r--r--  test/common/runtime_state.cc  13
2 files changed, 8 insertions, 9 deletions
diff --git a/runtime/jit/jit.h b/runtime/jit/jit.h
index 287b2600a9..b46e92cdbb 100644
--- a/runtime/jit/jit.h
+++ b/runtime/jit/jit.h
@@ -514,9 +514,9 @@ class Jit {
   // class path methods.
   void NotifyZygoteCompilationDone();
 
-  void EnqueueOptimizedCompilation(ArtMethod* method, Thread* self);
+  EXPORT void EnqueueOptimizedCompilation(ArtMethod* method, Thread* self);
 
-  void MaybeEnqueueCompilation(ArtMethod* method, Thread* self)
+  EXPORT void MaybeEnqueueCompilation(ArtMethod* method, Thread* self)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
  private:
diff --git a/test/common/runtime_state.cc b/test/common/runtime_state.cc
index ff17f64632..36d1a41206 100644
--- a/test/common/runtime_state.cc
+++ b/test/common/runtime_state.cc
@@ -273,16 +273,15 @@ static void ForceJitCompiled(Thread* self,
   // Update the code cache to make sure the JIT code does not get deleted.
   // Note: this will apply to all JIT compilations.
   code_cache->SetGarbageCollectCode(false);
+  if (kind == CompilationKind::kBaseline || jit->GetJitCompiler()->IsBaselineCompiler()) {
+    ScopedObjectAccess soa(self);
+    jit->MaybeEnqueueCompilation(method, self);
+  } else {
+    jit->EnqueueOptimizedCompilation(method, self);
+  }
   do {
     // Sleep to yield to the compiler thread.
     usleep(1000);
-    ScopedObjectAccess soa(self);
-    // Will either ensure it's compiled or do the compilation itself. We do
-    // this before checking if we will execute JIT code in case the request
-    // is for an 'optimized' compilation.
-    if (jit->CompileMethod(method, self, kind, /*prejit=*/ false)) {
-      return;
-    }
     const void* entry_point = method->GetEntryPointFromQuickCompiledCode();
     if (code_cache->ContainsPc(entry_point)) {
       // If we're running baseline or not requesting optimized, we're good to go.