[baseline] Check that the profiling info is not null.

Zygote cannot allocate profiling infos, so the baseline compiler must
skip the hotness counter and inline cache instrumentation when the
ProfilingInfo is null.

Test: 689-zygote-jit-deopt
Change-Id: I85e8b7f16b81ba4de435a5417dbb2588c34414b0
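
For context, a minimal standalone C++ sketch of the shape of this fix (all
names here are illustrative stand-ins, not ART's actual API): baseline
instrumentation is only emitted when a ProfilingInfo was actually allocated,
since the zygote cannot allocate one.

    #include <iostream>

    struct ProfilingInfo {};

    // Stand-in for ArtMethod::GetProfilingInfo(); returns null in the zygote.
    ProfilingInfo* GetProfilingInfo(bool in_zygote) {
      static ProfilingInfo info;
      return in_zygote ? nullptr : &info;
    }

    void GenerateFrameEntry(bool in_zygote) {
      ProfilingInfo* info = GetProfilingInfo(in_zygote);
      if (info != nullptr) {  // The new guard: the old code assumed non-null.
        std::cout << "emit hotness counter instrumentation\n";
      } else {
        std::cout << "no ProfilingInfo (zygote): emit nothing\n";
      }
    }

    int main() {
      GenerateFrameEntry(/*in_zygote=*/true);
      GenerateFrameEntry(/*in_zygote=*/false);
    }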
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 64ec987..5ef7404 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -1080,41 +1080,43 @@
if (GetGraph()->IsCompilingBaseline() && !Runtime::Current()->IsAotCompiler()) {
ScopedObjectAccess soa(Thread::Current());
ProfilingInfo* info = GetGraph()->GetArtMethod()->GetProfilingInfo(kRuntimePointerSize);
- uint32_t address = reinterpret_cast32<uint32_t>(info);
- vixl::aarch64::Label done;
- UseScratchRegisterScope temps(masm);
- Register temp = temps.AcquireX();
- Register counter = temps.AcquireW();
- __ Mov(temp, address);
- __ Ldrh(counter, MemOperand(temp, ProfilingInfo::BaselineHotnessCountOffset().Int32Value()));
- __ Add(counter, counter, 1);
- __ Strh(counter, MemOperand(temp, ProfilingInfo::BaselineHotnessCountOffset().Int32Value()));
- __ Tst(counter, 0xffff);
- __ B(ne, &done);
- if (is_frame_entry) {
- if (HasEmptyFrame()) {
- // The entyrpoint expects the method at the bottom of the stack. We
- // claim stack space necessary for alignment.
- __ Claim(kStackAlignment);
- __ Stp(kArtMethodRegister, lr, MemOperand(sp, 0));
- } else if (!RequiresCurrentMethod()) {
- __ Str(kArtMethodRegister, MemOperand(sp, 0));
+ if (info != nullptr) {
+ uint32_t address = reinterpret_cast32<uint32_t>(info);
+ vixl::aarch64::Label done;
+ UseScratchRegisterScope temps(masm);
+ Register temp = temps.AcquireX();
+ Register counter = temps.AcquireW();
+ __ Mov(temp, address);
+ __ Ldrh(counter, MemOperand(temp, ProfilingInfo::BaselineHotnessCountOffset().Int32Value()));
+ __ Add(counter, counter, 1);
+ __ Strh(counter, MemOperand(temp, ProfilingInfo::BaselineHotnessCountOffset().Int32Value()));
+ __ Tst(counter, 0xffff);
+ __ B(ne, &done);
+ if (is_frame_entry) {
+ if (HasEmptyFrame()) {
+ // The entrypoint expects the method at the bottom of the stack. We
+ // claim stack space necessary for alignment.
+ __ Claim(kStackAlignment);
+ __ Stp(kArtMethodRegister, lr, MemOperand(sp, 0));
+ } else if (!RequiresCurrentMethod()) {
+ __ Str(kArtMethodRegister, MemOperand(sp, 0));
+ }
+ } else {
+ CHECK(RequiresCurrentMethod());
}
- } else {
- CHECK(RequiresCurrentMethod());
+ uint32_t entrypoint_offset =
+ GetThreadOffset<kArm64PointerSize>(kQuickCompileOptimized).Int32Value();
+ __ Ldr(lr, MemOperand(tr, entrypoint_offset));
+ // Note: we don't record the call here (and therefore don't generate a stack
+ // map), as the entrypoint should never be suspended.
+ __ Blr(lr);
+ if (HasEmptyFrame()) {
+ CHECK(is_frame_entry);
+ __ Ldr(lr, MemOperand(sp, 8));
+ __ Drop(kStackAlignment);
+ }
+ __ Bind(&done);
}
- uint32_t entrypoint_offset =
- GetThreadOffset<kArm64PointerSize>(kQuickCompileOptimized).Int32Value();
- __ Ldr(lr, MemOperand(tr, entrypoint_offset));
- // Note: we don't record the call here (and therefore don't generate a stack
- // map), as the entrypoint should never be suspended.
- __ Blr(lr);
- if (HasEmptyFrame()) {
- CHECK(is_frame_entry);
- __ Ldr(lr, MemOperand(sp, 8));
- __ Drop(kStackAlignment);
- }
- __ Bind(&done);
}
}
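
The first hunk guards the baseline hotness counter: the emitted arm64 code
loads a 16-bit counter from the ProfilingInfo, increments it, and calls the
kQuickCompileOptimized entrypoint only once the counter wraps to zero. A
rough standalone C++ model of that behavior, assuming a hypothetical
CompileOptimized() stand-in for the entrypoint:

    #include <cstdint>
    #include <iostream>

    struct ProfilingInfo {
      std::uint16_t baseline_hotness_count = 0;
    };

    // Hypothetical stand-in for the kQuickCompileOptimized entrypoint.
    void CompileOptimized() {
      std::cout << "counter wrapped: request optimized compilation\n";
    }

    void OnMethodEntry(ProfilingInfo* info) {
      if (info == nullptr) {
        return;  // Zygote case: no profiling info, no instrumentation.
      }
      // Ldrh / Add / Strh: load, increment, and store the 16-bit counter.
      info->baseline_hotness_count =
          static_cast<std::uint16_t>(info->baseline_hotness_count + 1);
      // Tst counter, 0xffff; B ne, done: fall through only on wrap to zero.
      if ((info->baseline_hotness_count & 0xffff) == 0) {
        CompileOptimized();
      }
    }

    int main() {
      ProfilingInfo info;
      info.baseline_hotness_count = 0xfffe;
      OnMethodEntry(&info);  // -> 0xffff: no call.
      OnMethodEntry(&info);  // wraps to 0: triggers CompileOptimized().
    }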
@@ -4089,16 +4091,18 @@
DCHECK(!instruction->GetEnvironment()->IsFromInlinedInvoke());
ScopedObjectAccess soa(Thread::Current());
ProfilingInfo* info = GetGraph()->GetArtMethod()->GetProfilingInfo(kRuntimePointerSize);
- InlineCache* cache = info->GetInlineCache(instruction->GetDexPc());
- uint64_t address = reinterpret_cast64<uint64_t>(cache);
- vixl::aarch64::Label done;
- __ Mov(x8, address);
- __ Ldr(x9, MemOperand(x8, InlineCache::ClassesOffset().Int32Value()));
- // Fast path for a monomorphic cache.
- __ Cmp(klass, x9);
- __ B(eq, &done);
- InvokeRuntime(kQuickUpdateInlineCache, instruction, instruction->GetDexPc());
- __ Bind(&done);
+ if (info != nullptr) {
+ InlineCache* cache = info->GetInlineCache(instruction->GetDexPc());
+ uint64_t address = reinterpret_cast64<uint64_t>(cache);
+ vixl::aarch64::Label done;
+ __ Mov(x8, address);
+ __ Ldr(x9, MemOperand(x8, InlineCache::ClassesOffset().Int32Value()));
+ // Fast path for a monomorphic cache.
+ __ Cmp(klass, x9);
+ __ B(eq, &done);
+ InvokeRuntime(kQuickUpdateInlineCache, instruction, instruction->GetDexPc());
+ __ Bind(&done);
+ }
}
}
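
The second hunk applies the same guard to the inline cache update: the
emitted code compares the observed class against the single cached class and
only calls kQuickUpdateInlineCache on a miss. A rough standalone C++ model,
with UpdateInlineCacheSlowPath as a hypothetical stand-in for that runtime
call:

    #include <iostream>

    struct Class {};

    struct InlineCache {
      Class* classes[1] = {nullptr};  // Only the monomorphic slot is modeled.
    };

    // Hypothetical stand-in for the kQuickUpdateInlineCache entrypoint.
    void UpdateInlineCacheSlowPath(InlineCache* cache, Class* klass) {
      std::cout << "cache miss: runtime updates the inline cache\n";
      cache->classes[0] = klass;
    }

    void MaybeRecordClass(InlineCache* cache, Class* klass) {
      if (cache == nullptr) {
        return;  // No profiling info (zygote): nothing to record.
      }
      // Cmp klass, cached; B eq, done: fast path for a monomorphic hit.
      if (cache->classes[0] == klass) {
        return;
      }
      UpdateInlineCacheSlowPath(cache, klass);
    }

    int main() {
      InlineCache cache;
      Class a, b;
      MaybeRecordClass(&cache, &a);  // miss: slow path fills the cache.
      MaybeRecordClass(&cache, &a);  // hit: no runtime call.
      MaybeRecordClass(&cache, &b);  // miss again: slow path.
    }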