[baseline] Check that the profiling info is not null.

Zygote cannot allocate profiling infos, so GetProfilingInfo() may return
null for a method baseline-compiled in the zygote. Skip emitting the
hotness counter update and the inline cache check in that case, instead of
embedding a null ProfilingInfo address in the generated code and faulting
when it is dereferenced at runtime.
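
For illustration only, a standalone sketch of the guard. The types and
helpers below are hypothetical stand-ins, not ART's real signatures; the
actual emission code is in the diff that follows:

    #include <cstdint>

    // Hypothetical stand-in for ART's ProfilingInfo.
    struct ProfilingInfo {
      uint16_t baseline_hotness_count = 0;
    };

    // Hypothetical helper: returns null in the zygote, which cannot
    // allocate profiling infos.
    ProfilingInfo* GetProfilingInfoFor(bool in_zygote) {
      static ProfilingInfo info;
      return in_zygote ? nullptr : &info;
    }

    void MaybeIncrementHotness(bool in_zygote) {
      ProfilingInfo* info = GetProfilingInfoFor(in_zygote);
      if (info != nullptr) {  // The guard this change adds.
        // Stands in for the emitted Ldrh/Add/Strh sequence.
        info->baseline_hotness_count++;
      }
    }

    int main() {
      MaybeIncrementHotness(/*in_zygote=*/ true);   // No-op: info is null.
      MaybeIncrementHotness(/*in_zygote=*/ false);  // Counter is incremented.
      return 0;
    }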

Test: 689-zygote-jit-deopt
Change-Id: I85e8b7f16b81ba4de435a5417dbb2588c34414b0
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index d4a41f7..3a2cf40 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -2103,43 +2103,45 @@
   if (GetGraph()->IsCompilingBaseline() && !Runtime::Current()->IsAotCompiler()) {
     ScopedObjectAccess soa(Thread::Current());
     ProfilingInfo* info = GetGraph()->GetArtMethod()->GetProfilingInfo(kRuntimePointerSize);
-    uint32_t address = reinterpret_cast32<uint32_t>(info);
-    vixl::aarch32::Label done;
-    UseScratchRegisterScope temps(GetVIXLAssembler());
-    temps.Exclude(ip);
-    if (!is_frame_entry) {
-      __ Push(r4);  // Will be used as temporary. For frame entry, r4 is always available.
-    }
-    __ Mov(r4, address);
-    __ Ldrh(ip, MemOperand(r4, ProfilingInfo::BaselineHotnessCountOffset().Int32Value()));
-    __ Add(ip, ip, 1);
-    __ Strh(ip, MemOperand(r4, ProfilingInfo::BaselineHotnessCountOffset().Int32Value()));
-    if (!is_frame_entry) {
-      __ Pop(r4);
-    }
-    __ Lsls(ip, ip, 16);
-    __ B(ne, &done);
-    uint32_t entry_point_offset =
-        GetThreadOffset<kArmPointerSize>(kQuickCompileOptimized).Int32Value();
-    if (HasEmptyFrame()) {
-      CHECK(is_frame_entry);
-      // For leaf methods, we need to spill lr and r0. Also spill r1 and r2 for
-      // alignment.
-      uint32_t core_spill_mask =
-          (1 << lr.GetCode()) | (1 << r0.GetCode()) | (1 << r1.GetCode()) | (1 << r2.GetCode());
-      __ Push(RegisterList(core_spill_mask));
+    if (info != nullptr) {
+      uint32_t address = reinterpret_cast32<uint32_t>(info);
+      vixl::aarch32::Label done;
+      UseScratchRegisterScope temps(GetVIXLAssembler());
+      temps.Exclude(ip);
+      if (!is_frame_entry) {
+        __ Push(r4);  // Will be used as temporary. For frame entry, r4 is always available.
+      }
+      __ Mov(r4, address);
+      __ Ldrh(ip, MemOperand(r4, ProfilingInfo::BaselineHotnessCountOffset().Int32Value()));
+      __ Add(ip, ip, 1);
+      __ Strh(ip, MemOperand(r4, ProfilingInfo::BaselineHotnessCountOffset().Int32Value()));
+      if (!is_frame_entry) {
+        __ Pop(r4);
+      }
+      __ Lsls(ip, ip, 16);
+      __ B(ne, &done);
+      uint32_t entry_point_offset =
+          GetThreadOffset<kArmPointerSize>(kQuickCompileOptimized).Int32Value();
+      if (HasEmptyFrame()) {
+        CHECK(is_frame_entry);
+        // For leaf methods, we need to spill lr and r0. Also spill r1 and r2 for
+        // alignment.
+        uint32_t core_spill_mask =
+            (1 << lr.GetCode()) | (1 << r0.GetCode()) | (1 << r1.GetCode()) | (1 << r2.GetCode());
+        __ Push(RegisterList(core_spill_mask));
+        __ Ldr(lr, MemOperand(tr, entry_point_offset));
+        __ Blx(lr);
+        __ Pop(RegisterList(core_spill_mask));
+      } else {
+        if (!RequiresCurrentMethod()) {
+          CHECK(is_frame_entry);
+          GetAssembler()->StoreToOffset(kStoreWord, kMethodRegister, sp, 0);
+        }
-      __ Ldr(lr, MemOperand(tr, entry_point_offset));
-      __ Blx(lr);
-      __ Pop(RegisterList(core_spill_mask));
-    } else {
-      if (!RequiresCurrentMethod()) {
-        CHECK(is_frame_entry);
-        GetAssembler()->StoreToOffset(kStoreWord, kMethodRegister, sp, 0);
-      }
-      __ Ldr(lr, MemOperand(tr, entry_point_offset));
-      __ Blx(lr);
-    }
-    __ Bind(&done);
+        __ Ldr(lr, MemOperand(tr, entry_point_offset));
+        __ Blx(lr);
+      }
+      __ Bind(&done);
+    }
   }
 }
 
@@ -3364,18 +3366,20 @@
     DCHECK(!instruction->GetEnvironment()->IsFromInlinedInvoke());
     ScopedObjectAccess soa(Thread::Current());
     ProfilingInfo* info = GetGraph()->GetArtMethod()->GetProfilingInfo(kRuntimePointerSize);
-    InlineCache* cache = info->GetInlineCache(instruction->GetDexPc());
-    uint32_t address = reinterpret_cast32<uint32_t>(cache);
-    vixl32::Label done;
-    UseScratchRegisterScope temps(GetVIXLAssembler());
-    temps.Exclude(ip);
-    __ Mov(r4, address);
-    __ Ldr(ip, MemOperand(r4, InlineCache::ClassesOffset().Int32Value()));
-    // Fast path for a monomorphic cache.
-    __ Cmp(klass, ip);
-    __ B(eq, &done, /* is_far_target= */ false);
-    InvokeRuntime(kQuickUpdateInlineCache, instruction, instruction->GetDexPc());
-    __ Bind(&done);
+    if (info != nullptr) {
+      InlineCache* cache = info->GetInlineCache(instruction->GetDexPc());
+      uint32_t address = reinterpret_cast32<uint32_t>(cache);
+      vixl32::Label done;
+      UseScratchRegisterScope temps(GetVIXLAssembler());
+      temps.Exclude(ip);
+      __ Mov(r4, address);
+      __ Ldr(ip, MemOperand(r4, InlineCache::ClassesOffset().Int32Value()));
+      // Fast path for a monomorphic cache.
+      __ Cmp(klass, ip);
+      __ B(eq, &done, /* is_far_target= */ false);
+      InvokeRuntime(kQuickUpdateInlineCache, instruction, instruction->GetDexPc());
+      __ Bind(&done);
+    }
   }
 }