Add debugging info

Test: art/test/testrunner/testrunner.py
Bug: 160737021
Change-Id: Iecc6e22d34e249415bf44f5bf054bfd9acf051db
diff --git a/cmdline/cmdline_types.h b/cmdline/cmdline_types.h
index dc2f8b7..8c535a6 100644
--- a/cmdline/cmdline_types.h
+++ b/cmdline/cmdline_types.h
@@ -539,7 +539,7 @@
   bool verify_pre_gc_heap_ = false;
   bool verify_pre_sweeping_heap_ = kIsDebugBuild;
   bool generational_cc = kEnableGenerationalCCByDefault;
-  bool verify_post_gc_heap_ = false;
+  bool verify_post_gc_heap_ = kIsDebugBuild;
   bool verify_pre_gc_rosalloc_ = kIsDebugBuild;
   bool verify_pre_sweeping_rosalloc_ = false;
   bool verify_post_gc_rosalloc_ = false;
diff --git a/runtime/gc/accounting/space_bitmap.cc b/runtime/gc/accounting/space_bitmap.cc
index 3c5688d..a0458d2 100644
--- a/runtime/gc/accounting/space_bitmap.cc
+++ b/runtime/gc/accounting/space_bitmap.cc
@@ -16,6 +16,9 @@
 
 #include "space_bitmap-inl.h"
 
+#include <iomanip>
+#include <sstream>
+
 #include "android-base/stringprintf.h"
 
 #include "art_field-inl.h"
@@ -113,6 +116,37 @@
                       reinterpret_cast<void*>(HeapLimit()));
 }
 
+template <size_t kAlignment>
+std::string SpaceBitmap<kAlignment>::DumpMemAround(mirror::Object* obj) const {
+  uintptr_t addr = reinterpret_cast<uintptr_t>(obj);
+  DCHECK_GE(addr, heap_begin_);
+  DCHECK(HasAddress(obj)) << obj;
+  const uintptr_t offset = addr - heap_begin_;
+  const size_t index = OffsetToIndex(offset);
+  const uintptr_t mask = OffsetToMask(offset);
+  size_t num_entries = bitmap_size_ / sizeof(uintptr_t);
+  DCHECK_LT(index, num_entries) << " bitmap_size_ = " << bitmap_size_;
+  Atomic<uintptr_t>* atomic_entry = &bitmap_begin_[index];
+  uintptr_t prev = 0;
+  uintptr_t next = 0;
+  if (index > 0) {
+    prev = (atomic_entry - 1)->load(std::memory_order_relaxed);
+  }
+  uintptr_t curr = atomic_entry->load(std::memory_order_relaxed);
+  if (index < num_entries - 1) {
+    next = (atomic_entry + 1)->load(std::memory_order_relaxed);
+  }
+  std::ostringstream oss;
+  oss << " offset: " << offset
+      << " index: " << index
+      << " mask: " << std::hex << std::setfill('0') << std::setw(16) << mask
+      << " words {" << std::hex << std::setfill('0') << std::setw(16) << prev
+      << ", " << std::hex << std::setfill('0') << std::setw(16) << curr
+      << ", " << std::hex << std::setfill('0') << std::setw(16) << next
+      << "}";
+  return oss.str();
+}
+
 template<size_t kAlignment>
 void SpaceBitmap<kAlignment>::Clear() {
   if (bitmap_begin_ != nullptr) {
diff --git a/runtime/gc/accounting/space_bitmap.h b/runtime/gc/accounting/space_bitmap.h
index 09a7ce4..c87b31e 100644
--- a/runtime/gc/accounting/space_bitmap.h
+++ b/runtime/gc/accounting/space_bitmap.h
@@ -207,6 +207,9 @@
 
   std::string Dump() const;
 
+  // Dump three bitmap words around obj.
+  std::string DumpMemAround(mirror::Object* obj) const;
+
   // Helper function for computing bitmap size based on a 64 bit capacity.
   static size_t ComputeBitmapSize(uint64_t capacity);
   static size_t ComputeHeapSize(uint64_t bitmap_bytes);
diff --git a/runtime/gc/collector/mark_compact-inl.h b/runtime/gc/collector/mark_compact-inl.h
index 9c46ed4..6f636c2 100644
--- a/runtime/gc/collector/mark_compact-inl.h
+++ b/runtime/gc/collector/mark_compact-inl.h
@@ -180,6 +180,32 @@
 inline void MarkCompact::UpdateRef(mirror::Object* obj, MemberOffset offset) {
   mirror::Object* old_ref = obj->GetFieldObject<
       mirror::Object, kVerifyNone, kWithoutReadBarrier, /*kIsVolatile*/false>(offset);
+  if (kIsDebugBuild) {
+    if (live_words_bitmap_->HasAddress(old_ref)
+        && reinterpret_cast<uint8_t*>(old_ref) < black_allocations_begin_
+        && !current_space_bitmap_->Test(old_ref)) {
+      mirror::Object* from_ref = GetFromSpaceAddr(old_ref);
+      std::ostringstream oss;
+      heap_->DumpSpaces(oss);
+      MemMap::DumpMaps(oss, /* terse= */ true);
+      LOG(FATAL) << "Not marked in the bitmap ref=" << old_ref
+                 << " from_ref=" << from_ref
+                 << " offset=" << offset
+                 << " obj=" << obj
+                 << " obj-validity=" << IsValidObject(obj)
+                 << " from-space=" << static_cast<void*>(from_space_begin_)
+                 << " bitmap= " << current_space_bitmap_->DumpMemAround(old_ref)
+                 << " from_ref "
+                 << heap_->GetVerification()->DumpRAMAroundAddress(
+                     reinterpret_cast<uintptr_t>(from_ref), 128)
+                 << " obj "
+                 << heap_->GetVerification()->DumpRAMAroundAddress(
+                     reinterpret_cast<uintptr_t>(obj), 128)
+                 << " old_ref " << heap_->GetVerification()->DumpRAMAroundAddress(
+                     reinterpret_cast<uintptr_t>(old_ref), 128)
+                 << " maps\n" << oss.str();
+    }
+  }
   mirror::Object* new_ref = PostCompactAddress(old_ref);
   if (new_ref != old_ref) {
     obj->SetFieldObjectWithoutWriteBarrier<
@@ -194,7 +220,6 @@
                                                 const RootInfo& info) {
   void* stack_end = stack_end_;
   void* stack_addr = stack_addr_;
-
   if (!live_words_bitmap_->HasAddress(old_ref)) {
     return false;
   }
@@ -288,6 +313,23 @@
   if (reinterpret_cast<uint8_t*>(old_ref) >= black_allocations_begin_) {
     return PostCompactBlackObjAddr(old_ref);
   }
+  if (kIsDebugBuild) {
+    mirror::Object* from_ref = GetFromSpaceAddr(old_ref);
+    DCHECK(live_words_bitmap_->Test(old_ref))
+         << "ref=" << old_ref;
+    if (!current_space_bitmap_->Test(old_ref)) {
+      std::ostringstream oss;
+      Runtime::Current()->GetHeap()->DumpSpaces(oss);
+      MemMap::DumpMaps(oss, /* terse= */ true);
+      LOG(FATAL) << "ref=" << old_ref
+                 << " from_ref=" << from_ref
+                 << " from-space=" << static_cast<void*>(from_space_begin_)
+                 << " bitmap= " << current_space_bitmap_->DumpMemAround(old_ref)
+                 << heap_->GetVerification()->DumpRAMAroundAddress(
+                         reinterpret_cast<uintptr_t>(from_ref), 128)
+                 << " maps\n" << oss.str();
+    }
+  }
   return PostCompactOldObjAddr(old_ref);
 }
 
diff --git a/runtime/gc/collector/mark_compact.cc b/runtime/gc/collector/mark_compact.cc
index 0bad044..8d98a3c 100644
--- a/runtime/gc/collector/mark_compact.cc
+++ b/runtime/gc/collector/mark_compact.cc
@@ -21,6 +21,8 @@
 #include "gc/accounting/mod_union_table-inl.h"
 #include "gc/reference_processor.h"
 #include "gc/space/bump_pointer_space.h"
+#include "gc/task_processor.h"
+#include "gc/verification-inl.h"
 #include "jit/jit_code_cache.h"
 #include "mirror/object-refvisitor-inl.h"
 #include "scoped_thread_state_change-inl.h"
@@ -653,38 +655,126 @@
   uint8_t* const end_;
 };
 
+bool MarkCompact::IsValidObject(mirror::Object* obj) const {
+  if (!heap_->GetVerification()->IsValidHeapObjectAddress(obj)) {
+    return false;
+  }
+  mirror::Class* klass = obj->GetClass<kVerifyNone, kWithoutReadBarrier>();
+  if (!heap_->GetVerification()->IsValidHeapObjectAddress(klass)) {
+    return false;
+  }
+  return heap_->GetVerification()->IsValidClassUnchecked<kWithFromSpaceBarrier>(
+          obj->GetClass<kVerifyNone, kWithFromSpaceBarrier>());
+}
+
+template <typename Callback>
+void MarkCompact::VerifyObject(mirror::Object* ref, Callback& callback) const {
+  if (kIsDebugBuild) {
+    mirror::Class* klass = ref->GetClass<kVerifyNone, kWithFromSpaceBarrier>();
+    mirror::Class* pre_compact_klass = ref->GetClass<kVerifyNone, kWithoutReadBarrier>();
+    mirror::Class* klass_klass = klass->GetClass<kVerifyNone, kWithFromSpaceBarrier>();
+    mirror::Class* klass_klass_klass = klass_klass->GetClass<kVerifyNone, kWithFromSpaceBarrier>();
+    if (bump_pointer_space_->HasAddress(pre_compact_klass) &&
+        reinterpret_cast<uint8_t*>(pre_compact_klass) < black_allocations_begin_) {
+      CHECK(current_space_bitmap_->Test(pre_compact_klass))
+          << "ref=" << ref
+          << " post_compact_end=" << static_cast<void*>(post_compact_end_)
+          << " pre_compact_klass=" << pre_compact_klass
+          << " black_allocations_begin=" << static_cast<void*>(black_allocations_begin_);
+      CHECK(live_words_bitmap_->Test(pre_compact_klass));
+    }
+    if (!IsValidObject(ref)) {
+      std::ostringstream oss;
+      oss << "Invalid object: "
+          << "ref=" << ref
+          << " klass=" << klass
+          << " klass_klass=" << klass_klass
+          << " klass_klass_klass=" << klass_klass_klass
+          << " pre_compact_klass=" << pre_compact_klass
+          << " from_space_begin=" << static_cast<void*>(from_space_begin_)
+          << " pre_compact_begin=" << static_cast<void*>(bump_pointer_space_->Begin())
+          << " post_compact_end=" << static_cast<void*>(post_compact_end_)
+          << " black_allocations_begin=" << static_cast<void*>(black_allocations_begin_);
+
+      // Call callback before dumping larger data like RAM and space dumps.
+      callback(oss);
+
+      oss << " \nobject="
+          << heap_->GetVerification()->DumpRAMAroundAddress(reinterpret_cast<uintptr_t>(ref), 128)
+          << " \nklass(from)="
+          << heap_->GetVerification()->DumpRAMAroundAddress(reinterpret_cast<uintptr_t>(klass), 128)
+          << "spaces:\n";
+      heap_->DumpSpaces(oss);
+      LOG(FATAL) << oss.str();
+    }
+  }
+}
+
 void MarkCompact::CompactPage(mirror::Object* obj, uint32_t offset, uint8_t* addr) {
   DCHECK(IsAligned<kPageSize>(addr));
-  obj = GetFromSpaceAddr(obj);
   DCHECK(current_space_bitmap_->Test(obj)
          && live_words_bitmap_->Test(obj));
   DCHECK(live_words_bitmap_->Test(offset)) << "obj=" << obj
-                                              << " offset=" << offset
-                                              << " addr=" << static_cast<void*>(addr)
-                                              << " black_allocs_begin="
-                                              << static_cast<void*>(black_allocations_begin_)
-                                              << " post_compact_addr="
-                                              << static_cast<void*>(post_compact_end_);
+                                           << " offset=" << offset
+                                           << " addr=" << static_cast<void*>(addr)
+                                           << " black_allocs_begin="
+                                           << static_cast<void*>(black_allocations_begin_)
+                                           << " post_compact_addr="
+                                           << static_cast<void*>(post_compact_end_);
   uint8_t* const start_addr = addr;
   // How many distinct live-strides do we have.
   size_t stride_count = 0;
   uint8_t* last_stride;
   uint32_t last_stride_begin = 0;
+  auto verify_obj_callback = [&] (std::ostream& os) {
+                               os << " stride_count=" << stride_count
+                                  << " last_stride=" << static_cast<void*>(last_stride)
+                                  << " offset=" << offset
+                                  << " start_addr=" << static_cast<void*>(start_addr);
+                             };
+  obj = GetFromSpaceAddr(obj);
   live_words_bitmap_->VisitLiveStrides(offset,
                                        black_allocations_begin_,
                                        kPageSize,
-                                       [&] (uint32_t stride_begin,
-                                            size_t stride_size,
-                                            bool is_last) {
+                                       [&addr,
+                                        &last_stride,
+                                        &stride_count,
+                                        &last_stride_begin,
+                                        verify_obj_callback,
+                                        this] (uint32_t stride_begin,
+                                               size_t stride_size,
+                                               bool /*is_last*/)
+                                        REQUIRES_SHARED(Locks::mutator_lock_) {
                                          const size_t stride_in_bytes = stride_size * kAlignment;
                                          DCHECK_LE(stride_in_bytes, kPageSize);
                                          last_stride_begin = stride_begin;
+                                         DCHECK(IsAligned<kAlignment>(addr));
                                          memcpy(addr,
                                                 from_space_begin_ + stride_begin * kAlignment,
                                                 stride_in_bytes);
-                                         if (is_last) {
-                                            last_stride = addr;
+                                         if (kIsDebugBuild) {
+                                           uint8_t* space_begin = bump_pointer_space_->Begin();
+                                           // We can interpret the first word of the stride as an
+                                           // obj only from second stride onwards, as the first
+                                           // stride's first-object may have started on previous
+                                           // page. The only exception is the first page of the
+                                           // moving space.
+                                           if (stride_count > 0
+                                               || stride_begin * kAlignment < kPageSize) {
+                                             mirror::Object* o =
+                                                reinterpret_cast<mirror::Object*>(space_begin
+                                                                                  + stride_begin
+                                                                                  * kAlignment);
+                                             CHECK(live_words_bitmap_->Test(o)) << "ref=" << o;
+                                             CHECK(current_space_bitmap_->Test(o))
+                                                 << "ref=" << o
+                                                 << " bitmap: "
+                                                 << current_space_bitmap_->DumpMemAround(o);
+                                             VerifyObject(reinterpret_cast<mirror::Object*>(addr),
+                                                          verify_obj_callback);
+                                           }
                                          }
+                                         last_stride = addr;
                                          addr += stride_in_bytes;
                                          stride_count++;
                                        });
@@ -730,6 +820,7 @@
   // checks.
   while (addr < last_stride) {
     mirror::Object* ref = reinterpret_cast<mirror::Object*>(addr);
+    VerifyObject(ref, verify_obj_callback);
     RefsUpdateVisitor</*kCheckBegin*/false, /*kCheckEnd*/false>
             visitor(this, ref, nullptr, nullptr);
     obj_size = ref->VisitRefsForCompaction(visitor, MemberOffset(0), MemberOffset(-1));
@@ -744,6 +835,7 @@
   while (addr < end_addr) {
     mirror::Object* ref = reinterpret_cast<mirror::Object*>(addr);
     obj = reinterpret_cast<mirror::Object*>(from_addr);
+    VerifyObject(ref, verify_obj_callback);
     RefsUpdateVisitor</*kCheckBegin*/false, /*kCheckEnd*/true>
             visitor(this, ref, nullptr, start_addr + kPageSize);
     obj_size = obj->VisitRefsForCompaction(visitor,
@@ -779,6 +871,14 @@
   uint8_t* const dest_page_end = dest + kPageSize;
   DCHECK(IsAligned<kPageSize>(dest_page_end));
 
+  auto verify_obj_callback = [&] (std::ostream& os) {
+                               os << " first_obj=" << first_obj
+                                  << " next_page_first_obj=" << next_page_first_obj
+                                  << " first_chunk_size=" << first_chunk_size
+                                  << " dest=" << static_cast<void*>(dest)
+                                  << " pre_compact_page="
+                                  << static_cast<void* const>(pre_compact_page);
+                             };
   // We have empty portion at the beginning of the page. Zero it.
   if (pre_compact_addr > pre_compact_page) {
     bytes_copied = pre_compact_addr - pre_compact_page;
@@ -846,6 +946,7 @@
     }
     while (bytes_to_visit > 0) {
       mirror::Object* dest_obj = reinterpret_cast<mirror::Object*>(dest);
+      VerifyObject(dest_obj, verify_obj_callback);
       RefsUpdateVisitor</*kCheckBegin*/false, /*kCheckEnd*/false> visitor(this,
                                                                           dest_obj,
                                                                           nullptr,
@@ -858,6 +959,7 @@
     DCHECK_EQ(bytes_to_visit, 0u);
     if (check_last_obj) {
       mirror::Object* dest_obj = reinterpret_cast<mirror::Object*>(dest);
+      VerifyObject(dest_obj, verify_obj_callback);
       RefsUpdateVisitor</*kCheckBegin*/false, /*kCheckEnd*/true> visitor(this,
                                                                          dest_obj,
                                                                          nullptr,
@@ -900,10 +1002,11 @@
     current_space_bitmap_->VisitMarkedRange(
             reinterpret_cast<uintptr_t>(found_obj) + mirror::kObjectHeaderSize,
             page_end,
-            [&found_obj, pre_compact_addr, dest, this] (mirror::Object* obj)
+            [&found_obj, pre_compact_addr, dest, this, verify_obj_callback] (mirror::Object* obj)
             REQUIRES_SHARED(Locks::mutator_lock_) {
               ptrdiff_t diff = reinterpret_cast<uint8_t*>(found_obj) - pre_compact_addr;
               mirror::Object* ref = reinterpret_cast<mirror::Object*>(dest + diff);
+              VerifyObject(ref, verify_obj_callback);
               RefsUpdateVisitor</*kCheckBegin*/false, /*kCheckEnd*/false>
                       visitor(this, ref, nullptr, nullptr);
               ref->VisitRefsForCompaction</*kFetchObjSize*/false>(visitor,
@@ -918,6 +1021,7 @@
     DCHECK_LT(reinterpret_cast<uintptr_t>(found_obj), page_end);
     ptrdiff_t diff = reinterpret_cast<uint8_t*>(found_obj) - pre_compact_addr;
     mirror::Object* ref = reinterpret_cast<mirror::Object*>(dest + diff);
+    VerifyObject(ref, verify_obj_callback);
     RefsUpdateVisitor</*kCheckBegin*/false, /*kCheckEnd*/true> visitor(this,
                                                                        ref,
                                                                        nullptr,
@@ -1367,6 +1471,10 @@
   gc_barrier_.Init(thread_running_gc_, 0);
   StackRefsUpdateVisitor thread_visitor(this, black_objs_slide_diff_);
   CompactionPauseCallback callback(this);
+  // To increase likelihood of black allocations. For testing purposes only.
+  if (kIsDebugBuild && heap_->GetTaskProcessor()->GetRunningThread() == thread_running_gc_) {
+    sleep(10);
+  }
 
   size_t barrier_count = Runtime::Current()->GetThreadList()->FlipThreadRoots(
       &thread_visitor, &callback, this, GetHeap()->GetGcPauseListener());
diff --git a/runtime/gc/collector/mark_compact.h b/runtime/gc/collector/mark_compact.h
index e354ed8..92f0830 100644
--- a/runtime/gc/collector/mark_compact.h
+++ b/runtime/gc/collector/mark_compact.h
@@ -163,7 +163,8 @@
     ALWAYS_INLINE void VisitLiveStrides(uintptr_t begin_bit_idx,
                                         uint8_t* end,
                                         const size_t bytes,
-                                        Visitor&& visitor) const;
+                                        Visitor&& visitor) const
+        REQUIRES_SHARED(Locks::mutator_lock_);
     // Count the number of live bytes in the given vector entry.
     size_t LiveBytesInBitmapWord(size_t chunk_idx) const;
     void ClearBitmap() { Bitmap::Clear(); }
@@ -192,6 +193,14 @@
                                              + from_space_slide_diff_);
   }
 
+  // Verifies that the given object reference refers to a valid object.
+  // Otherwise fatally dumps logs, including those from callback.
+  template <typename Callback>
+  void VerifyObject(mirror::Object* ref, Callback& callback) const
+      REQUIRES_SHARED(Locks::mutator_lock_);
+  // Check if the obj is within heap and has a klass which is likely to be valid
+  // mirror::Class.
+  bool IsValidObject(mirror::Object* obj) const REQUIRES_SHARED(Locks::mutator_lock_);
   void InitializePhase();
   void FinishPhase() REQUIRES(!Locks::mutator_lock_, !Locks::heap_bitmap_lock_);
   void MarkingPhase() REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Locks::heap_bitmap_lock_);
diff --git a/runtime/gc/heap_verification_test.cc b/runtime/gc/heap_verification_test.cc
index ca6a30b..789a8e3 100644
--- a/runtime/gc/heap_verification_test.cc
+++ b/runtime/gc/heap_verification_test.cc
@@ -26,7 +26,7 @@
 #include "mirror/string.h"
 #include "runtime.h"
 #include "scoped_thread_state_change-inl.h"
-#include "verification.h"
+#include "verification-inl.h"
 
 namespace art {
 namespace gc {
@@ -76,11 +76,11 @@
   Handle<mirror::String> string(
       hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "test")));
   const Verification* const v = Runtime::Current()->GetHeap()->GetVerification();
-  EXPECT_FALSE(v->IsValidClass(reinterpret_cast<const void*>(1)));
-  EXPECT_FALSE(v->IsValidClass(reinterpret_cast<const void*>(4)));
+  EXPECT_FALSE(v->IsValidClass(reinterpret_cast<mirror::Class*>(1)));
+  EXPECT_FALSE(v->IsValidClass(reinterpret_cast<mirror::Class*>(4)));
   EXPECT_FALSE(v->IsValidClass(nullptr));
   EXPECT_TRUE(v->IsValidClass(string->GetClass()));
-  EXPECT_FALSE(v->IsValidClass(string.Get()));
+  EXPECT_FALSE(v->IsValidClass(reinterpret_cast<mirror::Class*>(string.Get())));
 }
 
 TEST_F(VerificationTest, IsValidClassInHeap) {
@@ -95,9 +95,9 @@
   Handle<mirror::String> string(
       hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "test")));
   const Verification* const v = Runtime::Current()->GetHeap()->GetVerification();
-  const uintptr_t uint_klass = reinterpret_cast<uintptr_t>(string->GetClass());
-  EXPECT_FALSE(v->IsValidClass(reinterpret_cast<const void*>(uint_klass - kObjectAlignment)));
-  EXPECT_FALSE(v->IsValidClass(reinterpret_cast<const void*>(&uint_klass)));
+  uintptr_t uint_klass = reinterpret_cast<uintptr_t>(string->GetClass());
+  EXPECT_FALSE(v->IsValidClass(reinterpret_cast<mirror::Class*>(uint_klass - kObjectAlignment)));
+  EXPECT_FALSE(v->IsValidClass(reinterpret_cast<mirror::Class*>(&uint_klass)));
 }
 
 TEST_F(VerificationTest, DumpInvalidObjectInfo) {
diff --git a/runtime/gc/verification-inl.h b/runtime/gc/verification-inl.h
new file mode 100644
index 0000000..1ef96e2
--- /dev/null
+++ b/runtime/gc/verification-inl.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_GC_VERIFICATION_INL_H_
+#define ART_RUNTIME_GC_VERIFICATION_INL_H_
+
+#include "verification.h"
+
+#include "mirror/class-inl.h"
+
+namespace art {
+namespace gc {
+
+template <ReadBarrierOption kReadBarrierOption>
+bool Verification::IsValidClassUnchecked(mirror::Class* klass) const {
+  mirror::Class* k1 = klass->GetClass<kVerifyNone, kReadBarrierOption>();
+  if (!IsValidHeapObjectAddress(k1)) {
+    return false;
+  }
+  // `k1` should be class class, take the class again to verify.
+  // Note that this check may not be valid for the no image space
+  // since the class class might move around from moving GC.
+  mirror::Class* k2 = k1->GetClass<kVerifyNone, kReadBarrierOption>();
+  if (!IsValidHeapObjectAddress(k2)) {
+    return false;
+  }
+  return k1 == k2;
+}
+
+template <ReadBarrierOption kReadBarrierOption>
+bool Verification::IsValidClass(mirror::Class* klass) const {
+  if (!IsValidHeapObjectAddress(klass)) {
+    return false;
+  }
+  return IsValidClassUnchecked<kReadBarrierOption>(klass);
+}
+
+template <ReadBarrierOption kReadBarrierOption>
+bool Verification::IsValidObject(mirror::Object* obj) const {
+  if (!IsValidHeapObjectAddress(obj)) {
+    return false;
+  }
+  mirror::Class* klass = obj->GetClass<kVerifyNone, kReadBarrierOption>();
+  return IsValidClass(klass);
+}
+
+}  // namespace gc
+}  // namespace art
+
+#endif  // ART_RUNTIME_GC_VERIFICATION_INL_H_
diff --git a/runtime/gc/verification.cc b/runtime/gc/verification.cc
index 9e0b8a2..5790755 100644
--- a/runtime/gc/verification.cc
+++ b/runtime/gc/verification.cc
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#include "verification.h"
+#include "verification-inl.h"
 
 #include <iomanip>
 #include <sstream>
@@ -29,23 +29,16 @@
 namespace gc {
 
 std::string Verification::DumpRAMAroundAddress(uintptr_t addr, uintptr_t bytes) const {
-  const uintptr_t dump_start = addr - bytes;
-  const uintptr_t dump_end = addr + bytes;
+  uintptr_t* dump_start = reinterpret_cast<uintptr_t*>(addr - bytes);
+  uintptr_t* dump_end = reinterpret_cast<uintptr_t*>(addr + bytes);
   std::ostringstream oss;
-  if (dump_start < dump_end &&
-      IsAddressInHeapSpace(reinterpret_cast<const void*>(dump_start)) &&
-      IsAddressInHeapSpace(reinterpret_cast<const void*>(dump_end - 1))) {
-    oss << " adjacent_ram=";
-    for (uintptr_t p = dump_start; p < dump_end; ++p) {
-      if (p == addr) {
-        // Marker of where the address is.
-        oss << "|";
-      }
-      uint8_t* ptr = reinterpret_cast<uint8_t*>(p);
-      oss << std::hex << std::setfill('0') << std::setw(2) << static_cast<uintptr_t>(*ptr);
+  oss << " adjacent_ram=";
+  for (const uintptr_t* p = dump_start; p < dump_end; ++p) {
+    if (p == reinterpret_cast<uintptr_t*>(addr)) {
+      // Marker of where the address is.
+      oss << "|";
     }
-  } else {
-    oss << " <invalid address>";
+    oss << std::hex << std::setfill('0') << std::setw(sizeof(uintptr_t) * 2) << *p << " ";
   }
   return oss.str();
 }
@@ -132,25 +125,6 @@
   return IsAligned<kObjectAlignment>(addr) && IsAddressInHeapSpace(addr, out_space);
 }
 
-bool Verification::IsValidClass(const void* addr) const {
-  if (!IsValidHeapObjectAddress(addr)) {
-    return false;
-  }
-  mirror::Class* klass = reinterpret_cast<mirror::Class*>(const_cast<void*>(addr));
-  mirror::Class* k1 = klass->GetClass<kVerifyNone, kWithoutReadBarrier>();
-  if (!IsValidHeapObjectAddress(k1)) {
-    return false;
-  }
-  // `k1` should be class class, take the class again to verify.
-  // Note that this check may not be valid for the no image space since the class class might move
-  // around from moving GC.
-  mirror::Class* k2 = k1->GetClass<kVerifyNone, kWithoutReadBarrier>();
-  if (!IsValidHeapObjectAddress(k2)) {
-    return false;
-  }
-  return k1 == k2;
-}
-
 using ObjectSet = std::set<mirror::Object*>;
 using WorkQueue = std::deque<std::pair<mirror::Object*, std::string>>;
 
diff --git a/runtime/gc/verification.h b/runtime/gc/verification.h
index 6b456fd..7a5d01a 100644
--- a/runtime/gc/verification.h
+++ b/runtime/gc/verification.h
@@ -19,6 +19,7 @@
 
 #include "obj_ptr.h"
 #include "offsets.h"
+#include "read_barrier_option.h"
 
 namespace art {
 
@@ -50,7 +51,16 @@
                          bool fatal) const REQUIRES_SHARED(Locks::mutator_lock_);
 
   // Return true if the klass is likely to be a valid mirror::Class.
-  bool IsValidClass(const void* klass) const REQUIRES_SHARED(Locks::mutator_lock_);
+  // Returns true if the class is a valid mirror::Class; may spuriously return true.
+  template <ReadBarrierOption kReadBarrierOption = kWithoutReadBarrier>
+  bool IsValidClassUnchecked(mirror::Class* klass) const
+      REQUIRES_SHARED(Locks::mutator_lock_);
+  // Return true if the klass is likely to be a valid mirror::Class.
+  template <ReadBarrierOption kReadBarrierOption = kWithoutReadBarrier>
+  bool IsValidClass(mirror::Class* klass) const REQUIRES_SHARED(Locks::mutator_lock_);
+  // Return true if the obj is likely to be a valid obj with valid mirror::Class.
+  template <ReadBarrierOption kReadBarrierOption = kWithoutReadBarrier>
+  bool IsValidObject(mirror::Object* obj) const REQUIRES_SHARED(Locks::mutator_lock_);
 
   // Does not allow null, checks alignment.
   bool IsValidHeapObjectAddress(const void* addr, space::Space** out_space = nullptr) const