author      2017-04-18 11:26:22 -0700
committer   2017-04-21 15:55:16 -0700
commit      1ca689096b532e007dc9f8ba16db4731e6afd719 (patch)
tree        e07c8c5c0eba7494d7dba628caaa1cd2bbe6066f
parent      9ae527f615f61aec4aaca310c52f373e8c3d8d58 (diff)
More robust GC verification and corruption dumping
Added a test for GC heap corruption dumping and added more info to the
dump, such as adjacent bytes and the card table.
Added heap corruption detection in
ConcurrentCopying::MarkNonMoving().
Bug: 37187694
Bug: 12687968
Test: mm test-art-host-gtest-verification_test -j20
Change-Id: I8c90e45796d0784265aa091b2f8082f0cfb62719
-rw-r--r--   build/Android.gtest.mk                         |   2
-rw-r--r--   runtime/Android.bp                             |   2
-rw-r--r--   runtime/gc/collector/concurrent_copying-inl.h  |   6
-rw-r--r--   runtime/gc/collector/concurrent_copying.cc     |  10
-rw-r--r--   runtime/gc/collector/concurrent_copying.h      |   9
-rw-r--r--   runtime/gc/heap.cc                             |   6
-rw-r--r--   runtime/gc/heap.h                              |   5
-rw-r--r--   runtime/gc/heap_verification_test.cc           | 119
-rw-r--r--   runtime/gc/verification.cc                     | 141
-rw-r--r--   runtime/gc/verification.h                      |  67
10 files changed, 362 insertions, 5 deletions
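
The core of the change is the new corruption check in ConcurrentCopying::MarkNonMoving(), shown here condensed from the hunk in the diff below (surrounding marking logic omitted):

    // A large-object reference that is not page aligned cannot be a valid object,
    // so dump diagnostics (and abort, since fatal is true) before the lock-word
    // access in AtomicSetReadBarrierState, which would fault on a bad address.
    if (is_los && !IsAligned<kPageSize>(ref)) {
      heap_->GetVerification()->LogHeapCorruption(ref, offset, holder, /* fatal */ true);
    }

LogHeapCorruption() (added in runtime/gc/verification.cc) dumps /proc/self/maps, the memory maps, and DumpObjectInfo() for both the invalid reference and its holder, including the containing space, the card table value, and adjacent raw bytes.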
diff --git a/build/Android.gtest.mk b/build/Android.gtest.mk
index ed34a8df5f..11af1c0ca8 100644
--- a/build/Android.gtest.mk
+++ b/build/Android.gtest.mk
@@ -125,6 +125,7 @@ ART_GTEST_stub_test_DEX_DEPS := AllFields
 ART_GTEST_transaction_test_DEX_DEPS := Transaction
 ART_GTEST_type_lookup_table_test_DEX_DEPS := Lookup
 ART_GTEST_unstarted_runtime_test_DEX_DEPS := Nested
+ART_GTEST_heap_verification_test_DEX_DEPS := ProtoCompare ProtoCompare2 StaticsFromCode XandY
 ART_GTEST_verifier_deps_test_DEX_DEPS := VerifierDeps VerifierDepsMulti MultiDex
 ART_GTEST_dex_to_dex_decompiler_test_DEX_DEPS := VerifierDeps DexToDexDecompiler
@@ -655,6 +656,7 @@ ART_GTEST_reflection_test_DEX_DEPS :=
 ART_GTEST_stub_test_DEX_DEPS :=
 ART_GTEST_transaction_test_DEX_DEPS :=
 ART_GTEST_dex2oat_environment_tests_DEX_DEPS :=
+ART_GTEST_heap_verification_test_DEX_DEPS :=
 ART_GTEST_verifier_deps_test_DEX_DEPS :=
 ART_VALGRIND_DEPENDENCIES :=
 ART_VALGRIND_TARGET_DEPENDENCIES :=
diff --git a/runtime/Android.bp b/runtime/Android.bp
index 2866d4b4a0..38d5b211fb 100644
--- a/runtime/Android.bp
+++ b/runtime/Android.bp
@@ -93,6 +93,7 @@ cc_defaults {
         "gc/space/space.cc",
         "gc/space/zygote_space.cc",
         "gc/task_processor.cc",
+        "gc/verification.cc",
         "hprof/hprof.cc",
         "image.cc",
         "indirect_reference_table.cc",
@@ -546,6 +547,7 @@ art_cc_test {
         "gc/accounting/space_bitmap_test.cc",
         "gc/collector/immune_spaces_test.cc",
         "gc/heap_test.cc",
+        "gc/heap_verification_test.cc",
         "gc/reference_queue_test.cc",
         "gc/space/dlmalloc_space_static_test.cc",
         "gc/space/dlmalloc_space_random_test.cc",
diff --git a/runtime/gc/collector/concurrent_copying-inl.h b/runtime/gc/collector/concurrent_copying-inl.h
index 854d0a58ff..53645479f0 100644
--- a/runtime/gc/collector/concurrent_copying-inl.h
+++ b/runtime/gc/collector/concurrent_copying-inl.h
@@ -96,7 +96,9 @@ inline mirror::Object* ConcurrentCopying::MarkImmuneSpace(mirror::Object* ref) {
 }
 
 template<bool kGrayImmuneObject, bool kFromGCThread>
-inline mirror::Object* ConcurrentCopying::Mark(mirror::Object* from_ref) {
+inline mirror::Object* ConcurrentCopying::Mark(mirror::Object* from_ref,
+                                               mirror::Object* holder,
+                                               MemberOffset offset) {
   if (from_ref == nullptr) {
     return nullptr;
   }
@@ -141,7 +143,7 @@ inline mirror::Object* ConcurrentCopying::Mark(mirror::Object* from_ref) {
       if (immune_spaces_.ContainsObject(from_ref)) {
         return MarkImmuneSpace<kGrayImmuneObject>(from_ref);
       } else {
-        return MarkNonMoving(from_ref);
+        return MarkNonMoving(from_ref, holder, offset);
       }
     default:
       UNREACHABLE();
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index a091fcedd6..80b569add3 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -29,6 +29,7 @@
 #include "gc/reference_processor.h"
 #include "gc/space/image_space.h"
 #include "gc/space/space-inl.h"
+#include "gc/verification.h"
 #include "image-inl.h"
 #include "intern_table.h"
 #include "mirror/class-inl.h"
@@ -2362,7 +2363,9 @@ bool ConcurrentCopying::IsOnAllocStack(mirror::Object* ref) {
   return alloc_stack->Contains(ref);
 }
 
-mirror::Object* ConcurrentCopying::MarkNonMoving(mirror::Object* ref) {
+mirror::Object* ConcurrentCopying::MarkNonMoving(mirror::Object* ref,
+                                                 mirror::Object* holder,
+                                                 MemberOffset offset) {
   // ref is in a non-moving space (from_ref == to_ref).
   DCHECK(!region_space_->HasAddress(ref)) << ref;
   DCHECK(!immune_spaces_.ContainsObject(ref));
@@ -2408,6 +2411,11 @@ mirror::Object* ConcurrentCopying::MarkNonMoving(mirror::Object* ref) {
       return ref;
     }
   }
+  if (is_los && !IsAligned<kPageSize>(ref)) {
+    // Ref is a large object that is not aligned, it must be heap corruption. Dump data before
+    // AtomicSetReadBarrierState since it will fault if the address is not valid.
+    heap_->GetVerification()->LogHeapCorruption(ref, offset, holder, /* fatal */ true);
+  }
   // Not marked or on the allocation stack. Try to mark it.
   // This may or may not succeed, which is ok.
   bool cas_success = false;
diff --git a/runtime/gc/collector/concurrent_copying.h b/runtime/gc/collector/concurrent_copying.h
index 398a7e2ae7..c21520d2fa 100644
--- a/runtime/gc/collector/concurrent_copying.h
+++ b/runtime/gc/collector/concurrent_copying.h
@@ -106,7 +106,9 @@ class ConcurrentCopying : public GarbageCollector {
     return IsMarked(ref) == ref;
   }
   template<bool kGrayImmuneObject = true, bool kFromGCThread = false>
-  ALWAYS_INLINE mirror::Object* Mark(mirror::Object* from_ref)
+  ALWAYS_INLINE mirror::Object* Mark(mirror::Object* from_ref,
+                                     mirror::Object* holder = nullptr,
+                                     MemberOffset offset = MemberOffset(0))
       REQUIRES_SHARED(Locks::mutator_lock_)
       REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
   ALWAYS_INLINE mirror::Object* MarkFromReadBarrier(mirror::Object* from_ref)
@@ -224,7 +226,10 @@ class ConcurrentCopying : public GarbageCollector {
   void DisableMarking() REQUIRES_SHARED(Locks::mutator_lock_);
   void IssueDisableMarkingCheckpoint() REQUIRES_SHARED(Locks::mutator_lock_);
   void ExpandGcMarkStack() REQUIRES_SHARED(Locks::mutator_lock_);
-  mirror::Object* MarkNonMoving(mirror::Object* from_ref) REQUIRES_SHARED(Locks::mutator_lock_)
+  mirror::Object* MarkNonMoving(mirror::Object* from_ref,
+                                mirror::Object* holder = nullptr,
+                                MemberOffset offset = MemberOffset(0))
+      REQUIRES_SHARED(Locks::mutator_lock_)
       REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_);
   ALWAYS_INLINE mirror::Object* MarkUnevacFromSpaceRegion(mirror::Object* from_ref,
                                                           accounting::SpaceBitmap<kObjectAlignment>* bitmap)
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index e08784dc63..7a0eea4af1 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -60,6 +60,7 @@
 #include "gc/space/space-inl.h"
 #include "gc/space/zygote_space.h"
 #include "gc/task_processor.h"
+#include "gc/verification.h"
 #include "entrypoints/quick/quick_alloc_entrypoints.h"
 #include "gc_pause_listener.h"
 #include "heap-inl.h"
@@ -286,6 +287,7 @@ Heap::Heap(size_t initial_size,
     CHECK_EQ(foreground_collector_type_, kCollectorTypeCC);
     CHECK_EQ(background_collector_type_, kCollectorTypeCCBackground);
   }
+  verification_.reset(new Verification(this));
   CHECK_GE(large_object_threshold, kMinLargeObjectThreshold);
   ScopedTrace trace(__FUNCTION__);
   Runtime* const runtime = Runtime::Current();
@@ -4267,5 +4269,9 @@ mirror::Object* Heap::AllocWithNewTLAB(Thread* self,
   return ret;
 }
 
+const Verification* Heap::GetVerification() const {
+  return verification_.get();
+}
+
 }  // namespace gc
 }  // namespace art
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index 241d84ce22..aa123d8736 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -64,6 +64,7 @@ class AllocRecordObjectMap;
 class GcPauseListener;
 class ReferenceProcessor;
 class TaskProcessor;
+class Verification;
 
 namespace accounting {
 class HeapBitmap;
@@ -821,6 +822,8 @@ class Heap {
   // reasons, we assume it stays valid when we read it (so that we don't require a lock).
   void RemoveGcPauseListener();
 
+  const Verification* GetVerification() const;
+
  private:
   class ConcurrentGCTask;
   class CollectorTransitionTask;
@@ -1433,6 +1436,8 @@ class Heap {
   // An installed GC Pause listener.
   Atomic<GcPauseListener*> gc_pause_listener_;
 
+  std::unique_ptr<Verification> verification_;
+
   friend class CollectorTransitionTask;
   friend class collector::GarbageCollector;
   friend class collector::MarkCompact;
diff --git a/runtime/gc/heap_verification_test.cc b/runtime/gc/heap_verification_test.cc
new file mode 100644
index 0000000000..480ba2abb1
--- /dev/null
+++ b/runtime/gc/heap_verification_test.cc
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "common_runtime_test.h"
+#include "verification.h"
+#include "mirror/string.h"
+#include "scoped_thread_state_change-inl.h"
+
+namespace art {
+namespace gc {
+
+class VerificationTest : public CommonRuntimeTest {
+ protected:
+  VerificationTest() {}
+
+  template <class T>
+  mirror::ObjectArray<T>* AllocObjectArray(Thread* self, size_t length)
+      REQUIRES_SHARED(Locks::mutator_lock_) {
+    ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
+    return mirror::ObjectArray<T>::Alloc(
+        self,
+        class_linker->GetClassRoot(ClassLinker::ClassRoot::kObjectArrayClass),
+        length);
+  }
+};
+
+TEST_F(VerificationTest, IsValidHeapObjectAddress) {
+  ScopedObjectAccess soa(Thread::Current());
+  const Verification* const v = Runtime::Current()->GetHeap()->GetVerification();
+  EXPECT_FALSE(v->IsValidHeapObjectAddress(reinterpret_cast<const void*>(1)));
+  EXPECT_FALSE(v->IsValidHeapObjectAddress(reinterpret_cast<const void*>(4)));
+  EXPECT_FALSE(v->IsValidHeapObjectAddress(nullptr));
+  VariableSizedHandleScope hs(soa.Self());
+  Handle<mirror::String> string(
+      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "test")));
+  EXPECT_TRUE(v->IsValidHeapObjectAddress(string.Get()));
+  EXPECT_TRUE(v->IsValidHeapObjectAddress(string->GetClass()));
+  const uintptr_t uint_klass = reinterpret_cast<uintptr_t>(string->GetClass());
+  // Not actually a valid object but the verification can't know that. Guaranteed to be inside a
+  // heap space.
+  EXPECT_TRUE(v->IsValidHeapObjectAddress(
+      reinterpret_cast<const void*>(uint_klass + kObjectAlignment)));
+  EXPECT_FALSE(v->IsValidHeapObjectAddress(
+      reinterpret_cast<const void*>(&uint_klass)));
+}
+
+TEST_F(VerificationTest, IsValidClass) {
+  ScopedObjectAccess soa(Thread::Current());
+  VariableSizedHandleScope hs(soa.Self());
+  Handle<mirror::String> string(
+      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "test")));
+  const Verification* const v = Runtime::Current()->GetHeap()->GetVerification();
+  EXPECT_FALSE(v->IsValidClass(reinterpret_cast<const void*>(1)));
+  EXPECT_FALSE(v->IsValidClass(reinterpret_cast<const void*>(4)));
+  EXPECT_FALSE(v->IsValidClass(nullptr));
+  EXPECT_FALSE(v->IsValidClass(string.Get()));
+  EXPECT_TRUE(v->IsValidClass(string->GetClass()));
+  const uintptr_t uint_klass = reinterpret_cast<uintptr_t>(string->GetClass());
+  EXPECT_FALSE(v->IsValidClass(reinterpret_cast<const void*>(uint_klass - kObjectAlignment)));
+  EXPECT_FALSE(v->IsValidClass(reinterpret_cast<const void*>(&uint_klass)));
+}
+
+TEST_F(VerificationTest, DumpObjectInfo) {
+  ScopedLogSeverity sls(LogSeverity::INFO);
+  ScopedObjectAccess soa(Thread::Current());
+  Runtime* const runtime = Runtime::Current();
+  VariableSizedHandleScope hs(soa.Self());
+  Handle<mirror::String> string(
+      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "obj")));
+  Handle<mirror::ObjectArray<mirror::Object>> arr(
+      hs.NewHandle(AllocObjectArray<mirror::Object>(soa.Self(), 256)));
+  const Verification* const v = runtime->GetHeap()->GetVerification();
+  LOG(INFO) << v->DumpObjectInfo(reinterpret_cast<const void*>(1), "obj");
+  LOG(INFO) << v->DumpObjectInfo(reinterpret_cast<const void*>(4), "obj");
+  LOG(INFO) << v->DumpObjectInfo(nullptr, "obj");
+  LOG(INFO) << v->DumpObjectInfo(string.Get(), "test");
+  LOG(INFO) << v->DumpObjectInfo(string->GetClass(), "obj");
+  const uintptr_t uint_klass = reinterpret_cast<uintptr_t>(string->GetClass());
+  LOG(INFO) << v->DumpObjectInfo(reinterpret_cast<const void*>(uint_klass - kObjectAlignment),
+                                 "obj");
+  LOG(INFO) << v->DumpObjectInfo(reinterpret_cast<const void*>(&uint_klass), "obj");
+  LOG(INFO) << v->DumpObjectInfo(arr.Get(), "arr");
+}
+
+TEST_F(VerificationTest, LogHeapCorruption) {
+  ScopedLogSeverity sls(LogSeverity::INFO);
+  ScopedObjectAccess soa(Thread::Current());
+  Runtime* const runtime = Runtime::Current();
+  VariableSizedHandleScope hs(soa.Self());
+  Handle<mirror::String> string(
+      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "obj")));
+  using ObjArray = mirror::ObjectArray<mirror::Object>;
+  Handle<ObjArray> arr(
+      hs.NewHandle(AllocObjectArray<mirror::Object>(soa.Self(), 256)));
+  const Verification* const v = runtime->GetHeap()->GetVerification();
+  arr->Set(0, string.Get());
+  // Test normal cases.
+  v->LogHeapCorruption(arr.Get(), ObjArray::DataOffset(kHeapReferenceSize), string.Get(), false);
+  v->LogHeapCorruption(string.Get(), mirror::Object::ClassOffset(), string->GetClass(), false);
+  // Test null holder cases.
+  v->LogHeapCorruption(nullptr, MemberOffset(0), string.Get(), false);
+  v->LogHeapCorruption(nullptr, MemberOffset(0), arr.Get(), false);
+}
+
+}  // namespace gc
+}  // namespace art
diff --git a/runtime/gc/verification.cc b/runtime/gc/verification.cc
new file mode 100644
index 0000000000..9e79cb4a5f
--- /dev/null
+++ b/runtime/gc/verification.cc
@@ -0,0 +1,141 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "verification.h"
+
+#include <iomanip>
+#include <sstream>
+
+#include "mirror/class-inl.h"
+
+namespace art {
+namespace gc {
+
+std::string Verification::DumpObjectInfo(const void* addr, const char* tag) const {
+  std::ostringstream oss;
+  oss << tag << "=" << addr;
+  if (IsValidHeapObjectAddress(addr)) {
+    mirror::Object* obj = reinterpret_cast<mirror::Object*>(const_cast<void*>(addr));
+    mirror::Class* klass = obj->GetClass<kVerifyNone, kWithoutReadBarrier>();
+    oss << " klass=" << klass;
+    if (IsValidClass(klass)) {
+      oss << "(" << klass->PrettyClass() << ")";
+      if (klass->IsArrayClass<kVerifyNone, kWithoutReadBarrier>()) {
+        oss << " length=" << obj->AsArray<kVerifyNone, kWithoutReadBarrier>()->GetLength();
+      }
+    } else {
+      oss << " <invalid address>";
+    }
+    space::Space* const space = heap_->FindSpaceFromAddress(addr);
+    if (space != nullptr) {
+      oss << " space=" << *space;
+    }
+    accounting::CardTable* card_table = heap_->GetCardTable();
+    if (card_table->AddrIsInCardTable(addr)) {
+      oss << " card=" << static_cast<size_t>(
+          card_table->GetCard(reinterpret_cast<const mirror::Object*>(addr)));
+    }
+    // Dump adjacent RAM.
+    const uintptr_t uint_addr = reinterpret_cast<uintptr_t>(addr);
+    static constexpr size_t kBytesBeforeAfter = 2 * kObjectAlignment;
+    const uintptr_t dump_start = uint_addr - kBytesBeforeAfter;
+    const uintptr_t dump_end = uint_addr + kBytesBeforeAfter;
+    if (dump_start < dump_end &&
+        IsValidHeapObjectAddress(reinterpret_cast<const void*>(dump_start)) &&
+        IsValidHeapObjectAddress(reinterpret_cast<const void*>(dump_end - kObjectAlignment))) {
+      oss << " adjacent_ram=";
+      for (uintptr_t p = dump_start; p < dump_end; ++p) {
+        if (p == uint_addr) {
+          // Marker of where the object is.
+          oss << "|";
+        }
+        uint8_t* ptr = reinterpret_cast<uint8_t*>(p);
+        oss << std::hex << std::setfill('0') << std::setw(2) << static_cast<uintptr_t>(*ptr);
+      }
+    }
+  } else {
+    oss << " <invalid address>";
+  }
+  return oss.str();
+}
+
+void Verification::LogHeapCorruption(ObjPtr<mirror::Object> holder,
+                                     MemberOffset offset,
+                                     mirror::Object* ref,
+                                     bool fatal) const {
+  // Lowest priority logging first:
+  PrintFileToLog("/proc/self/maps", LogSeverity::FATAL_WITHOUT_ABORT);
+  MemMap::DumpMaps(LOG_STREAM(FATAL_WITHOUT_ABORT), true);
+  // Buffer the output in the string stream since it is more important than the stack traces
+  // and we want it to have log priority. The stack traces are printed from Runtime::Abort
+  // which is called from LOG(FATAL) but before the abort message.
+  std::ostringstream oss;
+  oss << "GC tried to mark invalid reference " << ref << std::endl;
+  oss << DumpObjectInfo(ref, "ref") << "\n";
+  if (holder != nullptr) {
+    oss << DumpObjectInfo(holder.Ptr(), "holder");
+    mirror::Class* holder_klass = holder->GetClass<kVerifyNone, kWithoutReadBarrier>();
+    if (IsValidClass(holder_klass)) {
+      oss << "field_offset=" << offset.Uint32Value();
+      ArtField* field = holder->FindFieldByOffset(offset);
+      if (field != nullptr) {
+        oss << " name=" << field->GetName();
+      }
+    }
+  }
+
+  if (fatal) {
+    LOG(FATAL) << oss.str();
+  } else {
+    LOG(FATAL_WITHOUT_ABORT) << oss.str();
+  }
+}
+
+bool Verification::IsValidHeapObjectAddress(const void* addr, space::Space** out_space) const {
+  if (!IsAligned<kObjectAlignment>(addr)) {
+    return false;
+  }
+  space::Space* const space = heap_->FindSpaceFromAddress(addr);
+  if (space != nullptr) {
+    if (out_space != nullptr) {
+      *out_space = space;
+    }
+    return true;
+  }
+  return false;
+}
+
+bool Verification::IsValidClass(const void* addr) const {
+  if (!IsValidHeapObjectAddress(addr)) {
+    return false;
+  }
+  mirror::Class* klass = reinterpret_cast<mirror::Class*>(const_cast<void*>(addr));
+  mirror::Class* k1 = klass->GetClass<kVerifyNone, kWithoutReadBarrier>();
+  if (!IsValidHeapObjectAddress(k1)) {
+    return false;
+  }
+  // k should be class class, take the class again to verify.
+  // Note that this check may not be valid for the no image space since the class class might move
+  // around from moving GC.
+  mirror::Class* k2 = k1->GetClass<kVerifyNone, kWithoutReadBarrier>();
+  if (!IsValidHeapObjectAddress(k2)) {
+    return false;
+  }
+  return k1 == k2;
+}
+
+}  // namespace gc
+}  // namespace art
diff --git a/runtime/gc/verification.h b/runtime/gc/verification.h
new file mode 100644
index 0000000000..3d95d93015
--- /dev/null
+++ b/runtime/gc/verification.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_GC_VERIFICATION_H_
+#define ART_RUNTIME_GC_VERIFICATION_H_
+
+#include "obj_ptr.h"
+#include "offsets.h"
+
+namespace art {
+
+namespace mirror {
+class Class;
+class Object;
+}  // namespace mirror
+
+namespace gc {
+
+namespace space {
+class Space;
+}  // namespace space
+
+class Heap;
+
+class Verification {
+ public:
+  explicit Verification(gc::Heap* heap) : heap_(heap) {}
+
+  // Dump some reveant to debugging info about an object.
+  std::string DumpObjectInfo(const void* obj, const char* tag) const
+      REQUIRES_SHARED(Locks::mutator_lock_);
+
+  // Don't use ObjPtr for things that might not be aligned like the invalid reference.
+  void LogHeapCorruption(ObjPtr<mirror::Object> holder,
+                         MemberOffset offset,
+                         mirror::Object* ref,
+                         bool fatal) const REQUIRES_SHARED(Locks::mutator_lock_);
+
+
+  // Return true if the klass is likely to be a valid mirror::Class.
+  bool IsValidClass(const void* klass) const REQUIRES_SHARED(Locks::mutator_lock_);
+
+  // Does not allow null.
+  bool IsValidHeapObjectAddress(const void* addr, space::Space** out_space = nullptr) const
+      REQUIRES_SHARED(Locks::mutator_lock_);
+
+ private:
+  gc::Heap* const heap_;
+};
+
+}  // namespace gc
+}  // namespace art
+
+#endif  // ART_RUNTIME_GC_VERIFICATION_H_