-rw-r--r--  compiler/image_writer.cc          | 109
-rw-r--r--  compiler/image_writer.h           |  11
-rw-r--r--  runtime/class_linker.cc           |  85
-rw-r--r--  runtime/gc/heap.cc                | 172
-rw-r--r--  runtime/hprof/hprof.cc            |  15
-rw-r--r--  runtime/openjdkjvmti/ti_class.cc  |  15
-rw-r--r--  runtime/openjdkjvmti/ti_heap.cc   |  83
7 files changed, 183 insertions(+), 307 deletions(-)
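This change rewrites every caller of Heap::VisitObjects and Heap::VisitObjectsPaused from the old C-style interface, a function pointer plus an untyped void* state argument, to a templated interface that accepts any callable; the template lives in the new header gc/heap-visit-objects-inl.h, which is why each affected file adds that include. Call sites can then pass capturing lambdas and delete their static trampoline callbacks and the reinterpret_cast state recovery inside them. A minimal sketch of the before/after pattern, using simplified stand-in types rather than the actual ART declarations:

    // Sketch only: Object and Heap here are stand-ins, not the ART classes.
    #include <vector>

    struct Object {};  // Stand-in for art::mirror::Object.

    class Heap {
     public:
      // Before: a function pointer plus an untyped state pointer. Every call
      // site needed a static trampoline that reinterpret_cast the void* back
      // to its real state type.
      using ObjectCallback = void (*)(Object* obj, void* arg);
      void VisitObjectsOld(ObjectCallback callback, void* arg) {
        for (Object* obj : objects_) {
          callback(obj, arg);
        }
      }

      // After: a template that accepts any callable. A capturing lambda
      // carries its state in its closure, with no void* round-trip.
      template <typename Visitor>
      void VisitObjects(Visitor&& visitor) {
        for (Object* obj : objects_) {
          visitor(obj);
        }
      }

     private:
      std::vector<Object*> objects_;
    };

Besides removing the casts, the templated form can let the compiler inline the visitor body into the heap walk, which the function-pointer indirection generally prevented.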
diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc
index 4f1fef9f58..f92bf95065 100644
--- a/compiler/image_writer.cc
+++ b/compiler/image_writer.cc
@@ -44,6 +44,7 @@
 #include "gc/accounting/space_bitmap-inl.h"
 #include "gc/collector/concurrent_copying.h"
 #include "gc/heap.h"
+#include "gc/heap-visit-objects-inl.h"
 #include "gc/space/large_object_space.h"
 #include "gc/space/space-inl.h"
 #include "gc/verification.h"
@@ -117,19 +118,17 @@ bool ImageWriter::IsInBootOatFile(const void* ptr) const {
   return false;
 }
 
-static void ClearDexFileCookieCallback(Object* obj, void* arg ATTRIBUTE_UNUSED)
-    REQUIRES_SHARED(Locks::mutator_lock_) {
-  DCHECK(obj != nullptr);
-  Class* klass = obj->GetClass();
-  if (klass == WellKnownClasses::ToClass(WellKnownClasses::dalvik_system_DexFile)) {
-    ArtField* field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_cookie);
-    // Null out the cookie to enable determinism. b/34090128
-    field->SetObject</*kTransactionActive*/false>(obj, nullptr);
-  }
-}
-
 static void ClearDexFileCookies() REQUIRES_SHARED(Locks::mutator_lock_) {
-  Runtime::Current()->GetHeap()->VisitObjects(ClearDexFileCookieCallback, nullptr);
+  auto visitor = [](Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
+    DCHECK(obj != nullptr);
+    Class* klass = obj->GetClass();
+    if (klass == WellKnownClasses::ToClass(WellKnownClasses::dalvik_system_DexFile)) {
+      ArtField* field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_cookie);
+      // Null out the cookie to enable determinism. b/34090128
+      field->SetObject</*kTransactionActive*/false>(obj, nullptr);
+    }
+  };
+  Runtime::Current()->GetHeap()->VisitObjects(visitor);
 }
 
 bool ImageWriter::PrepareImageAddressSpace() {
@@ -1176,21 +1175,19 @@ void ImageWriter::PruneNonImageClasses() {
 
 void ImageWriter::CheckNonImageClassesRemoved() {
   if (compiler_driver_.GetImageClasses() != nullptr) {
+    auto visitor = [&](Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
+      if (obj->IsClass() && !IsInBootImage(obj)) {
+        Class* klass = obj->AsClass();
+        if (!KeepClass(klass)) {
+          DumpImageClasses();
+          std::string temp;
+          CHECK(KeepClass(klass))
+              << Runtime::Current()->GetHeap()->GetVerification()->FirstPathFromRootSet(klass);
+        }
+      }
+    };
     gc::Heap* heap = Runtime::Current()->GetHeap();
-    heap->VisitObjects(CheckNonImageClassesRemovedCallback, this);
-  }
-}
-
-void ImageWriter::CheckNonImageClassesRemovedCallback(Object* obj, void* arg) {
-  ImageWriter* image_writer = reinterpret_cast<ImageWriter*>(arg);
-  if (obj->IsClass() && !image_writer->IsInBootImage(obj)) {
-    Class* klass = obj->AsClass();
-    if (!image_writer->KeepClass(klass)) {
-      image_writer->DumpImageClasses();
-      std::string temp;
-      CHECK(image_writer->KeepClass(klass))
-          << Runtime::Current()->GetHeap()->GetVerification()->FirstPathFromRootSet(klass);
-    }
+    heap->VisitObjects(visitor);
   }
 }
 
@@ -1532,26 +1529,6 @@ void ImageWriter::AssignMethodOffset(ArtMethod* method,
   offset += ArtMethod::Size(target_ptr_size_);
 }
 
-void ImageWriter::EnsureBinSlotAssignedCallback(mirror::Object* obj, void* arg) {
-  ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
-  DCHECK(writer != nullptr);
-  if (!Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(obj)) {
-    CHECK(writer->IsImageBinSlotAssigned(obj)) << mirror::Object::PrettyTypeOf(obj) << " " << obj;
-  }
-}
-
-void ImageWriter::DeflateMonitorCallback(mirror::Object* obj, void* arg ATTRIBUTE_UNUSED) {
-  Monitor::Deflate(Thread::Current(), obj);
-}
-
-void ImageWriter::UnbinObjectsIntoOffsetCallback(mirror::Object* obj, void* arg) {
-  ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
-  DCHECK(writer != nullptr);
-  if (!writer->IsInBootImage(obj)) {
-    writer->UnbinObjectsIntoOffset(obj);
-  }
-}
-
 void ImageWriter::UnbinObjectsIntoOffset(mirror::Object* obj) {
   DCHECK(!IsInBootImage(obj));
   CHECK(obj != nullptr);
@@ -1686,7 +1663,12 @@ void ImageWriter::CalculateNewObjectOffsets() {
 
   // Deflate monitors before we visit roots since deflating acquires the monitor lock. Acquiring
   // this lock while holding other locks may cause lock order violations.
-  heap->VisitObjects(DeflateMonitorCallback, this);
+  {
+    auto deflate_monitor = [](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
+      Monitor::Deflate(Thread::Current(), obj);
+    };
+    heap->VisitObjects(deflate_monitor);
+  }
 
   // Work list of <object, oat_index> for objects. Everything on the stack must already be
   // assigned a bin slot.
@@ -1748,7 +1730,15 @@ void ImageWriter::CalculateNewObjectOffsets() {
   }
 
   // Verify that all objects have assigned image bin slots.
-  heap->VisitObjects(EnsureBinSlotAssignedCallback, this);
+  {
+    auto ensure_bin_slots_assigned = [&](mirror::Object* obj)
+        REQUIRES_SHARED(Locks::mutator_lock_) {
+      if (!Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(obj)) {
+        CHECK(IsImageBinSlotAssigned(obj)) << mirror::Object::PrettyTypeOf(obj) << " " << obj;
+      }
+    };
+    heap->VisitObjects(ensure_bin_slots_assigned);
+  }
 
   // Calculate size of the dex cache arrays slot and prepare offsets.
   PrepareDexCacheArraySlots();
@@ -1812,7 +1802,15 @@ void ImageWriter::CalculateNewObjectOffsets() {
   }
 
   // Transform each object's bin slot into an offset which will be used to do the final copy.
-  heap->VisitObjects(UnbinObjectsIntoOffsetCallback, this);
+  {
+    auto unbin_objects_into_offset = [&](mirror::Object* obj)
+        REQUIRES_SHARED(Locks::mutator_lock_) {
+      if (!IsInBootImage(obj)) {
+        UnbinObjectsIntoOffset(obj);
+      }
+    };
+    heap->VisitObjects(unbin_objects_into_offset);
+  }
 
   size_t i = 0;
   for (ImageInfo& image_info : image_infos_) {
@@ -2119,8 +2117,11 @@ void ImageWriter::CopyAndFixupNativeData(size_t oat_index) {
 }
 
 void ImageWriter::CopyAndFixupObjects() {
-  gc::Heap* heap = Runtime::Current()->GetHeap();
-  heap->VisitObjects(CopyAndFixupObjectsCallback, this);
+  auto visitor = [&](Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
+    DCHECK(obj != nullptr);
+    CopyAndFixupObject(obj);
+  };
+  Runtime::Current()->GetHeap()->VisitObjects(visitor);
   // Fix up the object previously had hash codes.
   for (const auto& hash_pair : saved_hashcode_map_) {
     Object* obj = hash_pair.first;
@@ -2130,12 +2131,6 @@ void ImageWriter::CopyAndFixupObjects() {
   saved_hashcode_map_.clear();
 }
 
-void ImageWriter::CopyAndFixupObjectsCallback(Object* obj, void* arg) {
-  DCHECK(obj != nullptr);
-  DCHECK(arg != nullptr);
-  reinterpret_cast<ImageWriter*>(arg)->CopyAndFixupObject(obj);
-}
-
 void ImageWriter::FixupPointerArray(mirror::Object* dst,
                                     mirror::PointerArray* arr,
                                     mirror::Class* klass,
diff --git a/compiler/image_writer.h b/compiler/image_writer.h
index c42523b783..ee6fc1dff6 100644
--- a/compiler/image_writer.h
+++ b/compiler/image_writer.h
@@ -397,8 +397,6 @@ class ImageWriter FINAL {
 
   // Verify unwanted classes removed.
   void CheckNonImageClassesRemoved() REQUIRES_SHARED(Locks::mutator_lock_);
-  static void CheckNonImageClassesRemovedCallback(mirror::Object* obj, void* arg)
-      REQUIRES_SHARED(Locks::mutator_lock_);
 
   // Lays out where the image objects will be at runtime.
   void CalculateNewObjectOffsets()
@@ -414,18 +412,9 @@ class ImageWriter FINAL {
   void UnbinObjectsIntoOffset(mirror::Object* obj)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  static void EnsureBinSlotAssignedCallback(mirror::Object* obj, void* arg)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-  static void DeflateMonitorCallback(mirror::Object* obj, void* arg)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-  static void UnbinObjectsIntoOffsetCallback(mirror::Object* obj, void* arg)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-
   // Creates the contiguous image in memory and adjusts pointers.
   void CopyAndFixupNativeData(size_t oat_index) REQUIRES_SHARED(Locks::mutator_lock_);
   void CopyAndFixupObjects() REQUIRES_SHARED(Locks::mutator_lock_);
-  static void CopyAndFixupObjectsCallback(mirror::Object* obj, void* arg)
-      REQUIRES_SHARED(Locks::mutator_lock_);
   void CopyAndFixupObject(mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_);
   void CopyAndFixupMethod(ArtMethod* orig, ArtMethod* copy, const ImageInfo& image_info)
       REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 1c3375c93a..1ee4026fa4 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -57,6 +57,7 @@
 #include "gc/accounting/heap_bitmap-inl.h"
 #include "gc/accounting/space_bitmap-inl.h"
 #include "gc/heap.h"
+#include "gc/heap-visit-objects-inl.h"
 #include "gc/scoped_gc_critical_section.h"
 #include "gc/space/image_space.h"
 #include "gc/space/space-inl.h"
@@ -1620,7 +1621,46 @@ class ImageSanityChecks FINAL {
   static void CheckObjects(gc::Heap* heap, ClassLinker* class_linker)
       REQUIRES_SHARED(Locks::mutator_lock_) {
     ImageSanityChecks isc(heap, class_linker);
-    heap->VisitObjects(ImageSanityChecks::SanityCheckObjectsCallback, &isc);
+    auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
+      DCHECK(obj != nullptr);
+      CHECK(obj->GetClass() != nullptr) << "Null class in object " << obj;
+      CHECK(obj->GetClass()->GetClass() != nullptr) << "Null class class " << obj;
+      if (obj->IsClass()) {
+        auto klass = obj->AsClass();
+        for (ArtField& field : klass->GetIFields()) {
+          CHECK_EQ(field.GetDeclaringClass(), klass);
+        }
+        for (ArtField& field : klass->GetSFields()) {
+          CHECK_EQ(field.GetDeclaringClass(), klass);
+        }
+        const auto pointer_size = isc.pointer_size_;
+        for (auto& m : klass->GetMethods(pointer_size)) {
+          isc.SanityCheckArtMethod(&m, klass);
+        }
+        auto* vtable = klass->GetVTable();
+        if (vtable != nullptr) {
+          isc.SanityCheckArtMethodPointerArray(vtable, nullptr);
+        }
+        if (klass->ShouldHaveImt()) {
+          ImTable* imt = klass->GetImt(pointer_size);
+          for (size_t i = 0; i < ImTable::kSize; ++i) {
+            isc.SanityCheckArtMethod(imt->Get(i, pointer_size), nullptr);
+          }
+        }
+        if (klass->ShouldHaveEmbeddedVTable()) {
+          for (int32_t i = 0; i < klass->GetEmbeddedVTableLength(); ++i) {
+            isc.SanityCheckArtMethod(klass->GetEmbeddedVTableEntry(i, pointer_size), nullptr);
+          }
+        }
+        mirror::IfTable* iftable = klass->GetIfTable();
+        for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
+          if (iftable->GetMethodArrayCount(i) > 0) {
+            isc.SanityCheckArtMethodPointerArray(iftable->GetMethodArray(i), nullptr);
+          }
+        }
+      }
+    };
+    heap->VisitObjects(visitor);
   }
 
   static void CheckPointerArray(gc::Heap* heap,
@@ -1632,49 +1672,6 @@ class ImageSanityChecks FINAL {
     isc.SanityCheckArtMethodPointerArray(arr, size);
   }
 
-  static void SanityCheckObjectsCallback(mirror::Object* obj, void* arg)
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    DCHECK(obj != nullptr);
-    CHECK(obj->GetClass() != nullptr) << "Null class in object " << obj;
-    CHECK(obj->GetClass()->GetClass() != nullptr) << "Null class class " << obj;
-    if (obj->IsClass()) {
-      ImageSanityChecks* isc = reinterpret_cast<ImageSanityChecks*>(arg);
-
-      auto klass = obj->AsClass();
-      for (ArtField& field : klass->GetIFields()) {
-        CHECK_EQ(field.GetDeclaringClass(), klass);
-      }
-      for (ArtField& field : klass->GetSFields()) {
-        CHECK_EQ(field.GetDeclaringClass(), klass);
-      }
-      const auto pointer_size = isc->pointer_size_;
-      for (auto& m : klass->GetMethods(pointer_size)) {
-        isc->SanityCheckArtMethod(&m, klass);
-      }
-      auto* vtable = klass->GetVTable();
-      if (vtable != nullptr) {
-        isc->SanityCheckArtMethodPointerArray(vtable, nullptr);
-      }
-      if (klass->ShouldHaveImt()) {
-        ImTable* imt = klass->GetImt(pointer_size);
-        for (size_t i = 0; i < ImTable::kSize; ++i) {
-          isc->SanityCheckArtMethod(imt->Get(i, pointer_size), nullptr);
-        }
-      }
-      if (klass->ShouldHaveEmbeddedVTable()) {
-        for (int32_t i = 0; i < klass->GetEmbeddedVTableLength(); ++i) {
-          isc->SanityCheckArtMethod(klass->GetEmbeddedVTableEntry(i, pointer_size), nullptr);
-        }
-      }
-      mirror::IfTable* iftable = klass->GetIfTable();
-      for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
-        if (iftable->GetMethodArrayCount(i) > 0) {
-          isc->SanityCheckArtMethodPointerArray(iftable->GetMethodArray(i), nullptr);
-        }
-      }
-    }
-  }
-
  private:
   ImageSanityChecks(gc::Heap* heap, ClassLinker* class_linker)
       : spaces_(heap->GetBootImageSpaces()),
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index dfa3ff9433..2515316e6f 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -1919,138 +1919,84 @@ uint64_t Heap::GetBytesAllocatedEver() const {
   return GetBytesFreedEver() + GetBytesAllocated();
 }
 
-class InstanceCounter {
- public:
-  InstanceCounter(const std::vector<Handle<mirror::Class>>& classes,
-                  bool use_is_assignable_from,
-                  uint64_t* counts)
-      REQUIRES_SHARED(Locks::mutator_lock_)
-      : classes_(classes), use_is_assignable_from_(use_is_assignable_from), counts_(counts) {}
-
-  static void Callback(mirror::Object* obj, void* arg)
-      REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
-    InstanceCounter* instance_counter = reinterpret_cast<InstanceCounter*>(arg);
+void Heap::CountInstances(const std::vector<Handle<mirror::Class>>& classes,
+                          bool use_is_assignable_from,
+                          uint64_t* counts) {
+  auto instance_counter = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
     mirror::Class* instance_class = obj->GetClass();
     CHECK(instance_class != nullptr);
-    for (size_t i = 0; i < instance_counter->classes_.size(); ++i) {
-      ObjPtr<mirror::Class> klass = instance_counter->classes_[i].Get();
-      if (instance_counter->use_is_assignable_from_) {
+    for (size_t i = 0; i < classes.size(); ++i) {
+      ObjPtr<mirror::Class> klass = classes[i].Get();
+      if (use_is_assignable_from) {
         if (klass != nullptr && klass->IsAssignableFrom(instance_class)) {
-          ++instance_counter->counts_[i];
+          ++counts[i];
         }
       } else if (instance_class == klass) {
-        ++instance_counter->counts_[i];
+        ++counts[i];
       }
     }
-  }
-
- private:
-  const std::vector<Handle<mirror::Class>>& classes_;
-  bool use_is_assignable_from_;
-  uint64_t* const counts_;
-  DISALLOW_COPY_AND_ASSIGN(InstanceCounter);
-};
-
-void Heap::CountInstances(const std::vector<Handle<mirror::Class>>& classes,
-                          bool use_is_assignable_from,
-                          uint64_t* counts) {
-  InstanceCounter counter(classes, use_is_assignable_from, counts);
-  VisitObjects(InstanceCounter::Callback, &counter);
+  };
+  VisitObjects(instance_counter);
 }
 
-class InstanceCollector {
- public:
-  InstanceCollector(VariableSizedHandleScope& scope,
-                    Handle<mirror::Class> c,
-                    int32_t max_count,
-                    std::vector<Handle<mirror::Object>>& instances)
-      REQUIRES_SHARED(Locks::mutator_lock_)
-      : scope_(scope),
-        class_(c),
-        max_count_(max_count),
-        instances_(instances) {}
-
-  static void Callback(mirror::Object* obj, void* arg)
-      REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
-    DCHECK(arg != nullptr);
-    InstanceCollector* instance_collector = reinterpret_cast<InstanceCollector*>(arg);
-    if (obj->GetClass() == instance_collector->class_.Get()) {
-      if (instance_collector->max_count_ == 0 ||
-          instance_collector->instances_.size() < instance_collector->max_count_) {
-        instance_collector->instances_.push_back(instance_collector->scope_.NewHandle(obj));
-      }
-    }
-  }
-
- private:
-  VariableSizedHandleScope& scope_;
-  Handle<mirror::Class> const class_;
-  const uint32_t max_count_;
-  std::vector<Handle<mirror::Object>>& instances_;
-  DISALLOW_COPY_AND_ASSIGN(InstanceCollector);
-};
-
 void Heap::GetInstances(VariableSizedHandleScope& scope,
-                        Handle<mirror::Class> c,
+                        Handle<mirror::Class> h_class,
                         int32_t max_count,
                         std::vector<Handle<mirror::Object>>& instances) {
-  InstanceCollector collector(scope, c, max_count, instances);
-  VisitObjects(&InstanceCollector::Callback, &collector);
-}
-
-class ReferringObjectsFinder {
- public:
-  ReferringObjectsFinder(VariableSizedHandleScope& scope,
-                         Handle<mirror::Object> object,
-                         int32_t max_count,
-                         std::vector<Handle<mirror::Object>>& referring_objects)
-      REQUIRES_SHARED(Locks::mutator_lock_)
-      : scope_(scope),
-        object_(object),
-        max_count_(max_count),
-        referring_objects_(referring_objects) {}
-
-  static void Callback(mirror::Object* obj, void* arg)
-      REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
-    reinterpret_cast<ReferringObjectsFinder*>(arg)->operator()(obj);
-  }
-
-  // For bitmap Visit.
-  // TODO: Fix lock analysis to not use NO_THREAD_SAFETY_ANALYSIS, requires support for
-  // annotalysis on visitors.
-  void operator()(ObjPtr<mirror::Object> o) const NO_THREAD_SAFETY_ANALYSIS {
-    o->VisitReferences(*this, VoidFunctor());
-  }
-
-  // For Object::VisitReferences.
-  void operator()(ObjPtr<mirror::Object> obj,
-                  MemberOffset offset,
-                  bool is_static ATTRIBUTE_UNUSED) const
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    mirror::Object* ref = obj->GetFieldObject<mirror::Object>(offset);
-    if (ref == object_.Get() && (max_count_ == 0 || referring_objects_.size() < max_count_)) {
-      referring_objects_.push_back(scope_.NewHandle(obj));
+  DCHECK_GE(max_count, 0);
+  auto instance_collector = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
+    if (obj->GetClass() == h_class.Get()) {
+      if (max_count == 0 || instances.size() < static_cast<size_t>(max_count)) {
+        instances.push_back(scope.NewHandle(obj));
+      }
     }
-  }
-
-  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
-      const {}
-  void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
-
- private:
-  VariableSizedHandleScope& scope_;
-  Handle<mirror::Object> const object_;
-  const uint32_t max_count_;
-  std::vector<Handle<mirror::Object>>& referring_objects_;
-  DISALLOW_COPY_AND_ASSIGN(ReferringObjectsFinder);
-};
+  };
+  VisitObjects(instance_collector);
+}
 
 void Heap::GetReferringObjects(VariableSizedHandleScope& scope,
                                Handle<mirror::Object> o,
                                int32_t max_count,
                                std::vector<Handle<mirror::Object>>& referring_objects) {
+  class ReferringObjectsFinder {
+   public:
+    ReferringObjectsFinder(VariableSizedHandleScope& scope_in,
+                           Handle<mirror::Object> object_in,
+                           int32_t max_count_in,
+                           std::vector<Handle<mirror::Object>>& referring_objects_in)
+        REQUIRES_SHARED(Locks::mutator_lock_)
+        : scope_(scope_in),
+          object_(object_in),
+          max_count_(max_count_in),
+          referring_objects_(referring_objects_in) {}
+
+    // For Object::VisitReferences.
+    void operator()(ObjPtr<mirror::Object> obj,
+                    MemberOffset offset,
+                    bool is_static ATTRIBUTE_UNUSED) const
+        REQUIRES_SHARED(Locks::mutator_lock_) {
+      mirror::Object* ref = obj->GetFieldObject<mirror::Object>(offset);
+      if (ref == object_.Get() && (max_count_ == 0 || referring_objects_.size() < max_count_)) {
+        referring_objects_.push_back(scope_.NewHandle(obj));
+      }
+    }
+
+    void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
+        const {}
+    void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
+
+   private:
+    VariableSizedHandleScope& scope_;
+    Handle<mirror::Object> const object_;
+    const uint32_t max_count_;
+    std::vector<Handle<mirror::Object>>& referring_objects_;
+    DISALLOW_COPY_AND_ASSIGN(ReferringObjectsFinder);
+  };
   ReferringObjectsFinder finder(scope, o, max_count, referring_objects);
-  VisitObjects(&ReferringObjectsFinder::Callback, &finder);
+  auto referring_objects_finder = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
+    obj->VisitReferences(finder, VoidFunctor());
+  };
+  VisitObjects(referring_objects_finder);
 }
 
 void Heap::CollectGarbage(bool clear_soft_references) {
diff --git a/runtime/hprof/hprof.cc b/runtime/hprof/hprof.cc
index ec860c76f2..f428bc2751 100644
--- a/runtime/hprof/hprof.cc
+++ b/runtime/hprof/hprof.cc
@@ -52,6 +52,7 @@
 #include "gc/allocation_record.h"
 #include "gc/scoped_gc_critical_section.h"
 #include "gc/heap.h"
+#include "gc/heap-visit-objects-inl.h"
 #include "gc/space/space.h"
 #include "globals.h"
 #include "jdwp/jdwp.h"
@@ -485,13 +486,6 @@ class Hprof : public SingleRootVisitor {
   }
 
  private:
-  static void VisitObjectCallback(mirror::Object* obj, void* arg)
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    DCHECK(obj != nullptr);
-    DCHECK(arg != nullptr);
-    reinterpret_cast<Hprof*>(arg)->DumpHeapObject(obj);
-  }
-
   void DumpHeapObject(mirror::Object* obj)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
@@ -534,8 +528,11 @@ class Hprof : public SingleRootVisitor {
     simple_roots_.clear();
     runtime->VisitRoots(this);
     runtime->VisitImageRoots(this);
-    runtime->GetHeap()->VisitObjectsPaused(VisitObjectCallback, this);
-
+    auto dump_object = [this](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
+      DCHECK(obj != nullptr);
+      DumpHeapObject(obj);
+    };
+    runtime->GetHeap()->VisitObjectsPaused(dump_object);
     output_->StartNewRecord(HPROF_TAG_HEAP_DUMP_END, kHprofTime);
     output_->EndRecord();
   }
diff --git a/runtime/openjdkjvmti/ti_class.cc b/runtime/openjdkjvmti/ti_class.cc
index b8e79555ae..99dfcfe665 100644
--- a/runtime/openjdkjvmti/ti_class.cc
+++ b/runtime/openjdkjvmti/ti_class.cc
@@ -46,6 +46,7 @@
 #include "events-inl.h"
 #include "fixed_up_dex_file.h"
 #include "gc/heap.h"
+#include "gc/heap-visit-objects-inl.h"
 #include "gc_root.h"
 #include "handle.h"
 #include "jni_env_ext-inl.h"
@@ -544,21 +545,15 @@ struct ClassCallback : public art::ClassLoadCallback {
       LOG(FATAL) << "Unreachable";
     }
-
-    static void AllObjectsCallback(art::mirror::Object* obj, void* arg)
-        REQUIRES_SHARED(art::Locks::mutator_lock_) {
-      HeapFixupVisitor* hfv = reinterpret_cast<HeapFixupVisitor*>(arg);
-
-      // Visit references, not native roots.
-      obj->VisitReferences<false>(*hfv, *hfv);
-    }
 
    private:
    const art::mirror::Class* input_;
    art::mirror::Class* output_;
  };
 
  HeapFixupVisitor hfv(input, output);
-  art::Runtime::Current()->GetHeap()->VisitObjectsPaused(HeapFixupVisitor::AllObjectsCallback,
-                                                         &hfv);
+  auto object_visitor = [&](art::mirror::Object* obj) {
+    obj->VisitReferences<false>(hfv, hfv);  // Visit references, not native roots.
+  };
+  art::Runtime::Current()->GetHeap()->VisitObjectsPaused(object_visitor);
 }
 
 // A set of all the temp classes we have handed out. We have to fix up references to these.
diff --git a/runtime/openjdkjvmti/ti_heap.cc b/runtime/openjdkjvmti/ti_heap.cc
index 29658d9154..e53503fcb9 100644
--- a/runtime/openjdkjvmti/ti_heap.cc
+++ b/runtime/openjdkjvmti/ti_heap.cc
@@ -22,6 +22,7 @@
 #include "base/mutex.h"
 #include "class_linker.h"
 #include "gc/heap.h"
+#include "gc/heap-visit-objects-inl.h"
 #include "gc_root-inl.h"
 #include "java_frame_root_info.h"
 #include "jni_env_ext.h"
@@ -653,33 +654,25 @@ void HeapUtil::Unregister() {
   art::Runtime::Current()->RemoveSystemWeakHolder(&gIndexCachingTable);
 }
 
-template <typename Callback>
-struct IterateThroughHeapData {
-  IterateThroughHeapData(Callback _cb,
-                         ObjectTagTable* _tag_table,
-                         jvmtiEnv* _env,
-                         art::ObjPtr<art::mirror::Class> klass,
-                         jint _heap_filter,
-                         const jvmtiHeapCallbacks* _callbacks,
-                         const void* _user_data)
-      : cb(_cb),
-        tag_table(_tag_table),
-        heap_filter(_heap_filter),
-        filter_klass(klass),
-        env(_env),
-        callbacks(_callbacks),
-        user_data(_user_data),
-        stop_reports(false) {
+template <typename T>
+static jvmtiError DoIterateThroughHeap(T fn,
+                                       jvmtiEnv* env,
+                                       ObjectTagTable* tag_table,
+                                       jint heap_filter_int,
+                                       jclass klass,
+                                       const jvmtiHeapCallbacks* callbacks,
+                                       const void* user_data) {
+  if (callbacks == nullptr) {
+    return ERR(NULL_POINTER);
   }
 
-  static void ObjectCallback(art::mirror::Object* obj, void* arg)
-      REQUIRES_SHARED(art::Locks::mutator_lock_) {
-    IterateThroughHeapData* ithd = reinterpret_cast<IterateThroughHeapData*>(arg);
-    ithd->ObjectCallback(obj);
-  }
+  art::Thread* self = art::Thread::Current();
+  art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.
 
-  void ObjectCallback(art::mirror::Object* obj)
-      REQUIRES_SHARED(art::Locks::mutator_lock_) {
+  bool stop_reports = false;
+  const HeapFilter heap_filter(heap_filter_int);
+  art::ObjPtr<art::mirror::Class> filter_klass = soa.Decode<art::mirror::Class>(klass);
+  auto visitor = [&](art::mirror::Object* obj) REQUIRES_SHARED(art::Locks::mutator_lock_) {
     // Early return, as we can't really stop visiting.
     if (stop_reports) {
       return;
@@ -713,7 +706,7 @@ struct IterateThroughHeapData {
     }
 
     jlong saved_tag = tag;
-    jint ret = cb(obj, callbacks, class_tag, size, &tag, length, const_cast<void*>(user_data));
+    jint ret = fn(obj, callbacks, class_tag, size, &tag, length, const_cast<void*>(user_data));
 
     if (tag != saved_tag) {
       tag_table->Set(obj, tag);
@@ -734,44 +727,8 @@ struct IterateThroughHeapData {
     if (!stop_reports) {
      stop_reports = ReportPrimitiveField::Report(obj, tag_table, callbacks, user_data);
    }
-  }
-
-  Callback cb;
-  ObjectTagTable* tag_table;
-  const HeapFilter heap_filter;
-  art::ObjPtr<art::mirror::Class> filter_klass;
-  jvmtiEnv* env;
-  const jvmtiHeapCallbacks* callbacks;
-  const void* user_data;
-
-  bool stop_reports;
-};
-
-template <typename T>
-static jvmtiError DoIterateThroughHeap(T fn,
-                                       jvmtiEnv* env,
-                                       ObjectTagTable* tag_table,
-                                       jint heap_filter,
-                                       jclass klass,
-                                       const jvmtiHeapCallbacks* callbacks,
-                                       const void* user_data) {
-  if (callbacks == nullptr) {
-    return ERR(NULL_POINTER);
-  }
-
-  art::Thread* self = art::Thread::Current();
-  art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.
-
-  using Iterator = IterateThroughHeapData<T>;
-  Iterator ithd(fn,
-                tag_table,
-                env,
-                soa.Decode<art::mirror::Class>(klass),
-                heap_filter,
-                callbacks,
-                user_data);
-
-  art::Runtime::Current()->GetHeap()->VisitObjects(Iterator::ObjectCallback, &ithd);
+  };
+  art::Runtime::Current()->GetHeap()->VisitObjects(visitor);
 
   return ERR(NONE);
 }
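The ti_heap.cc hunk shows the largest payoff of the rewrite: the IterateThroughHeapData struct existed only to smuggle state (the tag table, the heap filter, the stop_reports flag) through the void* argument and back out via reinterpret_cast, and a capturing lambda carries that state directly as locals. A hypothetical, self-contained illustration of the same conversion; the names and types here are invented for the example, not taken from ART:

    // Sketch: state that would have lived in a callback-state struct is
    // instead captured by reference from the enclosing function.
    #include <cstddef>
    #include <vector>

    struct Object { bool is_class; };

    template <typename Visitor>
    void VisitObjects(const std::vector<Object*>& heap, Visitor&& visitor) {
      for (Object* obj : heap) {
        visitor(obj);
      }
    }

    size_t CountClasses(const std::vector<Object*>& heap) {
      size_t count = 0;           // Was a field on the helper struct.
      bool stop_reports = false;  // Control-flow state, also just captured.
      auto visitor = [&](Object* obj) {
        if (stop_reports) {
          return;  // As in the jvmti visitor: the walk can be skipped, not stopped.
        }
        if (obj->is_class) {
          ++count;
        }
      };
      VisitObjects(heap, visitor);
      return count;
    }

Where a visitor must be both the object visitor and a reference visitor for Object::VisitReferences, as in Heap::GetReferringObjects, the change keeps a small named class but moves it inside the function, so the helper's scope now matches its single use.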