diff options
author | 2022-09-12 13:02:05 -0700 | |
---|---|---|
committer | 2022-09-14 22:17:07 +0000 | |
commit | 28443a7075e64adee1cc19c3bb065cc5c9ecb1d0 (patch) | |
tree | 880b3243b7946a0a855ec3f01019326e55166b2a | |
parent | 03ac3eb0fc36be97f301ac60e85e1bb7ca52fa12 (diff) |
Use side vector while sweeping jvmti-weak-table
Previously, the code erased every <K,V> pair whose object reference
(key) had changed, and then re-emplaced the entries whose objects had
been moved. This is incompatible with in-place compaction algorithms,
because a to-space object reference being added to the map may already
be present in the map as a from-space reference.
In this CL, we change it to use a side vector that holds all updated
<K,V> pairs until the end of the map traversal, and only then insert
them back into the map.
Bug: 160737021
Test: ART_USE_READ_BARRIER=false art/test/testrunner/testrunner.py
--host --debug -t 905-object-free
Change-Id: Ib416ca0b21d4a9fc44721495b798f82be9387900
-rw-r--r-- | openjdkjvmti/jvmti_weak_table-inl.h | 34 | ||||
-rw-r--r-- | openjdkjvmti/jvmti_weak_table.h | 14 | ||||
-rw-r--r-- | test/knownfailures.json | 3 |
3 files changed, 26 insertions, 25 deletions
diff --git a/openjdkjvmti/jvmti_weak_table-inl.h b/openjdkjvmti/jvmti_weak_table-inl.h index 17578d28f2..c5663e5475 100644 --- a/openjdkjvmti/jvmti_weak_table-inl.h +++ b/openjdkjvmti/jvmti_weak_table-inl.h @@ -210,13 +210,13 @@ void JvmtiWeakTable<T>::SweepImpl(art::IsMarkedVisitor* visitor) { template <typename T> template <typename Updater, typename JvmtiWeakTable<T>::TableUpdateNullTarget kTargetNull> ALWAYS_INLINE inline void JvmtiWeakTable<T>::UpdateTableWith(Updater& updater) { - // We optimistically hope that elements will still be well-distributed when re-inserting them. - // So play with the map mechanics, and postpone rehashing. This avoids the need of a side - // vector and two passes. - float original_max_load_factor = tagged_objects_.max_load_factor(); - tagged_objects_.max_load_factor(std::numeric_limits<float>::max()); - // For checking that a max load-factor actually does what we expect. - size_t original_bucket_count = tagged_objects_.bucket_count(); + // We can't emplace within the map as a to-space reference could be the same as some + // from-space object reference in the map, causing correctness issues. The problem + // doesn't arise if all updated <K,V> pairs are inserted after the loop as by then such + // from-space object references would also have been taken care of. + + // Side vector to hold node handles of entries which are updated. + std::vector<typename TagMap::node_type> updated_node_handles; for (auto it = tagged_objects_.begin(); it != tagged_objects_.end();) { DCHECK(!it->first.IsNull()); @@ -226,22 +226,24 @@ ALWAYS_INLINE inline void JvmtiWeakTable<T>::UpdateTableWith(Updater& updater) { if (kTargetNull == kIgnoreNull && target_obj == nullptr) { // Ignore null target, don't do anything. 
} else { - T tag = it->second; - it = tagged_objects_.erase(it); + auto nh = tagged_objects_.extract(it++); + DCHECK(!nh.empty()); if (target_obj != nullptr) { - tagged_objects_.emplace(art::GcRoot<art::mirror::Object>(target_obj), tag); - DCHECK_EQ(original_bucket_count, tagged_objects_.bucket_count()); + nh.key() = art::GcRoot<art::mirror::Object>(target_obj); + updated_node_handles.push_back(std::move(nh)); } else if (kTargetNull == kCallHandleNull) { - HandleNullSweep(tag); + HandleNullSweep(nh.mapped()); } - continue; // Iterator was implicitly updated by erase. + continue; // Iterator already updated above. } } it++; } - - tagged_objects_.max_load_factor(original_max_load_factor); - // TODO: consider rehash here. + while (!updated_node_handles.empty()) { + auto ret = tagged_objects_.insert(std::move(updated_node_handles.back())); + DCHECK(ret.inserted); + updated_node_handles.pop_back(); + } } template <typename T> diff --git a/openjdkjvmti/jvmti_weak_table.h b/openjdkjvmti/jvmti_weak_table.h index afa2d1da0a..674b2a3d52 100644 --- a/openjdkjvmti/jvmti_weak_table.h +++ b/openjdkjvmti/jvmti_weak_table.h @@ -211,13 +211,13 @@ class JvmtiWeakTable : public art::gc::SystemWeakHolder { }; using TagAllocator = JvmtiAllocator<std::pair<const art::GcRoot<art::mirror::Object>, T>>; - std::unordered_map<art::GcRoot<art::mirror::Object>, - T, - HashGcRoot, - EqGcRoot, - TagAllocator> tagged_objects_ - GUARDED_BY(allow_disallow_lock_) - GUARDED_BY(art::Locks::mutator_lock_); + using TagMap = std::unordered_map<art::GcRoot<art::mirror::Object>, + T, + HashGcRoot, + EqGcRoot, + TagAllocator>; + + TagMap tagged_objects_ GUARDED_BY(allow_disallow_lock_) GUARDED_BY(art::Locks::mutator_lock_); // To avoid repeatedly scanning the whole table, remember if we did that since the last sweep. 
bool update_since_last_sweep_; }; diff --git a/test/knownfailures.json b/test/knownfailures.json index 984f3fcfc8..9e457d05db 100644 --- a/test/knownfailures.json +++ b/test/knownfailures.json @@ -1336,8 +1336,7 @@ "tests": ["2009-structural-local-ref", "2035-structural-native-method", "2036-structural-subclass-shadow", - "2040-huge-native-alloc", - "905-object-free"], + "2040-huge-native-alloc"], "env_vars": {"ART_USE_READ_BARRIER": "false"}, "bug": "b/242181443", "description": ["Tests temporarily disabled for userfaultfd GC. Remove once native GC-root updation is implemented."] |