/* Copyright (C) 2017 The Android Open Source Project
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This file implements interfaces from the file jvmti.h. This implementation
 * is licensed under the same terms as the file jvmti.h. The
 * copyright and license information for the file jvmti.h follows.
 *
 * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#ifndef ART_OPENJDKJVMTI_JVMTI_WEAK_TABLE_INL_H_
#define ART_OPENJDKJVMTI_JVMTI_WEAK_TABLE_INL_H_

#include "jvmti_weak_table.h"

#include <limits>

#include <android-base/logging.h>

#include "art_jvmti.h"
#include "gc/allocation_listener.h"
#include "instrumentation.h"
#include "jni/jni_env_ext-inl.h"
#include "jvmti_allocator.h"
#include "mirror/class.h"
#include "mirror/object.h"
#include "nativehelper/scoped_local_ref.h"
#include "runtime.h"

namespace openjdkjvmti {

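// JvmtiWeakTable<T> maps heap objects, held as weak GcRoots, to tags of type T. Entries do not
// keep their objects alive: the GC sweeps the table together with the other system weaks (see
// Sweep below), and every access synchronizes with the GC through allow_disallow_lock_ / Wait.
//
// Illustrative use (a sketch only, assuming the declarations in jvmti_weak_table.h such as
// Set/GetTag/Remove, and a jlong tag type as used for the JVMTI object tag table):
//
//   JvmtiWeakTable<jlong>* table = ...;
//   jlong tag = 0;
//   table->Set(obj, 42);        // Tag obj; returns true if an existing tag was overwritten.
//   table->GetTag(obj, &tag);   // -> true, tag == 42.
//   table->Remove(obj, &tag);   // Untag; the entry is dropped from the table.
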
template <typename T>
void JvmtiWeakTable<T>::Lock() {
  allow_disallow_lock_.ExclusiveLock(art::Thread::Current());
}
template <typename T>
void JvmtiWeakTable<T>::Unlock() {
  allow_disallow_lock_.ExclusiveUnlock(art::Thread::Current());
}
template <typename T>
void JvmtiWeakTable<T>::AssertLocked() {
  allow_disallow_lock_.AssertHeld(art::Thread::Current());
}

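// Re-reads every key through a read barrier so that the table only holds to-space references,
// and records this in update_since_last_sweep_ so the slow paths below do it at most once per
// GC cycle.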
template <typename T>
void JvmtiWeakTable<T>::UpdateTableWithReadBarrier() {
  update_since_last_sweep_ = true;

  auto WithReadBarrierUpdater = [&](const art::GcRoot<art::mirror::Object>& original_root,
                                    [[maybe_unused]] art::mirror::Object* original_obj)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    return original_root.Read<art::kWithReadBarrier>();
  };

  UpdateTableWith<decltype(WithReadBarrierUpdater), kIgnoreNull>(WithReadBarrierUpdater);
}

template <typename T>
bool JvmtiWeakTable<T>::GetTagSlowPath(art::Thread* self,
                                       art::ObjPtr<art::mirror::Object> obj,
                                       T* result) {
  // Under concurrent GC, there is a window between moving objects and sweeping of system
  // weaks in which mutators are active. We may receive a to-space object pointer in obj,
  // but still have from-space pointers in the table. Explicitly update the table once.
  // Note: this will keep *all* objects in the table live, but should be a rare occurrence.
  UpdateTableWithReadBarrier();
  return GetTagLocked(self, obj, result);
}

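// Remove takes allow_disallow_lock_ and blocks in Wait() until the GC allows access to the
// system weaks again; the *Locked overloads expect the caller to already hold the lock. On
// success the removed tag is returned through the optional out parameter.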
template <typename T>
bool JvmtiWeakTable<T>::Remove(art::ObjPtr<art::mirror::Object> obj, /* out */ T* tag) {
  art::Thread* self = art::Thread::Current();
  art::MutexLock mu(self, allow_disallow_lock_);
  Wait(self);

  return RemoveLocked(self, obj, tag);
}
template <typename T>
bool JvmtiWeakTable<T>::RemoveLocked(art::ObjPtr<art::mirror::Object> obj, T* tag) {
  art::Thread* self = art::Thread::Current();
  allow_disallow_lock_.AssertHeld(self);
  Wait(self);

  return RemoveLocked(self, obj, tag);
}

template <typename T>
bool JvmtiWeakTable<T>::RemoveLocked(art::Thread* self,
                                     art::ObjPtr<art::mirror::Object> obj,
                                     T* tag) {
  auto it = tagged_objects_.find(art::GcRoot<art::mirror::Object>(obj));
  if (it != tagged_objects_.end()) {
    if (tag != nullptr) {
      *tag = it->second;
    }
    tagged_objects_.erase(it);
    return true;
  }

  if (art::gUseReadBarrier && self->GetIsGcMarking() && !update_since_last_sweep_) {
    // Under concurrent GC, there is a window between moving objects and sweeping of system
    // weaks in which mutators are active. We may receive a to-space object pointer in obj,
    // but still have from-space pointers in the table. Explicitly update the table once.
    // Note: this will keep *all* objects in the table live, but should be a rare occurrence.

    // Update the table.
    UpdateTableWithReadBarrier();

    // And try again.
    return RemoveLocked(self, obj, tag);
  }

  // Not in here.
  return false;
}

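// Set tags obj with new_tag, overwriting any existing tag. Returns true if an existing entry was
// updated and false if a new entry was inserted. SetLocked expects allow_disallow_lock_ to be
// held by the caller.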
template <typename T>
bool JvmtiWeakTable<T>::Set(art::ObjPtr<art::mirror::Object> obj, T new_tag) {
  art::Thread* self = art::Thread::Current();
  art::MutexLock mu(self, allow_disallow_lock_);
  Wait(self);

  return SetLocked(self, obj, new_tag);
}
template <typename T>
bool JvmtiWeakTable<T>::SetLocked(art::ObjPtr<art::mirror::Object> obj, T new_tag) {
  art::Thread* self = art::Thread::Current();
  allow_disallow_lock_.AssertHeld(self);
  Wait(self);

  return SetLocked(self, obj, new_tag);
}

template <typename T>
bool JvmtiWeakTable<T>::SetLocked(art::Thread* self,
                                  art::ObjPtr<art::mirror::Object> obj,
                                  T new_tag) {
  auto it = tagged_objects_.find(art::GcRoot<art::mirror::Object>(obj));
  if (it != tagged_objects_.end()) {
    it->second = new_tag;
    return true;
  }

  if (art::gUseReadBarrier && self->GetIsGcMarking() && !update_since_last_sweep_) {
    // Under concurrent GC, there is a window between moving objects and sweeping of system
    // weaks in which mutators are active. We may receive a to-space object pointer in obj,
    // but still have from-space pointers in the table. Explicitly update the table once.
    // Note: this will keep *all* objects in the table live, but should be a rare occurrence.

    // Update the table.
    UpdateTableWithReadBarrier();

    // And try again.
    return SetLocked(self, obj, new_tag);
  }

  // New element.
  auto insert_it = tagged_objects_.emplace(art::GcRoot<art::mirror::Object>(obj), new_tag);
  DCHECK(insert_it.second);
  return false;
}

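// Called by the GC while sweeping system weaks. Entries whose object is no longer marked are
// dropped (and reported through HandleNullSweep if the subclass opts in via
// DoesHandleNullOnSweep); surviving entries are updated to the objects' new addresses.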
template <typename T>
void JvmtiWeakTable<T>::Sweep(art::IsMarkedVisitor* visitor) {
  if (DoesHandleNullOnSweep()) {
    SweepImpl<true>(visitor);
  } else {
    SweepImpl<false>(visitor);
  }

  // Under concurrent GC, there is a window between moving objects and sweeping of system
  // weaks in which mutators are active. We may receive a to-space object pointer in obj,
  // but still have from-space pointers in the table. We explicitly update the table then
  // to ensure we compare against to-space pointers. But we want to do this only once. Once
  // sweeping is done, we know all objects are to-space pointers until the next GC cycle,
  // so we re-enable the explicit update for the next marking.
  update_since_last_sweep_ = false;
}

template <typename T>
template <bool kHandleNull>
void JvmtiWeakTable<T>::SweepImpl(art::IsMarkedVisitor* visitor) {
  art::Thread* self = art::Thread::Current();
  art::MutexLock mu(self, allow_disallow_lock_);

  auto IsMarkedUpdater = [&]([[maybe_unused]] const art::GcRoot<art::mirror::Object>& original_root,
                             art::mirror::Object* original_obj) {
    return visitor->IsMarked(original_obj);
  };

  UpdateTableWith<decltype(IsMarkedUpdater),
                  kHandleNull ? kCallHandleNull : kRemoveNull>(IsMarkedUpdater);
}

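// Applies |updater| to every entry. The updater receives the stored root plus its object read
// without a read barrier and returns the replacement object. kTargetNull selects what happens
// when the updater returns null: kIgnoreNull keeps the entry untouched, kRemoveNull drops it,
// and kCallHandleNull drops it and passes the tag to HandleNullSweep.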
template <typename T>
template <typename Updater, typename JvmtiWeakTable<T>::TableUpdateNullTarget kTargetNull>
ALWAYS_INLINE inline void JvmtiWeakTable<T>::UpdateTableWith(Updater& updater) {
  // We can't emplace the updated entries back into the map while iterating, as a to-space
  // reference could be the same as some from-space object reference still in the map, causing
  // correctness issues. The problem doesn't arise if all updated <K,V> pairs are inserted after
  // the loop, as by then such from-space object references have also been taken care of.

  // Side vector to hold node handles of entries which are updated.
  std::vector<typename TagMap::node_type> updated_node_handles;

  for (auto it = tagged_objects_.begin(); it != tagged_objects_.end();) {
    DCHECK(!it->first.IsNull());
    art::mirror::Object* original_obj = it->first.template Read<art::kWithoutReadBarrier>();
    art::mirror::Object* target_obj = updater(it->first, original_obj);
    if (original_obj != target_obj) {
      if (kTargetNull == kIgnoreNull && target_obj == nullptr) {
        // Ignore null target, don't do anything.
      } else {
        auto nh = tagged_objects_.extract(it++);
        DCHECK(!nh.empty());
        if (target_obj != nullptr) {
          nh.key() = art::GcRoot<art::mirror::Object>(target_obj);
          updated_node_handles.push_back(std::move(nh));
        } else if (kTargetNull == kCallHandleNull) {
          HandleNullSweep(nh.mapped());
        }
        continue;  // Iterator already updated above.
      }
    }
    it++;
  }
  while (!updated_node_handles.empty()) {
    auto ret = tagged_objects_.insert(std::move(updated_node_handles.back()));
    DCHECK(ret.inserted);
    updated_node_handles.pop_back();
  }
}

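// Minimal growable array backed by the JVMTI allocator. Unlike std::vector, Release() hands the
// raw backing storage over to the caller, so it can be returned directly as a JVMTI-allocated
// result array that the agent later frees with the environment's Deallocate.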
template <typename T>
template <typename Storage, class Allocator>
struct JvmtiWeakTable<T>::ReleasableContainer {
  using allocator_type = Allocator;

  explicit ReleasableContainer(const allocator_type& alloc, size_t reserve = 10)
      : allocator(alloc),
        data(reserve > 0 ? allocator.allocate(reserve) : nullptr),
        size(0),
        capacity(reserve) {
  }

  ~ReleasableContainer() {
    if (data != nullptr) {
      allocator.deallocate(data, capacity);
      capacity = 0;
      size = 0;
    }
  }

  Storage* Release() {
    Storage* tmp = data;

    data = nullptr;
    size = 0;
    capacity = 0;

    return tmp;
  }

  void Resize(size_t new_capacity) {
    CHECK_GT(new_capacity, capacity);

    Storage* tmp = allocator.allocate(new_capacity);
    DCHECK(tmp != nullptr);
    if (data != nullptr) {
      memcpy(tmp, data, sizeof(Storage) * size);
    }
    Storage* old = data;
    data = tmp;
    allocator.deallocate(old, capacity);
    capacity = new_capacity;
  }

  void Pushback(const Storage& elem) {
    if (size == capacity) {
      size_t new_capacity = 2 * capacity + 1;
      Resize(new_capacity);
    }
    data[size++] = elem;
  }

  Allocator allocator;
  Storage* data;
  size_t size;
  size_t capacity;
};

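// Backs the JVMTI GetObjectsWithTags call: returns every live object whose tag matches one of
// |tags| (or all tagged objects when tag_count is 0), together with the matching tags if
// requested. Result arrays are allocated through the environment's JvmtiAllocator.
//
// Illustrative agent-side call that ends up being served by a table like this one (a sketch,
// error handling omitted, assuming a jlong-tagged table backs the environment):
//
//   jlong wanted[] = { 42 };
//   jint count = 0;
//   jobject* objects = nullptr;
//   jlong* found_tags = nullptr;
//   jvmti_env->GetObjectsWithTags(1, wanted, &count, &objects, &found_tags);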
template <typename T>
jvmtiError JvmtiWeakTable<T>::GetTaggedObjects(jvmtiEnv* jvmti_env,
                                               jint tag_count,
                                               const T* tags,
                                               jint* count_ptr,
                                               jobject** object_result_ptr,
                                               T** tag_result_ptr) {
  if (tag_count < 0) {
    return ERR(ILLEGAL_ARGUMENT);
  }
  // Check the pointer arguments before tags is dereferenced below.
  if (tags == nullptr) {
    return ERR(NULL_POINTER);
  }
  if (count_ptr == nullptr) {
    return ERR(NULL_POINTER);
  }
  if (tag_count > 0) {
    for (size_t i = 0; i != static_cast<size_t>(tag_count); ++i) {
      if (tags[i] == 0) {
        return ERR(ILLEGAL_ARGUMENT);
      }
    }
  }

  art::Thread* self = art::Thread::Current();
  art::MutexLock mu(self, allow_disallow_lock_);
  Wait(self);

  art::JNIEnvExt* jni_env = self->GetJniEnv();

  constexpr size_t kDefaultSize = 10;
  size_t initial_object_size;
  size_t initial_tag_size;
  if (tag_count == 0) {
    initial_object_size = (object_result_ptr != nullptr) ? tagged_objects_.size() : 0;
    initial_tag_size = (tag_result_ptr != nullptr) ? tagged_objects_.size() : 0;
  } else {
    initial_object_size = initial_tag_size = kDefaultSize;
  }
  JvmtiAllocator<void> allocator(jvmti_env);
  ReleasableContainer<jobject, JvmtiAllocator<jobject>> selected_objects(allocator,
                                                                         initial_object_size);
  ReleasableContainer<T, JvmtiAllocator<T>> selected_tags(allocator, initial_tag_size);

  size_t count = 0;
  for (auto& pair : tagged_objects_) {
    bool select;
    if (tag_count > 0) {
      select = false;
      for (size_t i = 0; i != static_cast<size_t>(tag_count); ++i) {
        if (tags[i] == pair.second) {
          select = true;
          break;
        }
      }
    } else {
      select = true;
    }

    if (select) {
      art::ObjPtr<art::mirror::Object> obj = pair.first.template Read<art::kWithReadBarrier>();
      if (obj != nullptr) {
        count++;
        if (object_result_ptr != nullptr) {
          selected_objects.Pushback(jni_env->AddLocalReference<jobject>(obj));
        }
        if (tag_result_ptr != nullptr) {
          selected_tags.Pushback(pair.second);
        }
      }
    }
  }

  if (object_result_ptr != nullptr) {
    *object_result_ptr = selected_objects.Release();
  }
  if (tag_result_ptr != nullptr) {
    *tag_result_ptr = selected_tags.Release();
  }
  *count_ptr = static_cast<jint>(count);
  return ERR(NONE);
}

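// Linear scan that returns the first live object currently carrying |tag|, or nullptr if there
// is none.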
template <typename T>
art::ObjPtr<art::mirror::Object> JvmtiWeakTable<T>::Find(T tag) {
  art::Thread* self = art::Thread::Current();
  art::MutexLock mu(self, allow_disallow_lock_);
  Wait(self);

  for (auto& pair : tagged_objects_) {
    if (tag == pair.second) {
      art::ObjPtr<art::mirror::Object> obj = pair.first.template Read<art::kWithReadBarrier>();
      if (obj != nullptr) {
        return obj;
      }
    }
  }
  return nullptr;
}

}  // namespace openjdkjvmti

#endif  // ART_OPENJDKJVMTI_JVMTI_WEAK_TABLE_INL_H_