path: root/runtime/mirror/class_ext.h
author Lokesh Gidra <lokeshgidra@google.com> 2022-01-28 12:30:31 -0800
committer Lokesh Gidra <lokeshgidra@google.com> 2022-08-10 18:06:05 +0000
commit b7607c2fd67e12e998aebd71db38414ffc65621b (patch)
tree 0b816edc36dc3a696c366e1e5922018accbde5b7 /runtime/mirror/class_ext.h
parent 5d73d6b3e4de8e7a1cb1aa6c8683a6afac7725be (diff)
Update native gc-roots separately in compaction pause
The concurrent compaction algorithm requires all GC roots to be updated to post-compact addresses before resuming mutators for concurrent compaction. Therefore, unlike CC, we cannot update native roots in classes/dex-caches/class-loaders while visiting references (VisitReferences) on heap objects. This CL separates the two and updates all the gc-roots in the compaction pause.

Bug: 160737021
Test: art/test/testrunner/testrunner.py
Change-Id: I8a57472ba49b9dc30bc0f41a7db3f5efa7eafd9a
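To illustrate the idea, here is a minimal sketch (hypothetical names only; Root, PostCompactAddress, and UpdateNativeRootsInPause are invented for illustration and are not ART's real API): every native gc-root is rewritten to its post-compact address while mutators are still suspended, instead of being fixed up lazily from VisitReferences() as the CC collector does.

    // Hypothetical sketch: Root, PostCompactAddress, and UpdateNativeRootsInPause
    // are illustrative names, not ART's real declarations.
    #include <unordered_map>
    #include <vector>

    struct Root { void** slot; };  // a native gc-root: a raw pointer-sized slot

    // Placeholder forwarding table from pre-compact to post-compact addresses.
    std::unordered_map<void*, void*> gForwardingTable;

    void* PostCompactAddress(void* old_addr) {
      auto it = gForwardingTable.find(old_addr);
      return it != gForwardingTable.end() ? it->second : old_addr;
    }

    // Runs inside the compaction pause (mutators suspended): all native roots in
    // classes, dex-caches, and class-loaders are rewritten to their post-compact
    // addresses before mutators resume.
    void UpdateNativeRootsInPause(const std::vector<Root>& native_roots) {
      for (const Root& r : native_roots) {
        *r.slot = PostCompactAddress(*r.slot);
      }
    }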
Diffstat (limited to 'runtime/mirror/class_ext.h')
-rw-r--r--  runtime/mirror/class_ext.h | 16
1 file changed, 15 insertions(+), 1 deletion(-)
diff --git a/runtime/mirror/class_ext.h b/runtime/mirror/class_ext.h
index b805ea0582..b025eb21af 100644
--- a/runtime/mirror/class_ext.h
+++ b/runtime/mirror/class_ext.h
@@ -27,6 +27,7 @@
namespace art {
struct ClassExtOffsets;
+class DexCacheVisitor;
namespace mirror {
@@ -46,6 +47,8 @@ class MANAGED ClassExt : public Object {
ObjPtr<Throwable> GetErroneousStateError() REQUIRES_SHARED(Locks::mutator_lock_);
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
ObjPtr<ObjectArray<DexCache>> GetObsoleteDexCaches() REQUIRES_SHARED(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
@@ -126,10 +129,21 @@ class MANAGED ClassExt : public Object {
static bool ExtendObsoleteArrays(Handle<ClassExt> h_this, Thread* self, uint32_t increase)
REQUIRES_SHARED(Locks::mutator_lock_);
- template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier, class Visitor>
+ template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
+ bool kVisitProxyMethod = true,
+ class Visitor>
inline void VisitNativeRoots(Visitor& visitor, PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
+ // NO_THREAD_SAFETY_ANALYSIS because dex_lock and heap_bitmap_lock_ are both at a
+ // higher lock level than the class-table's lock (kClassLoaderClassesLock),
+ // which is already held when this is called.
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
+ inline void VisitDexCaches(DexCacheVisitor& visitor)
+ NO_THREAD_SAFETY_ANALYSIS
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier, class Visitor>
inline void VisitMethods(Visitor visitor, PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);