author Lokesh Gidra <lokeshgidra@google.com> 2023-09-27 18:16:31 +0000
committer Lokesh Gidra <lokeshgidra@google.com> 2023-09-28 00:34:45 +0000
commit 5c5890106a8555a40b33c0a90fc7341a8bce0b8e
tree   e0372330231a13d4a076a3e020f317cee4b9b5da
parent 70d8890650b13d6f7022a722acf14031bb2dd280
Correct handling of ArtMethod array during class linking
In case of userfaultfd, we were setting the declaring_class_ of all ArtMethods to nullptr. This could cause problems if a GC is running concurrently and visiting the ArtMethod array at the same time. Instead, we should convert the array into a 'no GC-root array' so that the compaction code skips it. The marking phase will simply skip it as well, since it is no longer pointed to by the class.

Bug: 301880194
Test: test infra
Change-Id: I72007dd94ad71aa9ccc6005eb6d99b5a8c1ad7ee
-rw-r--r--  runtime/class_linker.cc     | 15
-rw-r--r--  runtime/linear_alloc-inl.h  | 12
-rw-r--r--  runtime/linear_alloc.h      |  7
3 files changed, 25 insertions, 9 deletions
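
A minimal sketch of the idea behind this change, using simplified stand-in names (AllocKind, TrackedAllocation, VisitForCompaction are illustrative, not ART's actual LinearAllocKind/TrackingHeader API): each tracked allocation carries a kind tag, and a compaction pass only visits allocations whose kind says they contain GC roots. Retagging the dangling old_methods array as 'no GC roots' therefore makes the pass skip it, instead of scrubbing each ArtMethod's declaring_class_ while a concurrent GC may still be walking the array.

#include <cstdint>

// Simplified stand-ins (not ART's real types): every tracked allocation
// carries a kind tag that the compaction pass consults before visiting.
enum class AllocKind : uint32_t { kArtMethodArray, kNoGCRoots };

struct TrackedAllocation {
  AllocKind kind;
  // ... payload with GC roots (e.g. declaring_class_ fields) follows ...
};

// Hypothetical compaction visitor: allocations retagged as kNoGCRoots are
// skipped, so their dangling GC roots are never read or updated.
void VisitForCompaction(TrackedAllocation& alloc) {
  if (alloc.kind == AllocKind::kNoGCRoots) {
    return;  // e.g. the old, dangling ArtMethod array after ReallocMethods()
  }
  // ... update the GC roots stored inside the allocation ...
}

int main() {
  TrackedAllocation old_methods{AllocKind::kArtMethodArray};
  // What the fix does conceptually: retag the old array instead of nulling
  // each declaring_class_ while a concurrent GC may still be walking it.
  old_methods.kind = AllocKind::kNoGCRoots;
  VisitForCompaction(old_methods);  // now a no-op for this allocation
  return 0;
}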
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index c543cbd62d..2c22a906da 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -7951,9 +7951,9 @@ void ClassLinker::LinkMethodsHelper<kPointerSize>::ReallocMethods(ObjPtr<mirror:
kMethodSize,
kMethodAlignment);
const size_t old_methods_ptr_size = (old_methods != nullptr) ? old_size : 0;
- auto* methods = reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
- class_linker_->GetAllocatorForClassLoader(klass->GetClassLoader())->Realloc(
- self_, old_methods, old_methods_ptr_size, new_size, LinearAllocKind::kArtMethodArray));
+ LinearAlloc* allocator = class_linker_->GetAllocatorForClassLoader(klass->GetClassLoader());
+ auto* methods = reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(allocator->Realloc(
+ self_, old_methods, old_methods_ptr_size, new_size, LinearAllocKind::kArtMethodArray));
CHECK(methods != nullptr); // Native allocation failure aborts.
if (methods != old_methods) {
@@ -7966,12 +7966,9 @@ void ClassLinker::LinkMethodsHelper<kPointerSize>::ReallocMethods(ObjPtr<mirror:
++out;
}
} else if (gUseUserfaultfd) {
- // Clear the declaring class of the old dangling method array so that GC doesn't
- // try to update them, which could cause crashes in userfaultfd GC due to
- // checks in post-compact address computation.
- for (auto& m : klass->GetMethods(kPointerSize)) {
- m.SetDeclaringClass(nullptr);
- }
+ // In order to make compaction code skip updating the declaring_class_ in
+ // old_methods, convert it into a 'no GC-root' array.
+ allocator->ConvertToNoGcRoots(old_methods, LinearAllocKind::kArtMethodArray);
}
}
diff --git a/runtime/linear_alloc-inl.h b/runtime/linear_alloc-inl.h
index 13dbea11d0..7c81352cd9 100644
--- a/runtime/linear_alloc-inl.h
+++ b/runtime/linear_alloc-inl.h
@@ -40,6 +40,18 @@ inline void LinearAlloc::SetFirstObject(void* begin, size_t bytes) const {
down_cast<TrackedArena*>(arena)->SetFirstObject(static_cast<uint8_t*>(begin), end);
}
+inline void LinearAlloc::ConvertToNoGcRoots(void* ptr, LinearAllocKind orig_kind) {
+ if (track_allocations_ && ptr != nullptr) {
+ TrackingHeader* header = static_cast<TrackingHeader*>(ptr);
+ header--;
+ DCHECK_EQ(header->GetKind(), orig_kind);
+ DCHECK_GT(header->GetSize(), 0u);
+ // 16-byte allocations are not supported yet.
+ DCHECK(!header->Is16Aligned());
+ header->SetKind(LinearAllocKind::kNoGCRoots);
+ }
+}
+
inline void LinearAlloc::SetupForPostZygoteFork(Thread* self) {
MutexLock mu(self, lock_);
DCHECK(track_allocations_);
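
The `header--` in ConvertToNoGcRoots() relies on the tracking header sitting immediately before the pointer the allocator hands back to callers. A small self-contained sketch of that layout, with illustrative names (Header, AllocateTracked) rather than ART's:

#include <cassert>
#include <cstdint>
#include <cstdlib>

struct Header {
  uint32_t kind;
  uint32_t size;
};

// Place a Header directly in front of the payload and return only the
// payload pointer, the way a tracking allocator would.
void* AllocateTracked(uint32_t bytes, uint32_t kind) {
  auto* header = static_cast<Header*>(std::malloc(sizeof(Header) + bytes));
  header->kind = kind;
  header->size = bytes;
  return header + 1;  // callers never see the header
}

int main() {
  void* payload = AllocateTracked(64, /*kind=*/1);

  // Recover the metadata the same way the inline function does: treat the
  // payload pointer as a Header* and step back over one Header.
  Header* header = static_cast<Header*>(payload);
  header--;
  assert(header->kind == 1);
  assert(header->size == 64);

  std::free(header);  // the header is the true start of the allocation
  return 0;
}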
diff --git a/runtime/linear_alloc.h b/runtime/linear_alloc.h
index c40af8ad46..c81077abfc 100644
--- a/runtime/linear_alloc.h
+++ b/runtime/linear_alloc.h
@@ -56,9 +56,13 @@ class TrackingHeader final {
bool Is16Aligned() const { return size_ & kIs16Aligned; }
private:
+ void SetKind(LinearAllocKind kind) { kind_ = kind; }
+
LinearAllocKind kind_;
uint32_t size_;
+ friend class LinearAlloc; // For SetKind()
+
DISALLOW_IMPLICIT_CONSTRUCTORS(TrackingHeader);
};
@@ -93,6 +97,9 @@ class LinearAlloc {
// Force arena allocator to ask for a new arena on next allocation. This
// is to preserve private/shared clean pages across zygote fork.
void SetupForPostZygoteFork(Thread* self) REQUIRES(!lock_);
+ // Convert the given allocated object into a `no GC-root` so that compaction
+ // skips it. Currently only used during class linking for ArtMethod array.
+ void ConvertToNoGcRoots(void* ptr, LinearAllocKind orig_kind);
// Return true if the linear alloc contains an address.
bool Contains(void* ptr) const REQUIRES(!lock_);
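
A note on the header change above: SetKind() stays private and LinearAlloc is granted friendship, so only the allocator itself can retag an allocation. A small sketch of that access pattern with simplified stand-in names (Kind, Header, Allocator are not ART's types):

#include <cstdint>

enum class Kind : uint32_t { kArtMethodArray, kNoGCRoots };

class Allocator;  // forward declaration for the friend grant below

class Header {
 public:
  Kind GetKind() const { return kind_; }

 private:
  void SetKind(Kind kind) { kind_ = kind; }  // callable only by friends

  Kind kind_ = Kind::kArtMethodArray;

  friend class Allocator;  // mirrors `friend class LinearAlloc;` above
};

class Allocator {
 public:
  // The single sanctioned path for retagging a header.
  static void ConvertToNoGcRoots(Header& header) {
    header.SetKind(Kind::kNoGCRoots);
  }
};

int main() {
  Header header;
  // header.SetKind(Kind::kNoGCRoots);    // would not compile: SetKind is private
  Allocator::ConvertToNoGcRoots(header);  // compiles: Allocator is a friend
  return header.GetKind() == Kind::kNoGCRoots ? 0 : 1;
}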