author Nicolas Geoffray <ngeoffray@google.com> 2024-06-19 18:15:37 +0100
committer Nicolas Geoffray <ngeoffray@google.com> 2024-06-21 09:00:39 +0000
commit  90b7adbd418ea929b73647ada2b1aa9be3bfa424
tree    5a2a5edf8e9752e84d11a7b7ebd48c987d3a9cd1
parent  8ac93e5c7f2c16df7b803b8431aec05c863f830a
Don't invoke SetEntryPointFromQuickCompiledCode when relocating image.
The old entrypoint points to arbitrary memory which could be the
current JIT cache. SetEntryPointFromQuickCompiledCode has logic to put
old JIT entrypoints in the zombie list, and so we were wrongly adding
these bogus pointers to the zombie list.

Test: v2/mokey-core-devs/device-boot-health-check-mokey
Bug: 346052740
Change-Id: I01b224468733cdb1bf7d3387b64351e4db905681
-rw-r--r--  runtime/art_method-inl.h         16
-rw-r--r--  runtime/art_method.h             33
-rw-r--r--  runtime/gc/space/image_space.cc  23
3 files changed, 30 insertions(+), 42 deletions(-)
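
For context on the failure mode described in the commit message, the following is a
minimal standalone sketch, not the real ART code, of a checked entrypoint setter that
records old JIT entrypoints as zombies, contrasted with a raw write. The names
JitCache, SetEntryPointChecked, SetEntryPointRaw and zombie_code_ are hypothetical
stand-ins for whatever the real implementation uses.

// Sketch only: models the bug, does not reproduce ART internals.
#include <cstdint>
#include <unordered_set>

// Hypothetical model of a JIT code cache that tracks "zombie" code, i.e.
// compiled code whose method has already moved on to a newer entrypoint.
struct JitCache {
  uintptr_t begin = 0x70000000;
  uintptr_t end   = 0x70100000;
  std::unordered_set<uintptr_t> zombie_code_;

  bool Contains(const void* code) const {
    const uintptr_t p = reinterpret_cast<uintptr_t>(code);
    return p >= begin && p < end;
  }
};

struct Method {
  const void* entry_point_ = nullptr;

  // Checked setter: if the *old* entrypoint looks like JIT code, record it as
  // a zombie for later reclamation. This is the logic the commit message says
  // SetEntryPointFromQuickCompiledCode performs.
  void SetEntryPointChecked(const void* new_code, JitCache& jit) {
    const void* old_code = entry_point_;
    if (old_code != nullptr && jit.Contains(old_code)) {
      // Wrong during image relocation: the old value is an address from a
      // previous load of the image and may alias the current JIT cache.
      jit.zombie_code_.insert(reinterpret_cast<uintptr_t>(old_code));
    }
    entry_point_ = new_code;
  }

  // The fix: store the pointer directly, without interpreting the old value.
  void SetEntryPointRaw(const void* new_code) { entry_point_ = new_code; }
};
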
diff --git a/runtime/art_method-inl.h b/runtime/art_method-inl.h
index d87040ab49..a05d9e5cee 100644
--- a/runtime/art_method-inl.h
+++ b/runtime/art_method-inl.h
@@ -664,22 +664,6 @@ void ArtMethod::VisitArrayRoots(RootVisitorType& visitor,
}
}
-template <typename Visitor>
-inline void ArtMethod::UpdateEntrypoints(const Visitor& visitor, PointerSize pointer_size) {
- if (IsNative()) {
- const void* old_native_code = GetEntryPointFromJniPtrSize(pointer_size);
- const void* new_native_code = visitor(old_native_code);
- if (old_native_code != new_native_code) {
- SetEntryPointFromJniPtrSize(new_native_code, pointer_size);
- }
- }
- const void* old_code = GetEntryPointFromQuickCompiledCodePtrSize(pointer_size);
- const void* new_code = visitor(old_code);
- if (old_code != new_code) {
- SetEntryPointFromQuickCompiledCodePtrSize(new_code, pointer_size);
- }
-}
-
template <ReadBarrierOption kReadBarrierOption>
inline bool ArtMethod::StillNeedsClinitCheck() {
if (!NeedsClinitCheckBeforeCall()) {
diff --git a/runtime/art_method.h b/runtime/art_method.h
index 0a6cda65fd..41bf7fd62c 100644
--- a/runtime/art_method.h
+++ b/runtime/art_method.h
@@ -1048,11 +1048,6 @@ class EXPORT ArtMethod final {
std::string JniLongName()
REQUIRES_SHARED(Locks::mutator_lock_);
- // Update entry points by passing them through the visitor.
- template <typename Visitor>
- ALWAYS_INLINE void UpdateEntrypoints(const Visitor& visitor, PointerSize pointer_size)
- REQUIRES_SHARED(Locks::mutator_lock_);
-
// Visit the individual members of an ArtMethod. Used by imgdiag.
// As imgdiag does not support mixing instruction sets or pointer sizes (e.g., using imgdiag32
// to inspect 64-bit images, etc.), we can go beneath the accessors directly to the class members.
@@ -1087,6 +1082,19 @@ class EXPORT ArtMethod final {
return declaring_class_;
}
+ template<typename T>
+ ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, PointerSize pointer_size)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ static_assert(std::is_pointer<T>::value, "T must be a pointer type");
+ const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
+ if (pointer_size == PointerSize::k32) {
+ uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
+ *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr);
+ } else {
+ *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value);
+ }
+ }
+
protected:
// Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
// The class we are a part of.
@@ -1166,19 +1174,6 @@ class EXPORT ArtMethod final {
}
}
- template<typename T>
- ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, PointerSize pointer_size)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- static_assert(std::is_pointer<T>::value, "T must be a pointer type");
- const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
- if (pointer_size == PointerSize::k32) {
- uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
- *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr);
- } else {
- *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value);
- }
- }
-
static inline bool IsValidIntrinsicUpdate(uint32_t modifier) {
return (((modifier & kAccIntrinsic) == kAccIntrinsic) &&
((modifier & ~(kAccIntrinsic | kAccIntrinsicBits)) == 0) &&
@@ -1209,8 +1204,6 @@ class EXPORT ArtMethod final {
// Used by GetName and GetNameView to share common code.
const char* GetRuntimeMethodName() REQUIRES_SHARED(Locks::mutator_lock_);
- friend class RuntimeImageHelper; // For SetNativePointer.
-
DISALLOW_COPY_AND_ASSIGN(ArtMethod); // Need to use CopyFrom to deal with 32 vs 64 bits.
};
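
The hunk above moves SetNativePointer so it can be used from image_space.cc without a
friend declaration. Below is a standalone sketch of the same idea, storing a native
pointer into a field whose width is chosen at runtime, with simplified stand-ins for
PointerSize and MemberOffset; the real helper uses dchecked_integral_cast and direct
typed stores rather than memcpy.

// Sketch only: simplified stand-in for ArtMethod::SetNativePointer.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>

enum class PointerSize : size_t { k32 = 4, k64 = 8 };

// Writes `new_value` as a 32- or 64-bit integer at `base + offset`. 32-bit
// fields occur when a 32-bit image is handled on a 64-bit host.
inline void SetNativePointer(void* base, uint32_t offset, const void* new_value,
                             PointerSize pointer_size) {
  const uintptr_t addr = reinterpret_cast<uintptr_t>(base) + offset;
  const uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
  if (pointer_size == PointerSize::k32) {
    assert(ptr <= UINT32_MAX);  // The real code checks this via dchecked_integral_cast.
    const uint32_t narrow = static_cast<uint32_t>(ptr);
    std::memcpy(reinterpret_cast<void*>(addr), &narrow, sizeof(narrow));
  } else {
    const uint64_t wide = ptr;
    std::memcpy(reinterpret_cast<void*>(addr), &wide, sizeof(wide));
  }
}
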
diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc
index 4333d08452..e7dde64b5e 100644
--- a/runtime/gc/space/image_space.cc
+++ b/runtime/gc/space/image_space.cc
@@ -1405,14 +1405,25 @@ class ImageSpace::Loader {
method.SetImtConflictTable(new_table, kPointerSize);
}
}
- const void* old_code = method.GetEntryPointFromQuickCompiledCodePtrSize(kPointerSize);
- const void* new_code = forward_code(old_code);
- if (old_code != new_code) {
- method.SetEntryPointFromQuickCompiledCodePtrSize(new_code, kPointerSize);
- }
} else {
patch_object_visitor.PatchGcRoot(&method.DeclaringClassRoot());
- method.UpdateEntrypoints(forward_code, kPointerSize);
+ if (method.IsNative()) {
+ const void* old_native_code = method.GetEntryPointFromJniPtrSize(kPointerSize);
+ const void* new_native_code = forward_code(old_native_code);
+ if (old_native_code != new_native_code) {
+ method.SetEntryPointFromJniPtrSize(new_native_code, kPointerSize);
+ }
+ }
+ }
+ const void* old_code = method.GetEntryPointFromQuickCompiledCodePtrSize(kPointerSize);
+ const void* new_code = forward_code(old_code);
+ if (old_code != new_code) {
+ // Set the pointer directly instead of calling
+ // `SetEntryPointFromQuickCompiledCode` as the old pointer could be
+ // pointing to anything.
+ method.SetNativePointer(ArtMethod::EntryPointFromQuickCompiledCodeOffset(kPointerSize),
+ new_code,
+ kPointerSize);
}
}, target_base, kPointerSize);
}
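
To illustrate why the new code still compares old_code against new_code before
writing, here is a simplified model, not the actual ImageSpace::Loader machinery, of
a forward_code-style visitor: pointers that fall inside the not-yet-relocated image
range are shifted by the relocation delta, anything else is returned unchanged. The
ForwardCode name and field layout are assumptions for illustration.

// Sketch only: hypothetical model of a code-forwarding visitor.
#include <cstdint>

struct ForwardCode {
  uintptr_t old_begin;  // address range the image was compiled/laid out for
  uintptr_t old_end;
  intptr_t  delta;      // new_base - old_base

  const void* operator()(const void* code) const {
    const uintptr_t p = reinterpret_cast<uintptr_t>(code);
    if (p >= old_begin && p < old_end) {
      return reinterpret_cast<const void*>(p + delta);
    }
    return code;  // outside the image: left untouched, so old_code == new_code
  }
};

When the visitor returns the same pointer, the store is skipped; when it does not, the
patched hunk writes the forwarded value through SetNativePointer so the stale
pre-relocation value is never interpreted by SetEntryPointFromQuickCompiledCode.
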