author Nicolas Geoffray <ngeoffray@google.com> 2024-06-21 11:42:19 +0100
committer Nicolas Geoffray <ngeoffray@google.com> 2024-06-21 12:50:14 +0000
commit 818c357ec4f724b3a3a7b0a02553d943e8f1e32e (patch)
tree d21fc8cdc4a353aec9c722199e40615fb5520e79
parent c0e77cbf0ba39c1d8e59d6abeadd40cabda61620 (diff)
Make SetEntryPointFromQuickCompiledCode just update the field.
So that most uses (those that won't have zombie JIT code) do not need to
do a compare-and-exchange.

Test: test.py
Change-Id: I40e4175a9b536177d52a21c193c7c5af7dd81d7c
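In short: SetEntryPointFromQuickCompiledCodePtrSize becomes a plain field write, and the compare-and-exchange plus zombie-code bookkeeping move into instrumentation's UpdateEntryPoints, the one path that can replace live JIT code. Below is a minimal sketch of that split; Method, CodeCache, and the single atomic entrypoint field are simplified stand-ins, not the real ART types, and the real code selects a 32- or 64-bit store based on PointerSize rather than using std::atomic directly.

#include <atomic>
#include <cstdint>

// Simplified stand-ins for the ART types; placeholders, not the real API.
struct CodeCache {
  bool ContainsPc(const void*) const { return true; }  // is this JIT code?
  void AddZombieCode(void*, const void*) {}            // retire old JIT code
};

struct Method {
  std::atomic<std::uintptr_t> entry_point_{0};

  // After this change: a plain relaxed store. Callers that never replace
  // live JIT code (image loading, image writing) take this cheap path.
  void SetEntryPointFromQuickCompiledCode(const void* code) {
    entry_point_.store(reinterpret_cast<std::uintptr_t>(code),
                       std::memory_order_relaxed);
  }
};

// Only instrumentation, which may swap out live JIT code, still pays for
// the compare-and-exchange and the zombie bookkeeping.
void UpdateEntryPoints(Method* method, const void* new_code, CodeCache* cache) {
  std::uintptr_t old_value = method->entry_point_.load(std::memory_order_relaxed);
  std::uintptr_t new_value = reinterpret_cast<std::uintptr_t>(new_code);
  if (old_value == new_value) {
    return;  // Avoid dirtying boot-image pages when nothing changes.
  }
  // At most one racing thread wins the CAS for a given old value, so the
  // old entrypoint is registered as zombie at most once.
  bool success = method->entry_point_.compare_exchange_strong(
      old_value, new_value, std::memory_order_relaxed);
  const void* old_code = reinterpret_cast<const void*>(old_value);
  if (success && cache != nullptr && cache->ContainsPc(old_code)) {
    cache->AddZombieCode(method, old_code);
  }
}

int main() {
  Method m;
  CodeCache cache;
  m.SetEntryPointFromQuickCompiledCode(reinterpret_cast<const void*>(0x1000));
  UpdateEntryPoints(&m, reinterpret_cast<const void*>(0x2000), &cache);
  return 0;
}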
-rw-r--r-- runtime/art_method.cc           | 36
-rw-r--r-- runtime/art_method.h            | 32
-rw-r--r-- runtime/gc/space/image_space.cc |  7
-rw-r--r-- runtime/instrumentation.cc      | 48
-rw-r--r-- runtime/runtime_image.cc        |  4
5 files changed, 58 insertions(+), 69 deletions(-)
diff --git a/runtime/art_method.cc b/runtime/art_method.cc
index dac2c08a46..40a01920de 100644
--- a/runtime/art_method.cc
+++ b/runtime/art_method.cc
@@ -924,40 +924,4 @@ ALWAYS_INLINE static inline void DoGetAccessFlagsHelper(ArtMethod* method)
method->GetDeclaringClass<kReadBarrierOption>()->IsErroneous());
}

-template <typename T>
-bool CompareExchange(uintptr_t ptr, uintptr_t old_value, uintptr_t new_value) {
- std::atomic<T>* atomic_addr = reinterpret_cast<std::atomic<T>*>(ptr);
- T cast_old_value = dchecked_integral_cast<T>(old_value);
- return atomic_addr->compare_exchange_strong(cast_old_value,
- dchecked_integral_cast<T>(new_value),
- std::memory_order_relaxed);
-}
-
-void ArtMethod::SetEntryPointFromQuickCompiledCodePtrSize(
- const void* entry_point_from_quick_compiled_code, PointerSize pointer_size) {
- const void* current_entry_point = GetEntryPointFromQuickCompiledCodePtrSize(pointer_size);
- if (current_entry_point == entry_point_from_quick_compiled_code) {
- return;
- }
-
- // Do an atomic exchange to avoid potentially unregistering JIT code twice.
- MemberOffset offset = EntryPointFromQuickCompiledCodeOffset(pointer_size);
- uintptr_t old_value = reinterpret_cast<uintptr_t>(current_entry_point);
- uintptr_t new_value = reinterpret_cast<uintptr_t>(entry_point_from_quick_compiled_code);
- uintptr_t ptr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
- bool success = (pointer_size == PointerSize::k32)
- ? CompareExchange<uint32_t>(ptr, old_value, new_value)
- : CompareExchange<uint64_t>(ptr, old_value, new_value);
-
- // If we successfully updated the entrypoint and the old entrypoint is JITted
- // code, register the old entrypoint as zombie.
- jit::Jit* jit = Runtime::Current()->GetJit();
- if (success &&
- jit != nullptr &&
- jit->GetCodeCache()->ContainsPc(current_entry_point)) {
- jit->GetCodeCache()->AddZombieCode(this, current_entry_point);
- }
-}
-
} // namespace art
diff --git a/runtime/art_method.h b/runtime/art_method.h
index 41bf7fd62c..597bc64d39 100644
--- a/runtime/art_method.h
+++ b/runtime/art_method.h
@@ -779,7 +779,11 @@ class EXPORT ArtMethod final {
}
ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(
const void* entry_point_from_quick_compiled_code, PointerSize pointer_size)
- REQUIRES_SHARED(Locks::mutator_lock_);
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ SetNativePointer(EntryPointFromQuickCompiledCodeOffset(pointer_size),
+ entry_point_from_quick_compiled_code,
+ pointer_size);
+ }

static constexpr MemberOffset DataOffset(PointerSize pointer_size) {
return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
@@ -1082,19 +1086,6 @@ class EXPORT ArtMethod final {
return declaring_class_;
}

- template<typename T>
- ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, PointerSize pointer_size)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- static_assert(std::is_pointer<T>::value, "T must be a pointer type");
- const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
- if (pointer_size == PointerSize::k32) {
- uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
- *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr);
- } else {
- *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value);
- }
- }
-
protected:
// Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
// The class we are a part of.
@@ -1174,6 +1165,19 @@ class EXPORT ArtMethod final {
}
}

+ template<typename T>
+ ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, PointerSize pointer_size)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ static_assert(std::is_pointer<T>::value, "T must be a pointer type");
+ const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
+ if (pointer_size == PointerSize::k32) {
+ uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
+ *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr);
+ } else {
+ *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value);
+ }
+ }
+
static inline bool IsValidIntrinsicUpdate(uint32_t modifier) {
return (((modifier & kAccIntrinsic) == kAccIntrinsic) &&
((modifier & ~(kAccIntrinsic | kAccIntrinsicBits)) == 0) &&
diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc
index e7dde64b5e..b970b12978 100644
--- a/runtime/gc/space/image_space.cc
+++ b/runtime/gc/space/image_space.cc
@@ -1418,12 +1418,7 @@ class ImageSpace::Loader {
const void* old_code = method.GetEntryPointFromQuickCompiledCodePtrSize(kPointerSize);
const void* new_code = forward_code(old_code);
if (old_code != new_code) {
- // Set the pointer directly instead of calling
- // `SetEntryPointFromQuickCompiledCode` as the old pointer could be
- // pointing to anything.
- method.SetNativePointer(ArtMethod::EntryPointFromQuickCompiledCodeOffset(kPointerSize),
- new_code,
- kPointerSize);
+ method.SetEntryPointFromQuickCompiledCode(new_code);
}
}, target_base, kPointerSize);
}
diff --git a/runtime/instrumentation.cc b/runtime/instrumentation.cc
index 46ef653e2c..8977635cd8 100644
--- a/runtime/instrumentation.cc
+++ b/runtime/instrumentation.cc
@@ -249,31 +249,59 @@ static bool CodeSupportsEntryExitHooks(const void* entry_point, ArtMethod* metho
return false;
}

-static void UpdateEntryPoints(ArtMethod* method, const void* quick_code)
+template <typename T>
+bool CompareExchange(uintptr_t ptr, uintptr_t old_value, uintptr_t new_value) {
+ std::atomic<T>* atomic_addr = reinterpret_cast<std::atomic<T>*>(ptr);
+ T cast_old_value = dchecked_integral_cast<T>(old_value);
+ return atomic_addr->compare_exchange_strong(cast_old_value,
+ dchecked_integral_cast<T>(new_value),
+ std::memory_order_relaxed);
+}
+
+static void UpdateEntryPoints(ArtMethod* method, const void* new_code)
REQUIRES_SHARED(Locks::mutator_lock_) {
if (kIsDebugBuild) {
if (method->StillNeedsClinitCheckMayBeDead()) {
- CHECK(CanHandleInitializationCheck(quick_code));
+ CHECK(CanHandleInitializationCheck(new_code));
}
jit::Jit* jit = Runtime::Current()->GetJit();
- if (jit != nullptr && jit->GetCodeCache()->ContainsPc(quick_code)) {
+ if (jit != nullptr && jit->GetCodeCache()->ContainsPc(new_code)) {
// Ensure we always have the thumb entrypoint for JIT on arm32.
if (kRuntimeISA == InstructionSet::kArm) {
- CHECK_EQ(reinterpret_cast<uintptr_t>(quick_code) & 1, 1u);
+ CHECK_EQ(reinterpret_cast<uintptr_t>(new_code) & 1, 1u);
}
}
const Instrumentation* instr = Runtime::Current()->GetInstrumentation();
if (instr->EntryExitStubsInstalled()) {
- CHECK(CodeSupportsEntryExitHooks(quick_code, method));
+ CHECK(CodeSupportsEntryExitHooks(new_code, method));
}
if (instr->InterpreterStubsInstalled() && !method->IsNative()) {
- CHECK_EQ(quick_code, GetQuickToInterpreterBridge());
+ CHECK_EQ(new_code, GetQuickToInterpreterBridge());
}
}

- // If the method is from a boot image, don't dirty it if the entrypoint
- // doesn't change.
- if (method->GetEntryPointFromQuickCompiledCode() != quick_code) {
- method->SetEntryPointFromQuickCompiledCode(quick_code);
+ const void* current_entry_point = method->GetEntryPointFromQuickCompiledCode();
+ if (current_entry_point == new_code) {
+ // If the method is from a boot image, don't dirty it if the entrypoint
+ // doesn't change.
+ return;
+ }
+
+ // Do an atomic exchange to avoid potentially unregistering JIT code twice.
+ MemberOffset offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kRuntimePointerSize);
+ uintptr_t old_value = reinterpret_cast<uintptr_t>(current_entry_point);
+ uintptr_t new_value = reinterpret_cast<uintptr_t>(new_code);
+ uintptr_t ptr = reinterpret_cast<uintptr_t>(method) + offset.Uint32Value();
+ bool success = (kRuntimePointerSize == PointerSize::k32)
+ ? CompareExchange<uint32_t>(ptr, old_value, new_value)
+ : CompareExchange<uint64_t>(ptr, old_value, new_value);
+
+ // If we successfully updated the entrypoint and the old entrypoint is JITted
+ // code, register the old entrypoint as zombie.
+ jit::Jit* jit = Runtime::Current()->GetJit();
+ if (success &&
+ jit != nullptr &&
+ jit->GetCodeCache()->ContainsPc(current_entry_point)) {
+ jit->GetCodeCache()->AddZombieCode(method, current_entry_point);
}
}
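The comment carried over above, "Do an atomic exchange to avoid potentially unregistering JIT code twice", names the property the CAS preserves now that it lives only here. Below is a hypothetical standalone demo of that property (not ART code): two threads race to install different entrypoints over the same old one; exactly one compare_exchange_strong succeeds, so the old code is retired once, never twice.

#include <atomic>
#include <cstdint>
#include <cstdio>
#include <thread>

int main() {
  std::atomic<std::uint64_t> entry_point{0x1000};  // pretend old JIT entrypoint
  std::atomic<int> zombie_registrations{0};

  auto update = [&](std::uint64_t new_code) {
    std::uint64_t expected = 0x1000;
    // Relaxed ordering mirrors what the patch uses for the entrypoint CAS.
    if (entry_point.compare_exchange_strong(expected, new_code,
                                            std::memory_order_relaxed)) {
      zombie_registrations.fetch_add(1, std::memory_order_relaxed);
    }
  };

  std::thread t1(update, std::uint64_t{0x2000});
  std::thread t2(update, std::uint64_t{0x3000});
  t1.join();
  t2.join();

  // Always prints 1: the losing thread's CAS fails, so it never re-retires
  // the old entrypoint.
  std::printf("zombie registrations: %d\n", zombie_registrations.load());
  return 0;
}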
diff --git a/runtime/runtime_image.cc b/runtime/runtime_image.cc
index b134c79286..997ea2fde6 100644
--- a/runtime/runtime_image.cc
+++ b/runtime/runtime_image.cc
@@ -964,9 +964,7 @@ class RuntimeImageHelper {
entrypoint = boot_jni_stub;
}
}
- copy->SetNativePointer(ArtMethod::EntryPointFromQuickCompiledCodeOffset(kRuntimePointerSize),
- entrypoint,
- kRuntimePointerSize);
+ copy->SetEntryPointFromQuickCompiledCode(entrypoint);
if (method->IsNative()) {
StubType stub_type = method->IsCriticalNative()