summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--compiler/optimizing/code_generation_data.cc11
-rw-r--r--compiler/optimizing/code_generation_data.h30
-rw-r--r--compiler/optimizing/code_generator.cc13
-rw-r--r--compiler/optimizing/code_generator.h5
-rw-r--r--compiler/optimizing/code_generator_x86_64.cc46
-rw-r--r--compiler/optimizing/code_generator_x86_64.h5
-rw-r--r--compiler/optimizing/instruction_builder.cc7
-rw-r--r--compiler/optimizing/nodes.h9
-rw-r--r--compiler/optimizing/sharpening.cc41
-rw-r--r--compiler/optimizing/sharpening.h8
-rw-r--r--runtime/jit/jit.cc7
-rw-r--r--runtime/jit/jit_code_cache-inl.h48
-rw-r--r--runtime/jit/jit_code_cache.cc42
-rw-r--r--runtime/jit/jit_code_cache.h9
-rw-r--r--runtime/well_known_classes.h2
-rw-r--r--test/979-const-method-handle/src/Main.java2
16 files changed, 268 insertions, 17 deletions
diff --git a/compiler/optimizing/code_generation_data.cc b/compiler/optimizing/code_generation_data.cc
index 7b23d46dc5..afc4f62f0f 100644
--- a/compiler/optimizing/code_generation_data.cc
+++ b/compiler/optimizing/code_generation_data.cc
@@ -20,6 +20,7 @@
#include "intern_table.h"
#include "mirror/object-inl.h"
#include "runtime.h"
+#include "well_known_classes-inl.h"
namespace art HIDDEN {
@@ -52,6 +53,16 @@ void CodeGenerationData::EmitJitRoots(
entry.second = index;
++index;
}
+ for (auto& entry : jit_method_type_roots_) {
+ // Update the `roots` with the MethodType, and replace the temporarily
+ // stored address with the index into the table.
+ uint64_t address = entry.second;
+ roots->emplace_back(reinterpret_cast<StackReference<mirror::Object>*>(address));
+ DCHECK(roots->back() != nullptr);
+ DCHECK(roots->back()->InstanceOf(WellKnownClasses::java_lang_invoke_MethodType.Get()));
+ entry.second = index;
+ ++index;
+ }
}
} // namespace art
diff --git a/compiler/optimizing/code_generation_data.h b/compiler/optimizing/code_generation_data.h
index e78ba8f574..0d4db66ab4 100644
--- a/compiler/optimizing/code_generation_data.h
+++ b/compiler/optimizing/code_generation_data.h
@@ -23,10 +23,12 @@
#include "base/scoped_arena_allocator.h"
#include "base/scoped_arena_containers.h"
#include "code_generator.h"
+#include "dex/proto_reference.h"
#include "dex/string_reference.h"
#include "dex/type_reference.h"
#include "handle.h"
#include "mirror/class.h"
+#include "mirror/method_type.h"
#include "mirror/object.h"
#include "mirror/string.h"
#include "stack_map_stream.h"
@@ -82,8 +84,24 @@ class CodeGenerationData : public DeletableArenaObject<kArenaAllocCodeGenerator>
return jit_class_roots_.size();
}
+ void ReserveJitMethodTypeRoot(ProtoReference proto_reference,
+ Handle<mirror::MethodType> method_type) {
+ jit_method_type_roots_.Overwrite(proto_reference,
+ reinterpret_cast64<uint64_t>(method_type.GetReference()));
+ }
+
+ uint64_t GetJitMethodTypeRootIndex(ProtoReference proto_reference) const {
+ return jit_method_type_roots_.Get(proto_reference);
+ }
+
+ size_t GetNumberOfJitMethodTypeRoots() const {
+ return jit_method_type_roots_.size();
+ }
+
size_t GetNumberOfJitRoots() const {
- return GetNumberOfJitStringRoots() + GetNumberOfJitClassRoots();
+ return GetNumberOfJitStringRoots() +
+ GetNumberOfJitClassRoots() +
+ GetNumberOfJitMethodTypeRoots();
}
void EmitJitRoots(/*out*/std::vector<Handle<mirror::Object>>* roots)
@@ -97,7 +115,9 @@ class CodeGenerationData : public DeletableArenaObject<kArenaAllocCodeGenerator>
jit_string_roots_(StringReferenceValueComparator(),
allocator_.Adapter(kArenaAllocCodeGenerator)),
jit_class_roots_(TypeReferenceValueComparator(),
- allocator_.Adapter(kArenaAllocCodeGenerator)) {
+ allocator_.Adapter(kArenaAllocCodeGenerator)),
+ jit_method_type_roots_(ProtoReferenceValueComparator(),
+ allocator_.Adapter(kArenaAllocCodeGenerator)) {
slow_paths_.reserve(kDefaultSlowPathsCapacity);
}
@@ -116,6 +136,12 @@ class CodeGenerationData : public DeletableArenaObject<kArenaAllocCodeGenerator>
// Entries are initially added with a pointer in the handle zone, and `EmitJitRoots`
// will compute all the indices.
ScopedArenaSafeMap<TypeReference, uint64_t, TypeReferenceValueComparator> jit_class_roots_;
+
+ // Maps a ProtoReference (dex_file, proto_index) to the index in the literal table.
+ // Entries are initially added with a pointer in the handle zone, and `EmitJitRoots`
+ // will compute all the indices.
+ ScopedArenaSafeMap<ProtoReference, uint64_t, ProtoReferenceValueComparator>
+ jit_method_type_roots_;
};
} // namespace art
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 88bd818b0c..51714ef548 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -16,6 +16,7 @@
#include "code_generator.h"
#include "base/globals.h"
+#include "mirror/method_type.h"
#ifdef ART_ENABLE_CODEGEN_arm
#include "code_generator_arm_vixl.h"
@@ -209,11 +210,23 @@ uint64_t CodeGenerator::GetJitClassRootIndex(TypeReference type_reference) {
return code_generation_data_->GetJitClassRootIndex(type_reference);
}
+void CodeGenerator::ReserveJitMethodTypeRoot(ProtoReference proto_reference,
+ Handle<mirror::MethodType> method_type) {
+ DCHECK(code_generation_data_ != nullptr);
+ code_generation_data_->ReserveJitMethodTypeRoot(proto_reference, method_type);
+}
+
+uint64_t CodeGenerator::GetJitMethodTypeRootIndex(ProtoReference proto_reference) {
+ DCHECK(code_generation_data_ != nullptr);
+ return code_generation_data_->GetJitMethodTypeRootIndex(proto_reference);
+}
+
void CodeGenerator::EmitJitRootPatches([[maybe_unused]] uint8_t* code,
[[maybe_unused]] const uint8_t* roots_data) {
DCHECK(code_generation_data_ != nullptr);
DCHECK_EQ(code_generation_data_->GetNumberOfJitStringRoots(), 0u);
DCHECK_EQ(code_generation_data_->GetNumberOfJitClassRoots(), 0u);
+ DCHECK_EQ(code_generation_data_->GetNumberOfJitMethodTypeRoots(), 0u);
}
uint32_t CodeGenerator::GetArrayLengthOffset(HArrayLength* array_length) {
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index aec7b45a1a..950bae5c8f 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -29,10 +29,12 @@
#include "base/memory_region.h"
#include "base/pointer_size.h"
#include "class_root.h"
+#include "dex/proto_reference.h"
#include "dex/string_reference.h"
#include "dex/type_reference.h"
#include "graph_visualizer.h"
#include "locations.h"
+#include "mirror/method_type.h"
#include "nodes.h"
#include "oat/oat_quick_method_header.h"
#include "optimizing_compiler_stats.h"
@@ -834,6 +836,9 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
uint64_t GetJitStringRootIndex(StringReference string_reference);
void ReserveJitClassRoot(TypeReference type_reference, Handle<mirror::Class> klass);
uint64_t GetJitClassRootIndex(TypeReference type_reference);
+ void ReserveJitMethodTypeRoot(ProtoReference proto_reference,
+ Handle<mirror::MethodType> method_type);
+ uint64_t GetJitMethodTypeRootIndex(ProtoReference proto_reference);
// Emit the patches assocatied with JIT roots. Only applies to JIT compiled code.
virtual void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data);
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index e2b4344be9..f61bb39ccc 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -35,6 +35,7 @@
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
+#include "mirror/method_type.h"
#include "mirror/object_reference.h"
#include "mirror/var_handle.h"
#include "optimizing/nodes.h"
@@ -1628,6 +1629,7 @@ CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+ jit_method_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
@@ -6824,20 +6826,31 @@ void InstructionCodeGeneratorX86_64::VisitLoadMethodHandle(HLoadMethodHandle* lo
codegen_->GenerateLoadMethodHandleRuntimeCall(load);
}
+Label* CodeGeneratorX86_64::NewJitRootMethodTypePatch(const DexFile& dex_file,
+ dex::ProtoIndex proto_index,
+ Handle<mirror::MethodType> handle) {
+ ReserveJitMethodTypeRoot(ProtoReference(&dex_file, proto_index), handle);
+ // Add a patch entry and return the label.
+ jit_method_type_patches_.emplace_back(&dex_file, proto_index.index_);
+ PatchInfo<Label>* info = &jit_method_type_patches_.back();
+ return &info->label;
+}
+
void LocationsBuilderX86_64::VisitLoadMethodType(HLoadMethodType* load) {
LocationSummary* locations =
new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
if (load->GetLoadKind() == HLoadMethodType::LoadKind::kRuntimeCall) {
- Location location = Location::RegisterLocation(RAX);
- CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
+ Location location = Location::RegisterLocation(RAX);
+ CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
} else {
- DCHECK_EQ(load->GetLoadKind(), HLoadMethodType::LoadKind::kBssEntry);
locations->SetOut(Location::RequiresRegister());
- if (codegen_->EmitNonBakerReadBarrier()) {
- // For non-Baker read barrier we have a temp-clobbering call.
- } else {
- // Rely on the pResolveMethodType to save everything.
- locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
+ if (load->GetLoadKind() == HLoadMethodType::LoadKind::kBssEntry) {
+ if (codegen_->EmitNonBakerReadBarrier()) {
+ // For non-Baker read barrier we have a temp-clobbering call.
+ } else {
+ // Rely on the pResolveMethodType to save everything.
+ locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
+ }
}
}
}
@@ -6864,6 +6877,17 @@ void InstructionCodeGeneratorX86_64::VisitLoadMethodType(HLoadMethodType* load)
__ Bind(slow_path->GetExitLabel());
return;
}
+ case HLoadMethodType::LoadKind::kJitTableAddress: {
+ Address address = Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset,
+ /* no_rip= */ true);
+ Handle<mirror::MethodType> method_type = load->GetMethodType();
+ DCHECK(method_type != nullptr);
+ Label* fixup_label = codegen_->NewJitRootMethodTypePatch(
+ load->GetDexFile(), load->GetProtoIndex(), method_type);
+ GenerateGcRootFieldLoad(
+ load, out_loc, address, fixup_label, codegen_->GetCompilerReadBarrierOption());
+ return;
+ }
default:
DCHECK_EQ(load->GetLoadKind(), HLoadMethodType::LoadKind::kRuntimeCall);
codegen_->GenerateLoadMethodTypeRuntimeCall(load);
@@ -8543,6 +8567,12 @@ void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots
uint64_t index_in_table = GetJitClassRootIndex(type_reference);
PatchJitRootUse(code, roots_data, info, index_in_table);
}
+
+ for (const PatchInfo<Label>& info : jit_method_type_patches_) {
+ ProtoReference proto_reference(info.target_dex_file, dex::ProtoIndex(info.offset_or_index));
+ uint64_t index_in_table = GetJitMethodTypeRootIndex(proto_reference);
+ PatchJitRootUse(code, roots_data, info, index_in_table);
+ }
}
bool LocationsBuilderX86_64::CpuHasAvxFeatureFlag() {
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index 81c8ead32e..ddeb33a261 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -546,6 +546,9 @@ class CodeGeneratorX86_64 : public CodeGenerator {
Label* NewJitRootClassPatch(const DexFile& dex_file,
dex::TypeIndex type_index,
Handle<mirror::Class> handle);
+ Label* NewJitRootMethodTypePatch(const DexFile& dex_file,
+ dex::ProtoIndex proto_index,
+ Handle<mirror::MethodType> method_type);
void LoadBootImageAddress(CpuRegister reg, uint32_t boot_image_reference);
void LoadIntrinsicDeclaringClass(CpuRegister reg, HInvoke* invoke);
@@ -765,6 +768,8 @@ class CodeGeneratorX86_64 : public CodeGenerator {
ArenaDeque<PatchInfo<Label>> jit_string_patches_;
// Patches for class literals in JIT compiled code.
ArenaDeque<PatchInfo<Label>> jit_class_patches_;
+ // Patches for method type literals in JIT compiled code.
+ ArenaDeque<PatchInfo<Label>> jit_method_type_patches_;
// Fixups for jump tables need to be handled specially.
ArenaVector<JumpTableRIPFixup*> fixups_to_jump_tables_;
diff --git a/compiler/optimizing/instruction_builder.cc b/compiler/optimizing/instruction_builder.cc
index 3a64769a8b..c97c78ca17 100644
--- a/compiler/optimizing/instruction_builder.cc
+++ b/compiler/optimizing/instruction_builder.cc
@@ -2720,9 +2720,10 @@ void HInstructionBuilder::BuildLoadMethodType(dex::ProtoIndex proto_index, uint3
const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
HLoadMethodType* load_method_type =
new (allocator_) HLoadMethodType(graph_->GetCurrentMethod(), proto_index, dex_file, dex_pc);
- if (!code_generator_->GetCompilerOptions().IsJitCompiler()) {
- load_method_type->SetLoadKind(HLoadMethodType::LoadKind::kBssEntry);
- }
+ HSharpening::ProcessLoadMethodType(load_method_type,
+ code_generator_,
+ *dex_compilation_unit_,
+ graph_->GetHandleCache()->GetHandles());
AppendInstruction(load_method_type);
}
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 1e3aca64db..825134497d 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -7218,6 +7218,8 @@ class HLoadMethodType final : public HInstruction {
enum class LoadKind {
// Load from an entry in the .bss section using a PC-relative load.
kBssEntry,
+ // Load from the root table associated with the JIT compiled method.
+ kJitTableAddress,
// Load using a single runtime call.
kRuntimeCall,
@@ -7254,6 +7256,10 @@ class HLoadMethodType final : public HInstruction {
dex::ProtoIndex GetProtoIndex() const { return proto_index_; }
+ Handle<mirror::MethodType> GetMethodType() const { return method_type_; }
+
+ void SetMethodType(Handle<mirror::MethodType> method_type) { method_type_ = method_type; }
+
const DexFile& GetDexFile() const { return dex_file_; }
static SideEffects SideEffectsForArchRuntimeCalls() {
@@ -7283,6 +7289,8 @@ class HLoadMethodType final : public HInstruction {
const dex::ProtoIndex proto_index_;
const DexFile& dex_file_;
+
+ Handle<mirror::MethodType> method_type_;
};
std::ostream& operator<<(std::ostream& os, HLoadMethodType::LoadKind rhs);
@@ -7293,6 +7301,7 @@ inline void HLoadMethodType::SetLoadKind(LoadKind load_kind) {
DCHECK(GetBlock() == nullptr);
DCHECK(GetEnvironment() == nullptr);
DCHECK_EQ(GetLoadKind(), LoadKind::kRuntimeCall);
+ DCHECK_IMPLIES(GetLoadKind() == LoadKind::kJitTableAddress, GetMethodType() != nullptr);
SetPackedField<LoadKindField>(load_kind);
}
diff --git a/compiler/optimizing/sharpening.cc b/compiler/optimizing/sharpening.cc
index cb94491b8e..1b6a9fb601 100644
--- a/compiler/optimizing/sharpening.cc
+++ b/compiler/optimizing/sharpening.cc
@@ -471,4 +471,45 @@ void HSharpening::ProcessLoadString(
load_string->SetLoadKind(load_kind);
}
+void HSharpening::ProcessLoadMethodType(
+ HLoadMethodType* load_method_type,
+ CodeGenerator* codegen,
+ const DexCompilationUnit& dex_compilation_unit,
+ VariableSizedHandleScope* handles) {
+ const CompilerOptions& compiler_options = codegen->GetCompilerOptions();
+
+ HLoadMethodType::LoadKind desired_load_kind = static_cast<HLoadMethodType::LoadKind>(-1);
+
+ if (compiler_options.IsJitCompiler()) {
+ DCHECK(!compiler_options.GetCompilePic());
+ Runtime* runtime = Runtime::Current();
+ ClassLinker* class_linker = runtime->GetClassLinker();
+ ScopedObjectAccess soa(Thread::Current());
+ ObjPtr<mirror::MethodType> method_type =
+ class_linker->ResolveMethodType(Thread::Current(),
+ load_method_type->GetProtoIndex(),
+ dex_compilation_unit.GetDexCache(),
+ dex_compilation_unit.GetClassLoader());
+
+ if (method_type != nullptr) {
+ load_method_type->SetMethodType(handles->NewHandle(method_type));
+ desired_load_kind = HLoadMethodType::LoadKind::kJitTableAddress;
+ } else {
+ DCHECK_EQ(load_method_type->GetLoadKind(), HLoadMethodType::LoadKind::kRuntimeCall);
+ desired_load_kind = HLoadMethodType::LoadKind::kRuntimeCall;
+ Thread::Current()->ClearException();
+ }
+ } else {
+ if (compiler_options.GetCompilePic()) {
+ desired_load_kind = HLoadMethodType::LoadKind::kBssEntry;
+ } else {
+ // Test configuration, do not sharpen.
+ desired_load_kind = HLoadMethodType::LoadKind::kRuntimeCall;
+ }
+ }
+
+ DCHECK_NE(desired_load_kind, static_cast<HLoadMethodType::LoadKind>(-1));
+ load_method_type->SetLoadKind(desired_load_kind);
+}
+
} // namespace art
diff --git a/compiler/optimizing/sharpening.h b/compiler/optimizing/sharpening.h
index 6dfe904f27..88d3b2f604 100644
--- a/compiler/optimizing/sharpening.h
+++ b/compiler/optimizing/sharpening.h
@@ -27,7 +27,7 @@ class CodeGenerator;
class DexCompilationUnit;
// Utility methods that try to improve the way we dispatch methods, and access
-// types and strings.
+// types, strings and method types.
class HSharpening {
public:
// Used by the builder and InstructionSimplifier.
@@ -54,6 +54,12 @@ class HSharpening {
CodeGenerator* codegen,
const DexCompilationUnit& dex_compilation_unit,
VariableSizedHandleScope* handles);
+
+ // Used by the builder.
+ static void ProcessLoadMethodType(HLoadMethodType* load_method_type,
+ CodeGenerator* codegen,
+ const DexCompilationUnit& dex_compilation_unit,
+ VariableSizedHandleScope* handles);
};
} // namespace art
diff --git a/runtime/jit/jit.cc b/runtime/jit/jit.cc
index 4d395c8528..0dd18b503d 100644
--- a/runtime/jit/jit.cc
+++ b/runtime/jit/jit.cc
@@ -37,7 +37,7 @@
#include "gc/space/image_space.h"
#include "interpreter/interpreter.h"
#include "jit-inl.h"
-#include "jit_code_cache.h"
+#include "jit_code_cache-inl.h"
#include "jit_create.h"
#include "jni/java_vm_ext.h"
#include "mirror/method_handle_impl.h"
@@ -1883,6 +1883,11 @@ void Jit::VisitRoots(RootVisitor* visitor) {
if (thread_pool_ != nullptr) {
thread_pool_->VisitRoots(visitor);
}
+
+ // MethodType-s are weakly interned, but a MethodType can be referenced from JIT-compiled
+ // code. We visit the JitCodeCache so that such MethodType-s are treated as strongly reachable.
+ UnbufferedRootVisitor root_visitor(visitor, RootInfo(kRootStickyClass));
+ code_cache_->VisitRootTables(root_visitor);
}
void JitThreadPool::VisitRoots(RootVisitor* visitor) {
diff --git a/runtime/jit/jit_code_cache-inl.h b/runtime/jit/jit_code_cache-inl.h
new file mode 100644
index 0000000000..5d5354a087
--- /dev/null
+++ b/runtime/jit/jit_code_cache-inl.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_JIT_JIT_CODE_CACHE_INL_H_
+#define ART_RUNTIME_JIT_JIT_CODE_CACHE_INL_H_
+
+#include "gc_root.h"
+#include "jit/jit_code_cache.h"
+#include "thread.h"
+
+namespace art HIDDEN {
+
+class ArtMethod;
+
+namespace jit {
+
+template<typename RootVisitorType>
+EXPORT void JitCodeCache::VisitRootTables(RootVisitorType& visitor) {
+ Thread* self = Thread::Current();
+ ScopedDebugDisallowReadBarriers sddrb(self);
+ MutexLock mu(self, *Locks::jit_lock_);
+
+ for (auto& [_, method_types] : method_types_map_) {
+ for (auto& method_type : method_types) {
+ visitor.VisitRoot(method_type.AddressWithoutBarrier());
+ }
+ }
+}
+
+} // namespace jit
+} // namespace art
+
+#endif // ART_RUNTIME_JIT_JIT_CODE_CACHE_INL_H_
+
+
diff --git a/runtime/jit/jit_code_cache.cc b/runtime/jit/jit_code_cache.cc
index 4b69dc5c01..5f0bb4e719 100644
--- a/runtime/jit/jit_code_cache.cc
+++ b/runtime/jit/jit_code_cache.cc
@@ -50,6 +50,7 @@
#include "jit/profiling_info.h"
#include "jit/jit_scoped_code_cache_write.h"
#include "linear_alloc.h"
+#include "mirror/method_type.h"
#include "oat/oat_file-inl.h"
#include "oat/oat_quick_method_header.h"
#include "object_callbacks.h"
@@ -59,6 +60,7 @@
#include "thread-current-inl.h"
#include "thread-inl.h"
#include "thread_list.h"
+#include "well_known_classes-inl.h"
namespace art HIDDEN {
namespace jit {
@@ -436,16 +438,30 @@ void JitCodeCache::SweepRootTables(IsMarkedVisitor* visitor) {
if (new_object != object) {
roots[i] = GcRoot<mirror::Object>(new_object);
}
- } else {
+ } else if (object->IsClass<kDefaultVerifyFlags>()) {
mirror::Object* new_klass = visitor->IsMarked(object);
if (new_klass == nullptr) {
roots[i] = GcRoot<mirror::Object>(Runtime::GetWeakClassSentinel());
} else if (new_klass != object) {
roots[i] = GcRoot<mirror::Object>(new_klass);
}
+ } else {
+ mirror::Object* new_method_type = visitor->IsMarked(object);
+
+ // The MethodType-s have been visited during VisitConcurrentRoots, so they must be live.
+ DCHECK_NE(new_method_type, nullptr) << "old-method-type" << object;
+ ObjPtr<mirror::Class> method_type_class =
+ WellKnownClasses::java_lang_invoke_MethodType.Get<kWithoutReadBarrier>();
+ DCHECK_EQ((new_method_type->GetClass<kVerifyNone, kWithoutReadBarrier>()),
+ method_type_class.Ptr());
+
+ if (new_method_type != object) {
+ roots[i] = GcRoot<mirror::Object>(new_method_type);
+ }
}
}
}
+
// Walk over inline caches to clear entries containing unloaded classes.
for (const auto& [_, info] : profiling_infos_) {
InlineCache* caches = info->GetInlineCaches();
@@ -567,6 +583,7 @@ void JitCodeCache::RemoveMethodsIn(Thread* self, const LinearAlloc& alloc) {
if (alloc.ContainsUnsafe(it->second)) {
method_headers.insert(OatQuickMethodHeader::FromCodePointer(it->first));
VLOG(jit) << "JIT removed " << it->second->PrettyMethod() << ": " << it->first;
+ method_types_map_.erase(it->first);
zombie_code_.erase(it->first);
processed_zombie_code_.erase(it->first);
it = method_code_map_.erase(it);
@@ -755,6 +772,27 @@ bool JitCodeCache::Commit(Thread* self,
} else {
ScopedDebugDisallowReadBarriers sddrb(self);
method_code_map_.Put(code_ptr, method);
+
+ // Search for MethodType-s in the roots. They need to be treated as strongly reachable
+ // for as long as the corresponding compiled code has not been removed.
+ ObjPtr<mirror::Class> method_type_class =
+ WellKnownClasses::java_lang_invoke_MethodType.Get<kWithoutReadBarrier>();
+
+ auto method_types_in_roots = std::vector<GcRoot<mirror::MethodType>>();
+
+ for (auto root : roots) {
+ ObjPtr<mirror::Class> klass = root->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>();
+ if (klass == method_type_class ||
+ klass == ReadBarrier::IsMarked(method_type_class.Ptr()) ||
+ ReadBarrier::IsMarked(klass.Ptr()) == method_type_class) {
+ ObjPtr<mirror::MethodType> mt = ObjPtr<mirror::MethodType>::DownCast(root.Get());
+ method_types_in_roots.emplace_back(GcRoot(mt));
+ }
+ }
+
+ if (!method_types_in_roots.empty()) {
+ method_types_map_.Put(code_ptr, method_types_in_roots);
+ }
}
if (compilation_kind == CompilationKind::kOsr) {
ScopedDebugDisallowReadBarriers sddrb(self);
@@ -854,6 +892,7 @@ bool JitCodeCache::RemoveMethodLocked(ArtMethod* method, bool release_memory) {
FreeCodeAndData(it->first);
}
VLOG(jit) << "JIT removed " << it->second->PrettyMethod() << ": " << it->first;
+ method_types_map_.erase(it->first);
it = method_code_map_.erase(it);
} else {
++it;
@@ -1108,6 +1147,7 @@ void JitCodeCache::RemoveUnmarkedCode(Thread* self) {
} else {
OatQuickMethodHeader* header = OatQuickMethodHeader::FromCodePointer(code_ptr);
method_headers.insert(header);
+ method_types_map_.erase(header->GetCode());
method_code_map_.erase(header->GetCode());
VLOG(jit) << "JIT removed " << *it;
it = processed_zombie_code_.erase(it);
diff --git a/runtime/jit/jit_code_cache.h b/runtime/jit/jit_code_cache.h
index 3dd57121ca..6ca79d560f 100644
--- a/runtime/jit/jit_code_cache.h
+++ b/runtime/jit/jit_code_cache.h
@@ -356,6 +356,10 @@ class JitCodeCache {
bool IsOsrCompiled(ArtMethod* method) REQUIRES(!Locks::jit_lock_);
+ // Visit GC roots (except j.l.Class and j.l.String) held by JIT-ed code.
+ template<typename RootVisitorType>
+ EXPORT void VisitRootTables(RootVisitorType& visitor) NO_THREAD_SAFETY_ANALYSIS;
+
void SweepRootTables(IsMarkedVisitor* visitor)
REQUIRES(!Locks::jit_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -549,6 +553,11 @@ class JitCodeCache {
// Holds compiled code associated to the ArtMethod.
SafeMap<const void*, ArtMethod*> method_code_map_ GUARDED_BY(Locks::jit_lock_);
+ // MethodType-s referenced by compiled code. A subset of method_code_map_ used to treat a
+ // MethodType as strongly reachable from the corresponding code.
+ SafeMap<const void*, std::vector<GcRoot<mirror::MethodType>>> method_types_map_
+ GUARDED_BY(Locks::jit_lock_);
+
// Holds compiled code associated to the ArtMethod. Used when pre-jitting
// methods whose entrypoints have the resolution stub.
SafeMap<ArtMethod*, const void*> saved_compiled_methods_map_ GUARDED_BY(Locks::jit_lock_);
diff --git a/runtime/well_known_classes.h b/runtime/well_known_classes.h
index bd8bbe0108..f29daad204 100644
--- a/runtime/well_known_classes.h
+++ b/runtime/well_known_classes.h
@@ -251,6 +251,8 @@ struct EXPORT WellKnownClasses {
java_lang_StackOverflowError;
static constexpr ClassFromField<&java_lang_Thread_daemon> java_lang_Thread;
static constexpr ClassFromField<&java_lang_ThreadGroup_groups> java_lang_ThreadGroup;
+ static constexpr ClassFromMethod<&java_lang_invoke_MethodType_makeImpl>
+ java_lang_invoke_MethodType;
static constexpr ClassFromMethod<&java_lang_reflect_InvocationTargetException_init>
java_lang_reflect_InvocationTargetException;
static constexpr ClassFromMethod<&java_lang_reflect_Parameter_init>
diff --git a/test/979-const-method-handle/src/Main.java b/test/979-const-method-handle/src/Main.java
index 72d529b68b..17d5d91c50 100644
--- a/test/979-const-method-handle/src/Main.java
+++ b/test/979-const-method-handle/src/Main.java
@@ -29,7 +29,7 @@ class Main {
* Number of iterations run to attempt to trigger JIT compilation. These tests run on ART and
* the RI so they iterate rather than using the ART only native method ensureJitCompiled().
*/
- private static final int ITERATIONS_FOR_JIT = 12000;
+ private static final int ITERATIONS_FOR_JIT = 30000;
/** A static field updated by method handle getters and setters. */
private static String name = "default";