author Santiago Aboy Solanes <solanes@google.com> 2024-06-24 08:02:27 +0000
committer Santiago Aboy Solanes <solanes@google.com> 2024-06-24 16:03:27 +0000
commit b63adc919ba9a53f4fbad476356c702845821149 (patch)
tree 8d286560ebaa6959ea90d68a06cea67c68196ebc
parent 6a4404c7108ffe5cb3f63d1cfd01341c4ab8b189 (diff)
Revert^3 "x86_64: Add JIT support for LoadMethodType."
This reverts commit d92a43f4310e2d634d6e8f24103fc1e27557d784.

Reason for revert: Failing 979-const-method-handle
https://ci.chromium.org/ui/p/art/builders/ci/host-x86_64-cms/10095/overview

Change-Id: I9b44d8cc66e98db074edfa90ce2ebab087e4b115
-rw-r--r--  compiler/optimizing/code_generation_data.cc    11
-rw-r--r--  compiler/optimizing/code_generation_data.h     30
-rw-r--r--  compiler/optimizing/code_generator.cc          13
-rw-r--r--  compiler/optimizing/code_generator.h            5
-rw-r--r--  compiler/optimizing/code_generator_x86_64.cc   46
-rw-r--r--  compiler/optimizing/code_generator_x86_64.h     5
-rw-r--r--  compiler/optimizing/instruction_builder.cc      7
-rw-r--r--  compiler/optimizing/nodes.h                     9
-rw-r--r--  compiler/optimizing/sharpening.cc              41
-rw-r--r--  compiler/optimizing/sharpening.h                8
-rw-r--r--  runtime/jit/jit.cc                              7
-rw-r--r--  runtime/jit/jit_code_cache-inl.h               48
-rw-r--r--  runtime/jit/jit_code_cache.cc                  42
-rw-r--r--  runtime/jit/jit_code_cache.h                    9
-rw-r--r--  runtime/well_known_classes.h                    2
-rw-r--r--  test/979-const-method-handle/src/Main.java      2
16 files changed, 17 insertions, 268 deletions
diff --git a/compiler/optimizing/code_generation_data.cc b/compiler/optimizing/code_generation_data.cc
index afc4f62f0f..7b23d46dc5 100644
--- a/compiler/optimizing/code_generation_data.cc
+++ b/compiler/optimizing/code_generation_data.cc
@@ -20,7 +20,6 @@
#include "intern_table.h"
#include "mirror/object-inl.h"
#include "runtime.h"
-#include "well_known_classes-inl.h"
namespace art HIDDEN {
@@ -53,16 +52,6 @@ void CodeGenerationData::EmitJitRoots(
entry.second = index;
++index;
}
- for (auto& entry : jit_method_type_roots_) {
- // Update the `roots` with the MethodType, and replace the address temporarily
- // stored to the index in the table.
- uint64_t address = entry.second;
- roots->emplace_back(reinterpret_cast<StackReference<mirror::Object>*>(address));
- DCHECK(roots->back() != nullptr);
- DCHECK(roots->back()->InstanceOf(WellKnownClasses::java_lang_invoke_MethodType.Get()));
- entry.second = index;
- ++index;
- }
}
} // namespace art
diff --git a/compiler/optimizing/code_generation_data.h b/compiler/optimizing/code_generation_data.h
index 0d4db66ab4..e78ba8f574 100644
--- a/compiler/optimizing/code_generation_data.h
+++ b/compiler/optimizing/code_generation_data.h
@@ -23,12 +23,10 @@
#include "base/scoped_arena_allocator.h"
#include "base/scoped_arena_containers.h"
#include "code_generator.h"
-#include "dex/proto_reference.h"
#include "dex/string_reference.h"
#include "dex/type_reference.h"
#include "handle.h"
#include "mirror/class.h"
-#include "mirror/method_type.h"
#include "mirror/object.h"
#include "mirror/string.h"
#include "stack_map_stream.h"
@@ -84,24 +82,8 @@ class CodeGenerationData : public DeletableArenaObject<kArenaAllocCodeGenerator>
return jit_class_roots_.size();
}
- void ReserveJitMethodTypeRoot(ProtoReference proto_reference,
- Handle<mirror::MethodType> method_type) {
- jit_method_type_roots_.Overwrite(proto_reference,
- reinterpret_cast64<uint64_t>(method_type.GetReference()));
- }
-
- uint64_t GetJitMethodTypeRootIndex(ProtoReference proto_reference) const {
- return jit_method_type_roots_.Get(proto_reference);
- }
-
- size_t GetNumberOfJitMethodTypeRoots() const {
- return jit_method_type_roots_.size();
- }
-
size_t GetNumberOfJitRoots() const {
- return GetNumberOfJitStringRoots() +
- GetNumberOfJitClassRoots() +
- GetNumberOfJitMethodTypeRoots();
+ return GetNumberOfJitStringRoots() + GetNumberOfJitClassRoots();
}
void EmitJitRoots(/*out*/std::vector<Handle<mirror::Object>>* roots)
@@ -115,9 +97,7 @@ class CodeGenerationData : public DeletableArenaObject<kArenaAllocCodeGenerator>
jit_string_roots_(StringReferenceValueComparator(),
allocator_.Adapter(kArenaAllocCodeGenerator)),
jit_class_roots_(TypeReferenceValueComparator(),
- allocator_.Adapter(kArenaAllocCodeGenerator)),
- jit_method_type_roots_(ProtoReferenceValueComparator(),
- allocator_.Adapter(kArenaAllocCodeGenerator)) {
+ allocator_.Adapter(kArenaAllocCodeGenerator)) {
slow_paths_.reserve(kDefaultSlowPathsCapacity);
}
@@ -136,12 +116,6 @@ class CodeGenerationData : public DeletableArenaObject<kArenaAllocCodeGenerator>
// Entries are initially added with a pointer in the handle zone, and `EmitJitRoots`
// will compute all the indices.
ScopedArenaSafeMap<TypeReference, uint64_t, TypeReferenceValueComparator> jit_class_roots_;
-
- // Maps a ProtoReference (dex_file, proto_index) to the index in the literal table.
- // Entries are initially added with a pointer in the handle zone, and `EmitJitRoots`
- // will compute all the indices.
- ScopedArenaSafeMap<ProtoReference, uint64_t, ProtoReferenceValueComparator>
- jit_method_type_roots_;
};
} // namespace art
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 51714ef548..88bd818b0c 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -16,7 +16,6 @@
#include "code_generator.h"
#include "base/globals.h"
-#include "mirror/method_type.h"
#ifdef ART_ENABLE_CODEGEN_arm
#include "code_generator_arm_vixl.h"
@@ -210,23 +209,11 @@ uint64_t CodeGenerator::GetJitClassRootIndex(TypeReference type_reference) {
return code_generation_data_->GetJitClassRootIndex(type_reference);
}
-void CodeGenerator::ReserveJitMethodTypeRoot(ProtoReference proto_reference,
- Handle<mirror::MethodType> method_type) {
- DCHECK(code_generation_data_ != nullptr);
- code_generation_data_->ReserveJitMethodTypeRoot(proto_reference, method_type);
-}
-
-uint64_t CodeGenerator::GetJitMethodTypeRootIndex(ProtoReference proto_reference) {
- DCHECK(code_generation_data_ != nullptr);
- return code_generation_data_->GetJitMethodTypeRootIndex(proto_reference);
-}
-
void CodeGenerator::EmitJitRootPatches([[maybe_unused]] uint8_t* code,
[[maybe_unused]] const uint8_t* roots_data) {
DCHECK(code_generation_data_ != nullptr);
DCHECK_EQ(code_generation_data_->GetNumberOfJitStringRoots(), 0u);
DCHECK_EQ(code_generation_data_->GetNumberOfJitClassRoots(), 0u);
- DCHECK_EQ(code_generation_data_->GetNumberOfJitMethodTypeRoots(), 0u);
}
uint32_t CodeGenerator::GetArrayLengthOffset(HArrayLength* array_length) {
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index 950bae5c8f..aec7b45a1a 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -29,12 +29,10 @@
#include "base/memory_region.h"
#include "base/pointer_size.h"
#include "class_root.h"
-#include "dex/proto_reference.h"
#include "dex/string_reference.h"
#include "dex/type_reference.h"
#include "graph_visualizer.h"
#include "locations.h"
-#include "mirror/method_type.h"
#include "nodes.h"
#include "oat/oat_quick_method_header.h"
#include "optimizing_compiler_stats.h"
@@ -836,9 +834,6 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
uint64_t GetJitStringRootIndex(StringReference string_reference);
void ReserveJitClassRoot(TypeReference type_reference, Handle<mirror::Class> klass);
uint64_t GetJitClassRootIndex(TypeReference type_reference);
- void ReserveJitMethodTypeRoot(ProtoReference proto_reference,
- Handle<mirror::MethodType> method_type);
- uint64_t GetJitMethodTypeRootIndex(ProtoReference proto_reference);
// Emit the patches associated with JIT roots. Only applies to JIT compiled code.
virtual void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data);
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index f61bb39ccc..e2b4344be9 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -35,7 +35,6 @@
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
-#include "mirror/method_type.h"
#include "mirror/object_reference.h"
#include "mirror/var_handle.h"
#include "optimizing/nodes.h"
@@ -1629,7 +1628,6 @@ CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
- jit_method_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
@@ -6826,31 +6824,20 @@ void InstructionCodeGeneratorX86_64::VisitLoadMethodHandle(HLoadMethodHandle* lo
codegen_->GenerateLoadMethodHandleRuntimeCall(load);
}
-Label* CodeGeneratorX86_64::NewJitRootMethodTypePatch(const DexFile& dex_file,
- dex::ProtoIndex proto_index,
- Handle<mirror::MethodType> handle) {
- ReserveJitMethodTypeRoot(ProtoReference(&dex_file, proto_index), handle);
- // Add a patch entry and return the label.
- jit_method_type_patches_.emplace_back(&dex_file, proto_index.index_);
- PatchInfo<Label>* info = &jit_method_type_patches_.back();
- return &info->label;
-}
-
void LocationsBuilderX86_64::VisitLoadMethodType(HLoadMethodType* load) {
LocationSummary* locations =
new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
if (load->GetLoadKind() == HLoadMethodType::LoadKind::kRuntimeCall) {
- Location location = Location::RegisterLocation(RAX);
- CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
+ Location location = Location::RegisterLocation(RAX);
+ CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
} else {
+ DCHECK_EQ(load->GetLoadKind(), HLoadMethodType::LoadKind::kBssEntry);
locations->SetOut(Location::RequiresRegister());
- if (load->GetLoadKind() == HLoadMethodType::LoadKind::kBssEntry) {
- if (codegen_->EmitNonBakerReadBarrier()) {
- // For non-Baker read barrier we have a temp-clobbering call.
- } else {
- // Rely on the pResolveMethodType to save everything.
- locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
- }
+ if (codegen_->EmitNonBakerReadBarrier()) {
+ // For non-Baker read barrier we have a temp-clobbering call.
+ } else {
+ // Rely on the pResolveMethodType to save everything.
+ locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
}
}
}
@@ -6877,17 +6864,6 @@ void InstructionCodeGeneratorX86_64::VisitLoadMethodType(HLoadMethodType* load)
__ Bind(slow_path->GetExitLabel());
return;
}
- case HLoadMethodType::LoadKind::kJitTableAddress: {
- Address address = Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset,
- /* no_rip= */ true);
- Handle<mirror::MethodType> method_type = load->GetMethodType();
- DCHECK(method_type != nullptr);
- Label* fixup_label = codegen_->NewJitRootMethodTypePatch(
- load->GetDexFile(), load->GetProtoIndex(), method_type);
- GenerateGcRootFieldLoad(
- load, out_loc, address, fixup_label, codegen_->GetCompilerReadBarrierOption());
- return;
- }
default:
DCHECK_EQ(load->GetLoadKind(), HLoadMethodType::LoadKind::kRuntimeCall);
codegen_->GenerateLoadMethodTypeRuntimeCall(load);
@@ -8567,12 +8543,6 @@ void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots
uint64_t index_in_table = GetJitClassRootIndex(type_reference);
PatchJitRootUse(code, roots_data, info, index_in_table);
}
-
- for (const PatchInfo<Label>& info : jit_method_type_patches_) {
- ProtoReference proto_reference(info.target_dex_file, dex::ProtoIndex(info.offset_or_index));
- uint64_t index_in_table = GetJitMethodTypeRootIndex(proto_reference);
- PatchJitRootUse(code, roots_data, info, index_in_table);
- }
}
bool LocationsBuilderX86_64::CpuHasAvxFeatureFlag() {
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index ddeb33a261..81c8ead32e 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -546,9 +546,6 @@ class CodeGeneratorX86_64 : public CodeGenerator {
Label* NewJitRootClassPatch(const DexFile& dex_file,
dex::TypeIndex type_index,
Handle<mirror::Class> handle);
- Label* NewJitRootMethodTypePatch(const DexFile& dex_file,
- dex::ProtoIndex proto_index,
- Handle<mirror::MethodType> method_type);
void LoadBootImageAddress(CpuRegister reg, uint32_t boot_image_reference);
void LoadIntrinsicDeclaringClass(CpuRegister reg, HInvoke* invoke);
@@ -768,8 +765,6 @@ class CodeGeneratorX86_64 : public CodeGenerator {
ArenaDeque<PatchInfo<Label>> jit_string_patches_;
// Patches for class literals in JIT compiled code.
ArenaDeque<PatchInfo<Label>> jit_class_patches_;
- // Patches for method type in JIT compiled code.
- ArenaDeque<PatchInfo<Label>> jit_method_type_patches_;
// Fixups for jump tables need to be handled specially.
ArenaVector<JumpTableRIPFixup*> fixups_to_jump_tables_;
diff --git a/compiler/optimizing/instruction_builder.cc b/compiler/optimizing/instruction_builder.cc
index c97c78ca17..3a64769a8b 100644
--- a/compiler/optimizing/instruction_builder.cc
+++ b/compiler/optimizing/instruction_builder.cc
@@ -2720,10 +2720,9 @@ void HInstructionBuilder::BuildLoadMethodType(dex::ProtoIndex proto_index, uint3
const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
HLoadMethodType* load_method_type =
new (allocator_) HLoadMethodType(graph_->GetCurrentMethod(), proto_index, dex_file, dex_pc);
- HSharpening::ProcessLoadMethodType(load_method_type,
- code_generator_,
- *dex_compilation_unit_,
- graph_->GetHandleCache()->GetHandles());
+ if (!code_generator_->GetCompilerOptions().IsJitCompiler()) {
+ load_method_type->SetLoadKind(HLoadMethodType::LoadKind::kBssEntry);
+ }
AppendInstruction(load_method_type);
}
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 825134497d..1e3aca64db 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -7218,8 +7218,6 @@ class HLoadMethodType final : public HInstruction {
enum class LoadKind {
// Load from an entry in the .bss section using a PC-relative load.
kBssEntry,
- // Load from the root table associated with the JIT compiled method.
- kJitTableAddress,
// Load using a single runtime call.
kRuntimeCall,
@@ -7256,10 +7254,6 @@ class HLoadMethodType final : public HInstruction {
dex::ProtoIndex GetProtoIndex() const { return proto_index_; }
- Handle<mirror::MethodType> GetMethodType() const { return method_type_; }
-
- void SetMethodType(Handle<mirror::MethodType> method_type) { method_type_ = method_type; }
-
const DexFile& GetDexFile() const { return dex_file_; }
static SideEffects SideEffectsForArchRuntimeCalls() {
@@ -7289,8 +7283,6 @@ class HLoadMethodType final : public HInstruction {
const dex::ProtoIndex proto_index_;
const DexFile& dex_file_;
-
- Handle<mirror::MethodType> method_type_;
};
std::ostream& operator<<(std::ostream& os, HLoadMethodType::LoadKind rhs);
@@ -7301,7 +7293,6 @@ inline void HLoadMethodType::SetLoadKind(LoadKind load_kind) {
DCHECK(GetBlock() == nullptr);
DCHECK(GetEnvironment() == nullptr);
DCHECK_EQ(GetLoadKind(), LoadKind::kRuntimeCall);
- DCHECK_IMPLIES(GetLoadKind() == LoadKind::kJitTableAddress, GetMethodType() != nullptr);
SetPackedField<LoadKindField>(load_kind);
}
diff --git a/compiler/optimizing/sharpening.cc b/compiler/optimizing/sharpening.cc
index 1b6a9fb601..cb94491b8e 100644
--- a/compiler/optimizing/sharpening.cc
+++ b/compiler/optimizing/sharpening.cc
@@ -471,45 +471,4 @@ void HSharpening::ProcessLoadString(
load_string->SetLoadKind(load_kind);
}
-void HSharpening::ProcessLoadMethodType(
- HLoadMethodType* load_method_type,
- CodeGenerator* codegen,
- const DexCompilationUnit& dex_compilation_unit,
- VariableSizedHandleScope* handles) {
- const CompilerOptions& compiler_options = codegen->GetCompilerOptions();
-
- HLoadMethodType::LoadKind desired_load_kind = static_cast<HLoadMethodType::LoadKind>(-1);
-
- if (compiler_options.IsJitCompiler()) {
- DCHECK(!compiler_options.GetCompilePic());
- Runtime* runtime = Runtime::Current();
- ClassLinker* class_linker = runtime->GetClassLinker();
- ScopedObjectAccess soa(Thread::Current());
- ObjPtr<mirror::MethodType> method_type =
- class_linker->ResolveMethodType(Thread::Current(),
- load_method_type->GetProtoIndex(),
- dex_compilation_unit.GetDexCache(),
- dex_compilation_unit.GetClassLoader());
-
- if (method_type != nullptr) {
- load_method_type->SetMethodType(handles->NewHandle(method_type));
- desired_load_kind = HLoadMethodType::LoadKind::kJitTableAddress;
- } else {
- DCHECK_EQ(load_method_type->GetLoadKind(), HLoadMethodType::LoadKind::kRuntimeCall);
- desired_load_kind = HLoadMethodType::LoadKind::kRuntimeCall;
- Thread::Current()->ClearException();
- }
- } else {
- if (compiler_options.GetCompilePic()) {
- desired_load_kind = HLoadMethodType::LoadKind::kBssEntry;
- } else {
- // Test configuration, do not sharpen.
- desired_load_kind = HLoadMethodType::LoadKind::kRuntimeCall;
- }
- }
-
- DCHECK_NE(desired_load_kind, static_cast<HLoadMethodType::LoadKind>(-1));
- load_method_type->SetLoadKind(desired_load_kind);
-}
-
} // namespace art
diff --git a/compiler/optimizing/sharpening.h b/compiler/optimizing/sharpening.h
index 88d3b2f604..6dfe904f27 100644
--- a/compiler/optimizing/sharpening.h
+++ b/compiler/optimizing/sharpening.h
@@ -27,7 +27,7 @@ class CodeGenerator;
class DexCompilationUnit;
// Utility methods that try to improve the way we dispatch methods, and access
-// types, strings and method types.
+// types and strings.
class HSharpening {
public:
// Used by the builder and InstructionSimplifier.
@@ -54,12 +54,6 @@ class HSharpening {
CodeGenerator* codegen,
const DexCompilationUnit& dex_compilation_unit,
VariableSizedHandleScope* handles);
-
- // Used by the builder.
- static void ProcessLoadMethodType(HLoadMethodType* load_method_type,
- CodeGenerator* codegen,
- const DexCompilationUnit& dex_compilation_unit,
- VariableSizedHandleScope* handles);
};
} // namespace art
diff --git a/runtime/jit/jit.cc b/runtime/jit/jit.cc
index 0dd18b503d..4d395c8528 100644
--- a/runtime/jit/jit.cc
+++ b/runtime/jit/jit.cc
@@ -37,7 +37,7 @@
#include "gc/space/image_space.h"
#include "interpreter/interpreter.h"
#include "jit-inl.h"
-#include "jit_code_cache-inl.h"
+#include "jit_code_cache.h"
#include "jit_create.h"
#include "jni/java_vm_ext.h"
#include "mirror/method_handle_impl.h"
@@ -1883,11 +1883,6 @@ void Jit::VisitRoots(RootVisitor* visitor) {
if (thread_pool_ != nullptr) {
thread_pool_->VisitRoots(visitor);
}
-
- // MethodType-s are weakly interned, but a MethodType can be referenced from JIT-ted code. We
- // visit JitCodeCache to treat such MethodType-s as strongly reachable.
- UnbufferedRootVisitor root_visitor(visitor, RootInfo(kRootStickyClass));
- code_cache_->VisitRootTables(root_visitor);
}
void JitThreadPool::VisitRoots(RootVisitor* visitor) {
diff --git a/runtime/jit/jit_code_cache-inl.h b/runtime/jit/jit_code_cache-inl.h
deleted file mode 100644
index 5d5354a087..0000000000
--- a/runtime/jit/jit_code_cache-inl.h
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright 2024 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ART_RUNTIME_JIT_JIT_CODE_CACHE_INL_H_
-#define ART_RUNTIME_JIT_JIT_CODE_CACHE_INL_H_
-
-#include "gc_root.h"
-#include "jit/jit_code_cache.h"
-#include "thread.h"
-
-namespace art HIDDEN {
-
-class ArtMethod;
-
-namespace jit {
-
-template<typename RootVisitorType>
-EXPORT void JitCodeCache::VisitRootTables(RootVisitorType& visitor) {
- Thread* self = Thread::Current();
- ScopedDebugDisallowReadBarriers sddrb(self);
- MutexLock mu(self, *Locks::jit_lock_);
-
- for (auto& [_, method_types] : method_types_map_) {
- for (auto& method_type : method_types) {
- visitor.VisitRoot(method_type.AddressWithoutBarrier());
- }
- }
-}
-
-} // namespace jit
-} // namespace art
-
-#endif // ART_RUNTIME_JIT_JIT_CODE_CACHE_INL_H_
-
-
diff --git a/runtime/jit/jit_code_cache.cc b/runtime/jit/jit_code_cache.cc
index 5f0bb4e719..4b69dc5c01 100644
--- a/runtime/jit/jit_code_cache.cc
+++ b/runtime/jit/jit_code_cache.cc
@@ -50,7 +50,6 @@
#include "jit/profiling_info.h"
#include "jit/jit_scoped_code_cache_write.h"
#include "linear_alloc.h"
-#include "mirror/method_type.h"
#include "oat/oat_file-inl.h"
#include "oat/oat_quick_method_header.h"
#include "object_callbacks.h"
@@ -60,7 +59,6 @@
#include "thread-current-inl.h"
#include "thread-inl.h"
#include "thread_list.h"
-#include "well_known_classes-inl.h"
namespace art HIDDEN {
namespace jit {
@@ -438,30 +436,16 @@ void JitCodeCache::SweepRootTables(IsMarkedVisitor* visitor) {
if (new_object != object) {
roots[i] = GcRoot<mirror::Object>(new_object);
}
- } else if (object->IsClass<kDefaultVerifyFlags>()) {
+ } else {
mirror::Object* new_klass = visitor->IsMarked(object);
if (new_klass == nullptr) {
roots[i] = GcRoot<mirror::Object>(Runtime::GetWeakClassSentinel());
} else if (new_klass != object) {
roots[i] = GcRoot<mirror::Object>(new_klass);
}
- } else {
- mirror::Object* new_method_type = visitor->IsMarked(object);
-
- // The MethodType have been visited during VisitConcurrentRoots, so they must be live.
- DCHECK_NE(new_method_type, nullptr) << "old-method-type" << object;
- ObjPtr<mirror::Class> method_type_class =
- WellKnownClasses::java_lang_invoke_MethodType.Get<kWithoutReadBarrier>();
- DCHECK_EQ((new_method_type->GetClass<kVerifyNone, kWithoutReadBarrier>()),
- method_type_class.Ptr());
-
- if (new_method_type != object) {
- roots[i] = GcRoot<mirror::Object>(new_method_type);
- }
}
}
}
-
// Walk over inline caches to clear entries containing unloaded classes.
for (const auto& [_, info] : profiling_infos_) {
InlineCache* caches = info->GetInlineCaches();
@@ -583,7 +567,6 @@ void JitCodeCache::RemoveMethodsIn(Thread* self, const LinearAlloc& alloc) {
if (alloc.ContainsUnsafe(it->second)) {
method_headers.insert(OatQuickMethodHeader::FromCodePointer(it->first));
VLOG(jit) << "JIT removed " << it->second->PrettyMethod() << ": " << it->first;
- method_types_map_.erase(it->first);
zombie_code_.erase(it->first);
processed_zombie_code_.erase(it->first);
it = method_code_map_.erase(it);
@@ -772,27 +755,6 @@ bool JitCodeCache::Commit(Thread* self,
} else {
ScopedDebugDisallowReadBarriers sddrb(self);
method_code_map_.Put(code_ptr, method);
-
- // Searching for MethodType-s in roots. They need to be treated as strongly reachable while
- // the corresponding compiled code is not removed.
- ObjPtr<mirror::Class> method_type_class =
- WellKnownClasses::java_lang_invoke_MethodType.Get<kWithoutReadBarrier>();
-
- auto method_types_in_roots = std::vector<GcRoot<mirror::MethodType>>();
-
- for (auto root : roots) {
- ObjPtr<mirror::Class> klass = root->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>();
- if (klass == method_type_class ||
- klass == ReadBarrier::IsMarked(method_type_class.Ptr()) ||
- ReadBarrier::IsMarked(klass.Ptr()) == method_type_class) {
- ObjPtr<mirror::MethodType> mt = ObjPtr<mirror::MethodType>::DownCast(root.Get());
- method_types_in_roots.emplace_back(GcRoot(mt));
- }
- }
-
- if (!method_types_in_roots.empty()) {
- method_types_map_.Put(code_ptr, method_types_in_roots);
- }
}
if (compilation_kind == CompilationKind::kOsr) {
ScopedDebugDisallowReadBarriers sddrb(self);
@@ -892,7 +854,6 @@ bool JitCodeCache::RemoveMethodLocked(ArtMethod* method, bool release_memory) {
FreeCodeAndData(it->first);
}
VLOG(jit) << "JIT removed " << it->second->PrettyMethod() << ": " << it->first;
- method_types_map_.erase(it->first);
it = method_code_map_.erase(it);
} else {
++it;
@@ -1147,7 +1108,6 @@ void JitCodeCache::RemoveUnmarkedCode(Thread* self) {
} else {
OatQuickMethodHeader* header = OatQuickMethodHeader::FromCodePointer(code_ptr);
method_headers.insert(header);
- method_types_map_.erase(header->GetCode());
method_code_map_.erase(header->GetCode());
VLOG(jit) << "JIT removed " << *it;
it = processed_zombie_code_.erase(it);
diff --git a/runtime/jit/jit_code_cache.h b/runtime/jit/jit_code_cache.h
index 6ca79d560f..3dd57121ca 100644
--- a/runtime/jit/jit_code_cache.h
+++ b/runtime/jit/jit_code_cache.h
@@ -356,10 +356,6 @@ class JitCodeCache {
bool IsOsrCompiled(ArtMethod* method) REQUIRES(!Locks::jit_lock_);
- // Visit GC roots (except j.l.Class and j.l.String) held by JIT-ed code.
- template<typename RootVisitorType>
- EXPORT void VisitRootTables(RootVisitorType& visitor) NO_THREAD_SAFETY_ANALYSIS;
-
void SweepRootTables(IsMarkedVisitor* visitor)
REQUIRES(!Locks::jit_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -553,11 +549,6 @@ class JitCodeCache {
// Holds compiled code associated to the ArtMethod.
SafeMap<const void*, ArtMethod*> method_code_map_ GUARDED_BY(Locks::jit_lock_);
- // MethodType-s referenced by a compiled code. A subset of method_code_map_ used to treat a
- // MethodType as strongly reachable from the corresponding code.
- SafeMap<const void*, std::vector<GcRoot<mirror::MethodType>>> method_types_map_
- GUARDED_BY(Locks::jit_lock_);
-
// Holds compiled code associated to the ArtMethod. Used when pre-jitting
// methods whose entrypoints have the resolution stub.
SafeMap<ArtMethod*, const void*> saved_compiled_methods_map_ GUARDED_BY(Locks::jit_lock_);
diff --git a/runtime/well_known_classes.h b/runtime/well_known_classes.h
index f29daad204..bd8bbe0108 100644
--- a/runtime/well_known_classes.h
+++ b/runtime/well_known_classes.h
@@ -251,8 +251,6 @@ struct EXPORT WellKnownClasses {
java_lang_StackOverflowError;
static constexpr ClassFromField<&java_lang_Thread_daemon> java_lang_Thread;
static constexpr ClassFromField<&java_lang_ThreadGroup_groups> java_lang_ThreadGroup;
- static constexpr ClassFromMethod<&java_lang_invoke_MethodType_makeImpl>
- java_lang_invoke_MethodType;
static constexpr ClassFromMethod<&java_lang_reflect_InvocationTargetException_init>
java_lang_reflect_InvocationTargetException;
static constexpr ClassFromMethod<&java_lang_reflect_Parameter_init>
diff --git a/test/979-const-method-handle/src/Main.java b/test/979-const-method-handle/src/Main.java
index 17d5d91c50..72d529b68b 100644
--- a/test/979-const-method-handle/src/Main.java
+++ b/test/979-const-method-handle/src/Main.java
@@ -29,7 +29,7 @@ class Main {
* Number of iterations run to attempt to trigger JIT compilation. These tests run on ART and
* the RI so they iterate rather than using the ART only native method ensureJitCompiled().
*/
- private static final int ITERATIONS_FOR_JIT = 30000;
+ private static final int ITERATIONS_FOR_JIT = 12000;
/** A static field updated by method handle getters and setters. */
private static String name = "default";