author Nicolas Geoffray <ngeoffray@google.com> 2024-04-11 16:13:45 +0000
committer Treehugger Robot <android-test-infra-autosubmit@system.gserviceaccount.com> 2024-04-11 17:52:09 +0000
commit 69c9ea4f93a688ff50e08060be37bcfd3f3e9910 (patch)
tree 2622c38549bc219b7d2dc7c990eb83a30cc6b571
parent e8da7cd1d0e7d3535c82f8d05adcef3edd43cd40 (diff)
Revert "x86_64: Add JIT support for LoadMethodType."
This reverts commit 53ca944020bb86199f6f80d8594d5deb1b1d46dd.

Bug: 297147201
Reason for revert: Crash on bot
Change-Id: Ibf3b53a8fe67aa633686990881a96acb783af9a3
-rw-r--r--  compiler/optimizing/code_generation_data.cc  |  11
-rw-r--r--  compiler/optimizing/code_generation_data.h   |  30
-rw-r--r--  compiler/optimizing/code_generator.cc        |  13
-rw-r--r--  compiler/optimizing/code_generator.h         |   5
-rw-r--r--  compiler/optimizing/code_generator_x86_64.cc |  46
-rw-r--r--  compiler/optimizing/code_generator_x86_64.h  |   5
-rw-r--r--  compiler/optimizing/instruction_builder.cc   |   7
-rw-r--r--  compiler/optimizing/nodes.h                  |   9
-rw-r--r--  compiler/optimizing/sharpening.cc            |  41
-rw-r--r--  compiler/optimizing/sharpening.h             |   8
-rw-r--r--  runtime/art_method-inl.h                     |   7
-rw-r--r--  runtime/jit/jit_code_cache-inl.h             |  82
-rw-r--r--  runtime/jit/jit_code_cache.cc                | 113
-rw-r--r--  runtime/jit/jit_code_cache.h                 |  11
-rw-r--r--  runtime/well_known_classes.h                 |   2
15 files changed, 53 insertions, 337 deletions
diff --git a/compiler/optimizing/code_generation_data.cc b/compiler/optimizing/code_generation_data.cc
index afc4f62f0f..7b23d46dc5 100644
--- a/compiler/optimizing/code_generation_data.cc
+++ b/compiler/optimizing/code_generation_data.cc
@@ -20,7 +20,6 @@
#include "intern_table.h"
#include "mirror/object-inl.h"
#include "runtime.h"
-#include "well_known_classes-inl.h"
namespace art HIDDEN {
@@ -53,16 +52,6 @@ void CodeGenerationData::EmitJitRoots(
entry.second = index;
++index;
}
- for (auto& entry : jit_method_type_roots_) {
- // Update the `roots` with the MethodType, and replace the address temporarily
- // stored to the index in the table.
- uint64_t address = entry.second;
- roots->emplace_back(reinterpret_cast<StackReference<mirror::Object>*>(address));
- DCHECK(roots->back() != nullptr);
- DCHECK(roots->back()->InstanceOf(WellKnownClasses::java_lang_invoke_MethodType.Get()));
- entry.second = index;
- ++index;
- }
}
} // namespace art
diff --git a/compiler/optimizing/code_generation_data.h b/compiler/optimizing/code_generation_data.h
index 0d4db66ab4..e78ba8f574 100644
--- a/compiler/optimizing/code_generation_data.h
+++ b/compiler/optimizing/code_generation_data.h
@@ -23,12 +23,10 @@
#include "base/scoped_arena_allocator.h"
#include "base/scoped_arena_containers.h"
#include "code_generator.h"
-#include "dex/proto_reference.h"
#include "dex/string_reference.h"
#include "dex/type_reference.h"
#include "handle.h"
#include "mirror/class.h"
-#include "mirror/method_type.h"
#include "mirror/object.h"
#include "mirror/string.h"
#include "stack_map_stream.h"
@@ -84,24 +82,8 @@ class CodeGenerationData : public DeletableArenaObject<kArenaAllocCodeGenerator>
return jit_class_roots_.size();
}
- void ReserveJitMethodTypeRoot(ProtoReference proto_reference,
- Handle<mirror::MethodType> method_type) {
- jit_method_type_roots_.Overwrite(proto_reference,
- reinterpret_cast64<uint64_t>(method_type.GetReference()));
- }
-
- uint64_t GetJitMethodTypeRootIndex(ProtoReference proto_reference) const {
- return jit_method_type_roots_.Get(proto_reference);
- }
-
- size_t GetNumberOfJitMethodTypeRoots() const {
- return jit_method_type_roots_.size();
- }
-
size_t GetNumberOfJitRoots() const {
- return GetNumberOfJitStringRoots() +
- GetNumberOfJitClassRoots() +
- GetNumberOfJitMethodTypeRoots();
+ return GetNumberOfJitStringRoots() + GetNumberOfJitClassRoots();
}
void EmitJitRoots(/*out*/std::vector<Handle<mirror::Object>>* roots)
@@ -115,9 +97,7 @@ class CodeGenerationData : public DeletableArenaObject<kArenaAllocCodeGenerator>
jit_string_roots_(StringReferenceValueComparator(),
allocator_.Adapter(kArenaAllocCodeGenerator)),
jit_class_roots_(TypeReferenceValueComparator(),
- allocator_.Adapter(kArenaAllocCodeGenerator)),
- jit_method_type_roots_(ProtoReferenceValueComparator(),
- allocator_.Adapter(kArenaAllocCodeGenerator)) {
+ allocator_.Adapter(kArenaAllocCodeGenerator)) {
slow_paths_.reserve(kDefaultSlowPathsCapacity);
}
@@ -136,12 +116,6 @@ class CodeGenerationData : public DeletableArenaObject<kArenaAllocCodeGenerator>
// Entries are initially added with a pointer in the handle zone, and `EmitJitRoots`
// will compute all the indices.
ScopedArenaSafeMap<TypeReference, uint64_t, TypeReferenceValueComparator> jit_class_roots_;
-
- // Maps a ProtoReference (dex_file, proto_index) to the index in the literal table.
- // Entries are initially added with a pointer in the handle zone, and `EmitJitRoots`
- // will compute all the indices.
- ScopedArenaSafeMap<ProtoReference, uint64_t, ProtoReferenceValueComparator>
- jit_method_type_roots_;
};
} // namespace art
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 51714ef548..88bd818b0c 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -16,7 +16,6 @@
#include "code_generator.h"
#include "base/globals.h"
-#include "mirror/method_type.h"
#ifdef ART_ENABLE_CODEGEN_arm
#include "code_generator_arm_vixl.h"
@@ -210,23 +209,11 @@ uint64_t CodeGenerator::GetJitClassRootIndex(TypeReference type_reference) {
return code_generation_data_->GetJitClassRootIndex(type_reference);
}
-void CodeGenerator::ReserveJitMethodTypeRoot(ProtoReference proto_reference,
- Handle<mirror::MethodType> method_type) {
- DCHECK(code_generation_data_ != nullptr);
- code_generation_data_->ReserveJitMethodTypeRoot(proto_reference, method_type);
-}
-
-uint64_t CodeGenerator::GetJitMethodTypeRootIndex(ProtoReference proto_reference) {
- DCHECK(code_generation_data_ != nullptr);
- return code_generation_data_->GetJitMethodTypeRootIndex(proto_reference);
-}
-
void CodeGenerator::EmitJitRootPatches([[maybe_unused]] uint8_t* code,
[[maybe_unused]] const uint8_t* roots_data) {
DCHECK(code_generation_data_ != nullptr);
DCHECK_EQ(code_generation_data_->GetNumberOfJitStringRoots(), 0u);
DCHECK_EQ(code_generation_data_->GetNumberOfJitClassRoots(), 0u);
- DCHECK_EQ(code_generation_data_->GetNumberOfJitMethodTypeRoots(), 0u);
}
uint32_t CodeGenerator::GetArrayLengthOffset(HArrayLength* array_length) {
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index 95e73d699f..94831cab9f 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -29,12 +29,10 @@
#include "base/memory_region.h"
#include "base/pointer_size.h"
#include "class_root.h"
-#include "dex/proto_reference.h"
#include "dex/string_reference.h"
#include "dex/type_reference.h"
#include "graph_visualizer.h"
#include "locations.h"
-#include "mirror/method_type.h"
#include "nodes.h"
#include "oat/oat_quick_method_header.h"
#include "optimizing_compiler_stats.h"
@@ -834,9 +832,6 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
uint64_t GetJitStringRootIndex(StringReference string_reference);
void ReserveJitClassRoot(TypeReference type_reference, Handle<mirror::Class> klass);
uint64_t GetJitClassRootIndex(TypeReference type_reference);
- void ReserveJitMethodTypeRoot(ProtoReference proto_reference,
- Handle<mirror::MethodType> method_type);
- uint64_t GetJitMethodTypeRootIndex(ProtoReference proto_reference);
// Emit the patches associated with JIT roots. Only applies to JIT compiled code.
virtual void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data);
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 4d74a29d40..94ec6accff 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -35,7 +35,6 @@
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
-#include "mirror/method_type.h"
#include "mirror/object_reference.h"
#include "mirror/var_handle.h"
#include "optimizing/nodes.h"
@@ -1618,7 +1617,6 @@ CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
- jit_method_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
@@ -6804,31 +6802,20 @@ void InstructionCodeGeneratorX86_64::VisitLoadMethodHandle(HLoadMethodHandle* lo
codegen_->GenerateLoadMethodHandleRuntimeCall(load);
}
-Label* CodeGeneratorX86_64::NewJitRootMethodTypePatch(const DexFile& dex_file,
- dex::ProtoIndex proto_index,
- Handle<mirror::MethodType> handle) {
- ReserveJitMethodTypeRoot(ProtoReference(&dex_file, proto_index), handle);
- // Add a patch entry and return the label.
- jit_method_type_patches_.emplace_back(&dex_file, proto_index.index_);
- PatchInfo<Label>* info = &jit_method_type_patches_.back();
- return &info->label;
-}
-
void LocationsBuilderX86_64::VisitLoadMethodType(HLoadMethodType* load) {
LocationSummary* locations =
new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
if (load->GetLoadKind() == HLoadMethodType::LoadKind::kRuntimeCall) {
- Location location = Location::RegisterLocation(RAX);
- CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
+ Location location = Location::RegisterLocation(RAX);
+ CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
} else {
+ DCHECK_EQ(load->GetLoadKind(), HLoadMethodType::LoadKind::kBssEntry);
locations->SetOut(Location::RequiresRegister());
- if (load->GetLoadKind() == HLoadMethodType::LoadKind::kBssEntry) {
- if (codegen_->EmitNonBakerReadBarrier()) {
- // For non-Baker read barrier we have a temp-clobbering call.
- } else {
- // Rely on the pResolveMethodType to save everything.
- locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
- }
+ if (codegen_->EmitNonBakerReadBarrier()) {
+ // For non-Baker read barrier we have a temp-clobbering call.
+ } else {
+ // Rely on the pResolveMethodType to save everything.
+ locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
}
}
}
@@ -6855,17 +6842,6 @@ void InstructionCodeGeneratorX86_64::VisitLoadMethodType(HLoadMethodType* load)
__ Bind(slow_path->GetExitLabel());
return;
}
- case HLoadMethodType::LoadKind::kJitTableAddress: {
- Address address = Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset,
- /* no_rip= */ true);
- Handle<mirror::MethodType> method_type = load->GetMethodType();
- DCHECK(method_type != nullptr);
- Label* fixup_label = codegen_->NewJitRootMethodTypePatch(
- load->GetDexFile(), load->GetProtoIndex(), method_type);
- GenerateGcRootFieldLoad(
- load, out_loc, address, fixup_label, codegen_->GetCompilerReadBarrierOption());
- return;
- }
default:
DCHECK_EQ(load->GetLoadKind(), HLoadMethodType::LoadKind::kRuntimeCall);
codegen_->GenerateLoadMethodTypeRuntimeCall(load);
@@ -8482,12 +8458,6 @@ void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots
uint64_t index_in_table = GetJitClassRootIndex(type_reference);
PatchJitRootUse(code, roots_data, info, index_in_table);
}
-
- for (const PatchInfo<Label>& info : jit_method_type_patches_) {
- ProtoReference proto_reference(info.target_dex_file, dex::ProtoIndex(info.offset_or_index));
- uint64_t index_in_table = GetJitMethodTypeRootIndex(proto_reference);
- PatchJitRootUse(code, roots_data, info, index_in_table);
- }
}
bool LocationsBuilderX86_64::CpuHasAvxFeatureFlag() {
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index 0cb34858f9..b8e2456381 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -543,9 +543,6 @@ class CodeGeneratorX86_64 : public CodeGenerator {
Label* NewJitRootClassPatch(const DexFile& dex_file,
dex::TypeIndex type_index,
Handle<mirror::Class> handle);
- Label* NewJitRootMethodTypePatch(const DexFile& dex_file,
- dex::ProtoIndex proto_index,
- Handle<mirror::MethodType> method_type);
void LoadBootImageAddress(CpuRegister reg, uint32_t boot_image_reference);
void LoadIntrinsicDeclaringClass(CpuRegister reg, HInvoke* invoke);
@@ -763,8 +760,6 @@ class CodeGeneratorX86_64 : public CodeGenerator {
ArenaDeque<PatchInfo<Label>> jit_string_patches_;
// Patches for class literals in JIT compiled code.
ArenaDeque<PatchInfo<Label>> jit_class_patches_;
- // Patches for method type in JIT compiled code.
- ArenaDeque<PatchInfo<Label>> jit_method_type_patches_;
// Fixups for jump tables need to be handled specially.
ArenaVector<JumpTableRIPFixup*> fixups_to_jump_tables_;
diff --git a/compiler/optimizing/instruction_builder.cc b/compiler/optimizing/instruction_builder.cc
index 3e9c42ba70..81970d2108 100644
--- a/compiler/optimizing/instruction_builder.cc
+++ b/compiler/optimizing/instruction_builder.cc
@@ -2697,10 +2697,9 @@ void HInstructionBuilder::BuildLoadMethodType(dex::ProtoIndex proto_index, uint3
const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
HLoadMethodType* load_method_type =
new (allocator_) HLoadMethodType(graph_->GetCurrentMethod(), proto_index, dex_file, dex_pc);
- HSharpening::ProcessLoadMethodType(load_method_type,
- code_generator_,
- *dex_compilation_unit_,
- graph_->GetHandleCache()->GetHandles());
+ if (!code_generator_->GetCompilerOptions().IsJitCompiler()) {
+ load_method_type->SetLoadKind(HLoadMethodType::LoadKind::kBssEntry);
+ }
AppendInstruction(load_method_type);
}
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index d9df752e92..37d17478ff 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -7208,8 +7208,6 @@ class HLoadMethodType final : public HInstruction {
enum class LoadKind {
// Load from an entry in the .bss section using a PC-relative load.
kBssEntry,
- // Load from the root table associated with the JIT compiled method.
- kJitTableAddress,
// Load using a single runtime call.
kRuntimeCall,
@@ -7246,10 +7244,6 @@ class HLoadMethodType final : public HInstruction {
dex::ProtoIndex GetProtoIndex() const { return proto_index_; }
- Handle<mirror::MethodType> GetMethodType() const { return method_type_; }
-
- void SetMethodType(Handle<mirror::MethodType> method_type) { method_type_ = method_type; }
-
const DexFile& GetDexFile() const { return dex_file_; }
static SideEffects SideEffectsForArchRuntimeCalls() {
@@ -7279,8 +7273,6 @@ class HLoadMethodType final : public HInstruction {
const dex::ProtoIndex proto_index_;
const DexFile& dex_file_;
-
- Handle<mirror::MethodType> method_type_;
};
std::ostream& operator<<(std::ostream& os, HLoadMethodType::LoadKind rhs);
@@ -7291,7 +7283,6 @@ inline void HLoadMethodType::SetLoadKind(LoadKind load_kind) {
DCHECK(GetBlock() == nullptr);
DCHECK(GetEnvironment() == nullptr);
DCHECK_EQ(GetLoadKind(), LoadKind::kRuntimeCall);
- DCHECK_IMPLIES(GetLoadKind() == LoadKind::kJitTableAddress, GetMethodType() != nullptr);
SetPackedField<LoadKindField>(load_kind);
}
diff --git a/compiler/optimizing/sharpening.cc b/compiler/optimizing/sharpening.cc
index e900b3ef3a..a6f86ff496 100644
--- a/compiler/optimizing/sharpening.cc
+++ b/compiler/optimizing/sharpening.cc
@@ -468,45 +468,4 @@ void HSharpening::ProcessLoadString(
load_string->SetLoadKind(load_kind);
}
-void HSharpening::ProcessLoadMethodType(
- HLoadMethodType* load_method_type,
- CodeGenerator* codegen,
- const DexCompilationUnit& dex_compilation_unit,
- VariableSizedHandleScope* handles) {
- const CompilerOptions& compiler_options = codegen->GetCompilerOptions();
-
- HLoadMethodType::LoadKind desired_load_kind = static_cast<HLoadMethodType::LoadKind>(-1);
-
- if (compiler_options.IsJitCompiler()) {
- DCHECK(!compiler_options.GetCompilePic());
- Runtime* runtime = Runtime::Current();
- ClassLinker* class_linker = runtime->GetClassLinker();
- ScopedObjectAccess soa(Thread::Current());
- ObjPtr<mirror::MethodType> method_type =
- class_linker->ResolveMethodType(Thread::Current(),
- load_method_type->GetProtoIndex(),
- dex_compilation_unit.GetDexCache(),
- dex_compilation_unit.GetClassLoader());
-
- if (method_type != nullptr) {
- load_method_type->SetMethodType(handles->NewHandle(method_type));
- desired_load_kind = HLoadMethodType::LoadKind::kJitTableAddress;
- } else {
- DCHECK_EQ(load_method_type->GetLoadKind(), HLoadMethodType::LoadKind::kRuntimeCall);
- desired_load_kind = HLoadMethodType::LoadKind::kRuntimeCall;
- Thread::Current()->ClearException();
- }
- } else {
- if (compiler_options.GetCompilePic()) {
- desired_load_kind = HLoadMethodType::LoadKind::kBssEntry;
- } else {
- // Test configuration, do not sharpen.
- desired_load_kind = HLoadMethodType::LoadKind::kRuntimeCall;
- }
- }
-
- DCHECK_NE(desired_load_kind, static_cast<HLoadMethodType::LoadKind>(-1));
- load_method_type->SetLoadKind(desired_load_kind);
-}
-
} // namespace art
diff --git a/compiler/optimizing/sharpening.h b/compiler/optimizing/sharpening.h
index 88d3b2f604..6dfe904f27 100644
--- a/compiler/optimizing/sharpening.h
+++ b/compiler/optimizing/sharpening.h
@@ -27,7 +27,7 @@ class CodeGenerator;
class DexCompilationUnit;
// Utility methods that try to improve the way we dispatch methods, and access
-// types, strings and method types.
+// types and strings.
class HSharpening {
public:
// Used by the builder and InstructionSimplifier.
@@ -54,12 +54,6 @@ class HSharpening {
CodeGenerator* codegen,
const DexCompilationUnit& dex_compilation_unit,
VariableSizedHandleScope* handles);
-
- // Used by the builder.
- static void ProcessLoadMethodType(HLoadMethodType* load_method_type,
- CodeGenerator* codegen,
- const DexCompilationUnit& dex_compilation_unit,
- VariableSizedHandleScope* handles);
};
} // namespace art
diff --git a/runtime/art_method-inl.h b/runtime/art_method-inl.h
index 024272a10d..5e31667509 100644
--- a/runtime/art_method-inl.h
+++ b/runtime/art_method-inl.h
@@ -31,8 +31,6 @@
#include "dex/signature.h"
#include "gc_root-inl.h"
#include "imtable-inl.h"
-#include "jit/jit.h"
-#include "jit/jit_code_cache-inl.h"
#include "jit/jit_options.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache-inl.h"
@@ -621,11 +619,6 @@ void ArtMethod::VisitRoots(RootVisitorType& visitor, PointerSize pointer_size) {
}
}
}
-
- Runtime* runtime = Runtime::Current();
- if (runtime->GetJit() != nullptr) {
- runtime->GetJit()->GetCodeCache()->VisitRootTables(this, visitor);
- }
}
template<typename RootVisitorType>
diff --git a/runtime/jit/jit_code_cache-inl.h b/runtime/jit/jit_code_cache-inl.h
deleted file mode 100644
index 6f2a9ddaae..0000000000
--- a/runtime/jit/jit_code_cache-inl.h
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright 2024 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ART_RUNTIME_JIT_JIT_CODE_CACHE_INL_H_
-#define ART_RUNTIME_JIT_JIT_CODE_CACHE_INL_H_
-
-#include "jit/jit_code_cache.h"
-
-#include "base/macros.h"
-#include "read_barrier.h"
-#include "thread.h"
-#include "well_known_classes-inl.h"
-
-namespace art HIDDEN {
-
-class ArtMethod;
-
-namespace jit {
-
-template<typename RootVisitorType>
-EXPORT void JitCodeCache::VisitRootTables(ArtMethod* method, RootVisitorType& visitor) {
- if (method->IsNative()) {
- return;
- }
-
- Thread* self = Thread::Current();
- ScopedDebugDisallowReadBarriers sddrb(self);
- MutexLock mu(self, *Locks::jit_lock_);
-
- auto range = method_code_map_reversed_.equal_range(method);
-
- for (auto it = range.first; it != range.second; ++it) {
- uint32_t number_of_roots = 0;
- const uint8_t* root_table = GetRootTable(it->second, &number_of_roots);
- uint8_t* roots_data = private_region_.IsInDataSpace(root_table)
- ? private_region_.GetWritableDataAddress(root_table)
- : shared_region_.GetWritableDataAddress(root_table);
- GcRoot<mirror::Object>* roots = reinterpret_cast<GcRoot<mirror::Object>*>(roots_data);
- for (uint32_t i = 0; i < number_of_roots; ++i) {
- // This does not need a read barrier because this is called by GC.
- mirror::Object* object = roots[i].Read<kWithoutReadBarrier>();
- if (!(object == nullptr ||
- object == Runtime::GetWeakClassSentinel() ||
- object->IsString<kDefaultVerifyFlags>() ||
- object->IsClass<kDefaultVerifyFlags>())) {
- // We don't need to visit j.l.Class and j.l.String and the only remaining possible
- // objects are MethodType-s.
- if (kIsDebugBuild) {
- ObjPtr<mirror::Class> method_type_class =
- WellKnownClasses::java_lang_invoke_MethodType.Get<kWithoutReadBarrier>();
- ObjPtr<mirror::Class> klass =
- object->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>();
- CHECK(klass == method_type_class ||
- klass == ReadBarrier::IsMarked(method_type_class.Ptr()) ||
- ReadBarrier::IsMarked(klass.Ptr()) == method_type_class);
- }
-
- visitor.VisitRoot(roots[i].AddressWithoutBarrier());
- }
- }
- }
-}
-
-} // namespace jit
-} // namespace art
-
-#endif // ART_RUNTIME_JIT_JIT_CODE_CACHE_INL_H_
-
-
diff --git a/runtime/jit/jit_code_cache.cc b/runtime/jit/jit_code_cache.cc
index 9bf14b5038..7ea9efb0f9 100644
--- a/runtime/jit/jit_code_cache.cc
+++ b/runtime/jit/jit_code_cache.cc
@@ -59,7 +59,6 @@
#include "thread-current-inl.h"
#include "thread-inl.h"
#include "thread_list.h"
-#include "well_known_classes-inl.h"
namespace art HIDDEN {
namespace jit {
@@ -304,8 +303,10 @@ bool JitCodeCache::ContainsMethod(ArtMethod* method) {
return true;
}
} else {
- if (method_code_map_reversed_.find(method) != method_code_map_reversed_.end()) {
- return true;
+ for (const auto& it : method_code_map_) {
+ if (it.second == method) {
+ return true;
+ }
}
if (zygote_map_.ContainsMethod(method)) {
return true;
@@ -397,6 +398,16 @@ static void DCheckRootsAreValid(const std::vector<Handle<mirror::Object>>& roots
}
}
+static const uint8_t* GetRootTable(const void* code_ptr, uint32_t* number_of_roots = nullptr) {
+ OatQuickMethodHeader* method_header = OatQuickMethodHeader::FromCodePointer(code_ptr);
+ uint8_t* data = method_header->GetOptimizedCodeInfoPtr();
+ uint32_t roots = GetNumberOfRoots(data);
+ if (number_of_roots != nullptr) {
+ *number_of_roots = roots;
+ }
+ return data - ComputeRootTableSize(roots);
+}
+
void JitCodeCache::SweepRootTables(IsMarkedVisitor* visitor) {
Thread* self = Thread::Current();
ScopedDebugDisallowReadBarriers sddrb(self);
@@ -423,27 +434,13 @@ void JitCodeCache::SweepRootTables(IsMarkedVisitor* visitor) {
if (new_object != object) {
roots[i] = GcRoot<mirror::Object>(new_object);
}
- } else if (object->IsClass<kDefaultVerifyFlags>()) {
+ } else {
mirror::Object* new_klass = visitor->IsMarked(object);
if (new_klass == nullptr) {
roots[i] = GcRoot<mirror::Object>(Runtime::GetWeakClassSentinel());
} else if (new_klass != object) {
roots[i] = GcRoot<mirror::Object>(new_klass);
}
- } else {
- mirror::Object* new_method_type = visitor->IsMarked(object);
- if (new_method_type != nullptr) {
- ObjPtr<mirror::Class> method_type_class =
- WellKnownClasses::java_lang_invoke_MethodType.Get<kWithoutReadBarrier>();
- DCHECK_EQ((new_method_type->GetClass<kVerifyNone, kWithoutReadBarrier>()),
- method_type_class.Ptr());
-
- if (new_method_type != object) {
- roots[i] = GcRoot<mirror::Object>(new_method_type);
- }
- } else {
- roots[i] = nullptr;
- }
}
}
}
@@ -549,16 +546,11 @@ void JitCodeCache::RemoveMethodsIn(Thread* self, const LinearAlloc& alloc) {
++it;
}
}
- for (auto it = method_code_map_reversed_.begin(); it != method_code_map_reversed_.end();) {
- const void* code_ptr = it->second;
- ArtMethod* method = it->first;
- if (alloc.ContainsUnsafe(method)) {
- method_headers.insert(OatQuickMethodHeader::FromCodePointer(code_ptr));
- VLOG(jit) << "JIT removed " << method->PrettyMethod() << ": " << code_ptr;
-
- DCHECK_EQ(method_code_map_.count(code_ptr), 1u);
- method_code_map_.erase(code_ptr);
- it = method_code_map_reversed_.erase(it);
+ for (auto it = method_code_map_.begin(); it != method_code_map_.end();) {
+ if (alloc.ContainsUnsafe(it->second)) {
+ method_headers.insert(OatQuickMethodHeader::FromCodePointer(it->first));
+ VLOG(jit) << "JIT removed " << it->second->PrettyMethod() << ": " << it->first;
+ it = method_code_map_.erase(it);
} else {
++it;
}
@@ -603,16 +595,6 @@ void JitCodeCache::WaitUntilInlineCacheAccessible(Thread* self) {
}
}
-const uint8_t* JitCodeCache::GetRootTable(const void* code_ptr, uint32_t* number_of_roots) {
- OatQuickMethodHeader* method_header = OatQuickMethodHeader::FromCodePointer(code_ptr);
- uint8_t* data = method_header->GetOptimizedCodeInfoPtr();
- uint32_t num_roots = GetNumberOfRoots(data);
- if (number_of_roots != nullptr) {
- *number_of_roots = num_roots;
- }
- return data - ComputeRootTableSize(num_roots);
-}
-
void JitCodeCache::BroadcastForInlineCacheAccess() {
Thread* self = Thread::Current();
MutexLock mu(self, *Locks::jit_lock_);
@@ -781,7 +763,6 @@ bool JitCodeCache::Commit(Thread* self,
} else {
ScopedDebugDisallowReadBarriers sddrb(self);
method_code_map_.Put(code_ptr, method);
- method_code_map_reversed_.emplace(method, code_ptr);
}
if (compilation_kind == CompilationKind::kOsr) {
ScopedDebugDisallowReadBarriers sddrb(self);
@@ -879,16 +860,17 @@ bool JitCodeCache::RemoveMethodLocked(ArtMethod* method, bool release_memory) {
}
}
} else {
- auto range = method_code_map_reversed_.equal_range(method);
- for (auto it = range.first; it != range.second;) {
- in_cache = true;
- if (release_memory) {
- FreeCodeAndData(it->second);
+ for (auto it = method_code_map_.begin(); it != method_code_map_.end();) {
+ if (it->second == method) {
+ in_cache = true;
+ if (release_memory) {
+ FreeCodeAndData(it->first);
+ }
+ VLOG(jit) << "JIT removed " << it->second->PrettyMethod() << ": " << it->first;
+ it = method_code_map_.erase(it);
+ } else {
+ ++it;
}
- VLOG(jit) << "JIT removed " << it->first->PrettyMethod() << ": " << it->second;
- DCHECK_EQ(method_code_map_.Get(it->second), it->first);
- method_code_map_.erase(it->second);
- it = method_code_map_reversed_.erase(it);
}
auto osr_it = osr_code_map_.find(method);
@@ -928,26 +910,12 @@ void JitCodeCache::MoveObsoleteMethod(ArtMethod* old_method, ArtMethod* new_meth
}
return;
}
- // Update method_code_map_ and method_code_map_reversed_ to point to the new method.
- auto range = method_code_map_reversed_.equal_range(old_method);
- std::multimap<ArtMethod*, const void*> remapped_code_ptrs;
- for (auto it = range.first; it != range.second;) {
- const void* code_ptr = it->second;
-
- auto next = std::next(it);
- auto node = method_code_map_reversed_.extract(it);
- node.key() = new_method;
- remapped_code_ptrs.insert(std::move(node));
-
- DCHECK_EQ(method_code_map_.count(code_ptr), 1u);
- method_code_map_.find(code_ptr)->second = new_method;
-
- it = next;
+ // Update method_code_map_ to point to the new method.
+ for (auto& it : method_code_map_) {
+ if (it.second == old_method) {
+ it.second = new_method;
+ }
}
-
- DCHECK_EQ(method_code_map_reversed_.count(old_method), 0u);
- method_code_map_reversed_.merge(remapped_code_ptrs);
-
// Update osr_code_map_ to point to the new method.
auto code_map = osr_code_map_.find(old_method);
if (code_map != osr_code_map_.end()) {
@@ -1202,19 +1170,16 @@ void JitCodeCache::RemoveUnmarkedCode(Thread* self) {
it = jni_stubs_map_.erase(it);
}
}
- for (auto it = method_code_map_reversed_.begin(); it != method_code_map_reversed_.end();) {
- const void* code_ptr = it->second;
+ for (auto it = method_code_map_.begin(); it != method_code_map_.end();) {
+ const void* code_ptr = it->first;
uintptr_t allocation = FromCodeToAllocation(code_ptr);
if (IsInZygoteExecSpace(code_ptr) || GetLiveBitmap()->Test(allocation)) {
++it;
} else {
OatQuickMethodHeader* header = OatQuickMethodHeader::FromCodePointer(code_ptr);
method_headers.insert(header);
- VLOG(jit) << "JIT removed " << it->first->PrettyMethod() << ": " << code_ptr;
-
- DCHECK_EQ(method_code_map_.count(code_ptr), 1u);
- method_code_map_.erase(code_ptr);
- it = method_code_map_reversed_.erase(it);
+ VLOG(jit) << "JIT removed " << it->second->PrettyMethod() << ": " << it->first;
+ it = method_code_map_.erase(it);
}
}
FreeAllMethodHeaders(method_headers);
diff --git a/runtime/jit/jit_code_cache.h b/runtime/jit/jit_code_cache.h
index ed242b215e..96fc7e2706 100644
--- a/runtime/jit/jit_code_cache.h
+++ b/runtime/jit/jit_code_cache.h
@@ -25,7 +25,6 @@
#include <unordered_set>
#include <vector>
-#include "android-base/thread_annotations.h"
#include "base/arena_containers.h"
#include "base/array_ref.h"
#include "base/atomic.h"
@@ -359,11 +358,6 @@ class JitCodeCache {
bool IsOsrCompiled(ArtMethod* method) REQUIRES(!Locks::jit_lock_);
- // Visit GC roots (except j.l.Class and j.l.String) held by JIT-ed code.
- template<typename RootVisitorType>
- EXPORT void VisitRootTables(ArtMethod* method,
- RootVisitorType& visitor) NO_THREAD_SAFETY_ANALYSIS;
-
void SweepRootTables(IsMarkedVisitor* visitor)
REQUIRES(!Locks::jit_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -518,8 +512,6 @@ class JitCodeCache {
REQUIRES(!Locks::jit_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
- EXPORT const uint8_t* GetRootTable(const void* code_ptr, uint32_t* number_of_roots = nullptr);
-
class JniStubKey;
class JniStubData;
@@ -560,9 +552,6 @@ class JitCodeCache {
// Holds compiled code associated to the ArtMethod.
SafeMap<const void*, ArtMethod*> method_code_map_ GUARDED_BY(Locks::jit_lock_);
- // Content is identical to `method_code_map_`, but keyed by `ArtMethod*`.
- // Multimap because one method can be compiled differently (see `CompilationKind`).
- std::multimap<ArtMethod*, const void*> method_code_map_reversed_ GUARDED_BY(Locks::jit_lock_);
// Holds compiled code associated to the ArtMethod. Used when pre-jitting
// methods whose entrypoints have the resolution stub.
diff --git a/runtime/well_known_classes.h b/runtime/well_known_classes.h
index f29daad204..bd8bbe0108 100644
--- a/runtime/well_known_classes.h
+++ b/runtime/well_known_classes.h
@@ -251,8 +251,6 @@ struct EXPORT WellKnownClasses {
java_lang_StackOverflowError;
static constexpr ClassFromField<&java_lang_Thread_daemon> java_lang_Thread;
static constexpr ClassFromField<&java_lang_ThreadGroup_groups> java_lang_ThreadGroup;
- static constexpr ClassFromMethod<&java_lang_invoke_MethodType_makeImpl>
- java_lang_invoke_MethodType;
static constexpr ClassFromMethod<&java_lang_reflect_InvocationTargetException_init>
java_lang_reflect_InvocationTargetException;
static constexpr ClassFromMethod<&java_lang_reflect_Parameter_init>