summaryrefslogtreecommitdiff
path: root/compiler
diff options
context:
space:
mode:
Diffstat (limited to 'compiler')
-rw-r--r--compiler/optimizing/code_generation_data.cc11
-rw-r--r--compiler/optimizing/code_generation_data.h30
-rw-r--r--compiler/optimizing/code_generator.cc13
-rw-r--r--compiler/optimizing/code_generator.h5
-rw-r--r--compiler/optimizing/code_generator_x86_64.cc46
-rw-r--r--compiler/optimizing/code_generator_x86_64.h5
-rw-r--r--compiler/optimizing/instruction_builder.cc7
-rw-r--r--compiler/optimizing/nodes.h9
-rw-r--r--compiler/optimizing/sharpening.cc41
-rw-r--r--compiler/optimizing/sharpening.h8
10 files changed, 161 insertions, 14 deletions
diff --git a/compiler/optimizing/code_generation_data.cc b/compiler/optimizing/code_generation_data.cc
index 7b23d46dc5..afc4f62f0f 100644
--- a/compiler/optimizing/code_generation_data.cc
+++ b/compiler/optimizing/code_generation_data.cc
@@ -20,6 +20,7 @@
#include "intern_table.h"
#include "mirror/object-inl.h"
#include "runtime.h"
+#include "well_known_classes-inl.h"
namespace art HIDDEN {
@@ -52,6 +53,16 @@ void CodeGenerationData::EmitJitRoots(
entry.second = index;
++index;
}
+ for (auto& entry : jit_method_type_roots_) {
+ // Update the `roots` with the MethodType, and replace the temporarily
+ // stored address with the index in the table.
+ uint64_t address = entry.second;
+ roots->emplace_back(reinterpret_cast<StackReference<mirror::Object>*>(address));
+ DCHECK(roots->back() != nullptr);
+ DCHECK(roots->back()->InstanceOf(WellKnownClasses::java_lang_invoke_MethodType.Get()));
+ entry.second = index;
+ ++index;
+ }
}
} // namespace art
diff --git a/compiler/optimizing/code_generation_data.h b/compiler/optimizing/code_generation_data.h
index e78ba8f574..0d4db66ab4 100644
--- a/compiler/optimizing/code_generation_data.h
+++ b/compiler/optimizing/code_generation_data.h
@@ -23,10 +23,12 @@
#include "base/scoped_arena_allocator.h"
#include "base/scoped_arena_containers.h"
#include "code_generator.h"
+#include "dex/proto_reference.h"
#include "dex/string_reference.h"
#include "dex/type_reference.h"
#include "handle.h"
#include "mirror/class.h"
+#include "mirror/method_type.h"
#include "mirror/object.h"
#include "mirror/string.h"
#include "stack_map_stream.h"
@@ -82,8 +84,24 @@ class CodeGenerationData : public DeletableArenaObject<kArenaAllocCodeGenerator>
return jit_class_roots_.size();
}
+ void ReserveJitMethodTypeRoot(ProtoReference proto_reference,
+ Handle<mirror::MethodType> method_type) {
+ jit_method_type_roots_.Overwrite(proto_reference,
+ reinterpret_cast64<uint64_t>(method_type.GetReference()));
+ }
+
+ uint64_t GetJitMethodTypeRootIndex(ProtoReference proto_reference) const {
+ return jit_method_type_roots_.Get(proto_reference);
+ }
+
+ size_t GetNumberOfJitMethodTypeRoots() const {
+ return jit_method_type_roots_.size();
+ }
+
size_t GetNumberOfJitRoots() const {
- return GetNumberOfJitStringRoots() + GetNumberOfJitClassRoots();
+ return GetNumberOfJitStringRoots() +
+ GetNumberOfJitClassRoots() +
+ GetNumberOfJitMethodTypeRoots();
}
void EmitJitRoots(/*out*/std::vector<Handle<mirror::Object>>* roots)
@@ -97,7 +115,9 @@ class CodeGenerationData : public DeletableArenaObject<kArenaAllocCodeGenerator>
jit_string_roots_(StringReferenceValueComparator(),
allocator_.Adapter(kArenaAllocCodeGenerator)),
jit_class_roots_(TypeReferenceValueComparator(),
- allocator_.Adapter(kArenaAllocCodeGenerator)) {
+ allocator_.Adapter(kArenaAllocCodeGenerator)),
+ jit_method_type_roots_(ProtoReferenceValueComparator(),
+ allocator_.Adapter(kArenaAllocCodeGenerator)) {
slow_paths_.reserve(kDefaultSlowPathsCapacity);
}
@@ -116,6 +136,12 @@ class CodeGenerationData : public DeletableArenaObject<kArenaAllocCodeGenerator>
// Entries are initially added with a pointer in the handle zone, and `EmitJitRoots`
// will compute all the indices.
ScopedArenaSafeMap<TypeReference, uint64_t, TypeReferenceValueComparator> jit_class_roots_;
+
+ // Maps a ProtoReference (dex_file, proto_index) to the index in the literal table.
+ // Entries are initially added with a pointer in the handle zone, and `EmitJitRoots`
+ // will compute all the indices.
+ ScopedArenaSafeMap<ProtoReference, uint64_t, ProtoReferenceValueComparator>
+ jit_method_type_roots_;
};
} // namespace art
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 88bd818b0c..51714ef548 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -16,6 +16,7 @@
#include "code_generator.h"
#include "base/globals.h"
+#include "mirror/method_type.h"
#ifdef ART_ENABLE_CODEGEN_arm
#include "code_generator_arm_vixl.h"
@@ -209,11 +210,23 @@ uint64_t CodeGenerator::GetJitClassRootIndex(TypeReference type_reference) {
return code_generation_data_->GetJitClassRootIndex(type_reference);
}
+void CodeGenerator::ReserveJitMethodTypeRoot(ProtoReference proto_reference,
+ Handle<mirror::MethodType> method_type) {
+ DCHECK(code_generation_data_ != nullptr);
+ code_generation_data_->ReserveJitMethodTypeRoot(proto_reference, method_type);
+}
+
+uint64_t CodeGenerator::GetJitMethodTypeRootIndex(ProtoReference proto_reference) {
+ DCHECK(code_generation_data_ != nullptr);
+ return code_generation_data_->GetJitMethodTypeRootIndex(proto_reference);
+}
+
void CodeGenerator::EmitJitRootPatches([[maybe_unused]] uint8_t* code,
[[maybe_unused]] const uint8_t* roots_data) {
DCHECK(code_generation_data_ != nullptr);
DCHECK_EQ(code_generation_data_->GetNumberOfJitStringRoots(), 0u);
DCHECK_EQ(code_generation_data_->GetNumberOfJitClassRoots(), 0u);
+ DCHECK_EQ(code_generation_data_->GetNumberOfJitMethodTypeRoots(), 0u);
}
uint32_t CodeGenerator::GetArrayLengthOffset(HArrayLength* array_length) {
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index aec7b45a1a..950bae5c8f 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -29,10 +29,12 @@
#include "base/memory_region.h"
#include "base/pointer_size.h"
#include "class_root.h"
+#include "dex/proto_reference.h"
#include "dex/string_reference.h"
#include "dex/type_reference.h"
#include "graph_visualizer.h"
#include "locations.h"
+#include "mirror/method_type.h"
#include "nodes.h"
#include "oat/oat_quick_method_header.h"
#include "optimizing_compiler_stats.h"
@@ -834,6 +836,9 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
uint64_t GetJitStringRootIndex(StringReference string_reference);
void ReserveJitClassRoot(TypeReference type_reference, Handle<mirror::Class> klass);
uint64_t GetJitClassRootIndex(TypeReference type_reference);
+ void ReserveJitMethodTypeRoot(ProtoReference proto_reference,
+ Handle<mirror::MethodType> method_type);
+ uint64_t GetJitMethodTypeRootIndex(ProtoReference proto_reference);
// Emit the patches associated with JIT roots. Only applies to JIT compiled code.
virtual void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data);
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index e2b4344be9..f61bb39ccc 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -35,6 +35,7 @@
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
+#include "mirror/method_type.h"
#include "mirror/object_reference.h"
#include "mirror/var_handle.h"
#include "optimizing/nodes.h"
@@ -1628,6 +1629,7 @@ CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+ jit_method_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
@@ -6824,20 +6826,31 @@ void InstructionCodeGeneratorX86_64::VisitLoadMethodHandle(HLoadMethodHandle* lo
codegen_->GenerateLoadMethodHandleRuntimeCall(load);
}
+Label* CodeGeneratorX86_64::NewJitRootMethodTypePatch(const DexFile& dex_file,
+ dex::ProtoIndex proto_index,
+ Handle<mirror::MethodType> handle) {
+ ReserveJitMethodTypeRoot(ProtoReference(&dex_file, proto_index), handle);
+ // Add a patch entry and return the label.
+ jit_method_type_patches_.emplace_back(&dex_file, proto_index.index_);
+ PatchInfo<Label>* info = &jit_method_type_patches_.back();
+ return &info->label;
+}
+
void LocationsBuilderX86_64::VisitLoadMethodType(HLoadMethodType* load) {
LocationSummary* locations =
new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
if (load->GetLoadKind() == HLoadMethodType::LoadKind::kRuntimeCall) {
- Location location = Location::RegisterLocation(RAX);
- CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
+ Location location = Location::RegisterLocation(RAX);
+ CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
} else {
- DCHECK_EQ(load->GetLoadKind(), HLoadMethodType::LoadKind::kBssEntry);
locations->SetOut(Location::RequiresRegister());
- if (codegen_->EmitNonBakerReadBarrier()) {
- // For non-Baker read barrier we have a temp-clobbering call.
- } else {
- // Rely on the pResolveMethodType to save everything.
- locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
+ if (load->GetLoadKind() == HLoadMethodType::LoadKind::kBssEntry) {
+ if (codegen_->EmitNonBakerReadBarrier()) {
+ // For non-Baker read barrier we have a temp-clobbering call.
+ } else {
+ // Rely on the pResolveMethodType to save everything.
+ locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
+ }
}
}
}
@@ -6864,6 +6877,17 @@ void InstructionCodeGeneratorX86_64::VisitLoadMethodType(HLoadMethodType* load)
__ Bind(slow_path->GetExitLabel());
return;
}
+ case HLoadMethodType::LoadKind::kJitTableAddress: {
+ Address address = Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset,
+ /* no_rip= */ true);
+ Handle<mirror::MethodType> method_type = load->GetMethodType();
+ DCHECK(method_type != nullptr);
+ Label* fixup_label = codegen_->NewJitRootMethodTypePatch(
+ load->GetDexFile(), load->GetProtoIndex(), method_type);
+ GenerateGcRootFieldLoad(
+ load, out_loc, address, fixup_label, codegen_->GetCompilerReadBarrierOption());
+ return;
+ }
default:
DCHECK_EQ(load->GetLoadKind(), HLoadMethodType::LoadKind::kRuntimeCall);
codegen_->GenerateLoadMethodTypeRuntimeCall(load);
@@ -8543,6 +8567,12 @@ void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots
uint64_t index_in_table = GetJitClassRootIndex(type_reference);
PatchJitRootUse(code, roots_data, info, index_in_table);
}
+
+ for (const PatchInfo<Label>& info : jit_method_type_patches_) {
+ ProtoReference proto_reference(info.target_dex_file, dex::ProtoIndex(info.offset_or_index));
+ uint64_t index_in_table = GetJitMethodTypeRootIndex(proto_reference);
+ PatchJitRootUse(code, roots_data, info, index_in_table);
+ }
}
bool LocationsBuilderX86_64::CpuHasAvxFeatureFlag() {
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index 81c8ead32e..ddeb33a261 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -546,6 +546,9 @@ class CodeGeneratorX86_64 : public CodeGenerator {
Label* NewJitRootClassPatch(const DexFile& dex_file,
dex::TypeIndex type_index,
Handle<mirror::Class> handle);
+ Label* NewJitRootMethodTypePatch(const DexFile& dex_file,
+ dex::ProtoIndex proto_index,
+ Handle<mirror::MethodType> method_type);
void LoadBootImageAddress(CpuRegister reg, uint32_t boot_image_reference);
void LoadIntrinsicDeclaringClass(CpuRegister reg, HInvoke* invoke);
@@ -765,6 +768,8 @@ class CodeGeneratorX86_64 : public CodeGenerator {
ArenaDeque<PatchInfo<Label>> jit_string_patches_;
// Patches for class literals in JIT compiled code.
ArenaDeque<PatchInfo<Label>> jit_class_patches_;
+ // Patches for method type literals in JIT compiled code.
+ ArenaDeque<PatchInfo<Label>> jit_method_type_patches_;
// Fixups for jump tables need to be handled specially.
ArenaVector<JumpTableRIPFixup*> fixups_to_jump_tables_;
diff --git a/compiler/optimizing/instruction_builder.cc b/compiler/optimizing/instruction_builder.cc
index 3a64769a8b..c97c78ca17 100644
--- a/compiler/optimizing/instruction_builder.cc
+++ b/compiler/optimizing/instruction_builder.cc
@@ -2720,9 +2720,10 @@ void HInstructionBuilder::BuildLoadMethodType(dex::ProtoIndex proto_index, uint3
const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
HLoadMethodType* load_method_type =
new (allocator_) HLoadMethodType(graph_->GetCurrentMethod(), proto_index, dex_file, dex_pc);
- if (!code_generator_->GetCompilerOptions().IsJitCompiler()) {
- load_method_type->SetLoadKind(HLoadMethodType::LoadKind::kBssEntry);
- }
+ HSharpening::ProcessLoadMethodType(load_method_type,
+ code_generator_,
+ *dex_compilation_unit_,
+ graph_->GetHandleCache()->GetHandles());
AppendInstruction(load_method_type);
}
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 1e3aca64db..825134497d 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -7218,6 +7218,8 @@ class HLoadMethodType final : public HInstruction {
enum class LoadKind {
// Load from an entry in the .bss section using a PC-relative load.
kBssEntry,
+ // Load from the root table associated with the JIT compiled method.
+ kJitTableAddress,
// Load using a single runtime call.
kRuntimeCall,
@@ -7254,6 +7256,10 @@ class HLoadMethodType final : public HInstruction {
dex::ProtoIndex GetProtoIndex() const { return proto_index_; }
+ Handle<mirror::MethodType> GetMethodType() const { return method_type_; }
+
+ void SetMethodType(Handle<mirror::MethodType> method_type) { method_type_ = method_type; }
+
const DexFile& GetDexFile() const { return dex_file_; }
static SideEffects SideEffectsForArchRuntimeCalls() {
@@ -7283,6 +7289,8 @@ class HLoadMethodType final : public HInstruction {
const dex::ProtoIndex proto_index_;
const DexFile& dex_file_;
+
+ Handle<mirror::MethodType> method_type_;
};
std::ostream& operator<<(std::ostream& os, HLoadMethodType::LoadKind rhs);
@@ -7293,6 +7301,7 @@ inline void HLoadMethodType::SetLoadKind(LoadKind load_kind) {
DCHECK(GetBlock() == nullptr);
DCHECK(GetEnvironment() == nullptr);
DCHECK_EQ(GetLoadKind(), LoadKind::kRuntimeCall);
+ DCHECK_IMPLIES(GetLoadKind() == LoadKind::kJitTableAddress, GetMethodType() != nullptr);
SetPackedField<LoadKindField>(load_kind);
}
diff --git a/compiler/optimizing/sharpening.cc b/compiler/optimizing/sharpening.cc
index cb94491b8e..1b6a9fb601 100644
--- a/compiler/optimizing/sharpening.cc
+++ b/compiler/optimizing/sharpening.cc
@@ -471,4 +471,45 @@ void HSharpening::ProcessLoadString(
load_string->SetLoadKind(load_kind);
}
+void HSharpening::ProcessLoadMethodType(
+ HLoadMethodType* load_method_type,
+ CodeGenerator* codegen,
+ const DexCompilationUnit& dex_compilation_unit,
+ VariableSizedHandleScope* handles) {
+ const CompilerOptions& compiler_options = codegen->GetCompilerOptions();
+
+ HLoadMethodType::LoadKind desired_load_kind = static_cast<HLoadMethodType::LoadKind>(-1);
+
+ if (compiler_options.IsJitCompiler()) {
+ DCHECK(!compiler_options.GetCompilePic());
+ Runtime* runtime = Runtime::Current();
+ ClassLinker* class_linker = runtime->GetClassLinker();
+ ScopedObjectAccess soa(Thread::Current());
+ ObjPtr<mirror::MethodType> method_type =
+ class_linker->ResolveMethodType(Thread::Current(),
+ load_method_type->GetProtoIndex(),
+ dex_compilation_unit.GetDexCache(),
+ dex_compilation_unit.GetClassLoader());
+
+ if (method_type != nullptr) {
+ load_method_type->SetMethodType(handles->NewHandle(method_type));
+ desired_load_kind = HLoadMethodType::LoadKind::kJitTableAddress;
+ } else {
+ DCHECK_EQ(load_method_type->GetLoadKind(), HLoadMethodType::LoadKind::kRuntimeCall);
+ desired_load_kind = HLoadMethodType::LoadKind::kRuntimeCall;
+ Thread::Current()->ClearException();
+ }
+ } else {
+ if (compiler_options.GetCompilePic()) {
+ desired_load_kind = HLoadMethodType::LoadKind::kBssEntry;
+ } else {
+ // Test configuration, do not sharpen.
+ desired_load_kind = HLoadMethodType::LoadKind::kRuntimeCall;
+ }
+ }
+
+ DCHECK_NE(desired_load_kind, static_cast<HLoadMethodType::LoadKind>(-1));
+ load_method_type->SetLoadKind(desired_load_kind);
+}
+
} // namespace art
diff --git a/compiler/optimizing/sharpening.h b/compiler/optimizing/sharpening.h
index 6dfe904f27..88d3b2f604 100644
--- a/compiler/optimizing/sharpening.h
+++ b/compiler/optimizing/sharpening.h
@@ -27,7 +27,7 @@ class CodeGenerator;
class DexCompilationUnit;
// Utility methods that try to improve the way we dispatch methods, and access
-// types and strings.
+// types, strings and method types.
class HSharpening {
public:
// Used by the builder and InstructionSimplifier.
@@ -54,6 +54,12 @@ class HSharpening {
CodeGenerator* codegen,
const DexCompilationUnit& dex_compilation_unit,
VariableSizedHandleScope* handles);
+
+ // Used by the builder.
+ static void ProcessLoadMethodType(HLoadMethodType* load_method_type,
+ CodeGenerator* codegen,
+ const DexCompilationUnit& dex_compilation_unit,
+ VariableSizedHandleScope* handles);
};
} // namespace art