Move GcRoot JIT patching logic to its own file for arm64.
This allows re-using it for other compilers.
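
Rough usage sketch (hypothetical backend; only the JitPatchesARM64
interface below comes from this change, everything else is illustrative
and assumes the art::arm64 namespace plus jit_patches_arm64.h):

  class SomeOtherArm64Backend {
   public:
    SomeOtherArm64Backend(Arm64Assembler* assembler, ArenaAllocator* allocator)
        : jit_patches_(assembler, allocator) {}

    vixl::aarch64::Literal<uint32_t>* LoadStringRoot(
        const DexFile& dex_file,
        dex::StringIndex string_index,
        Handle<mirror::String> handle,
        CodeGenerationData* data) {
      // Reserves the GcRoot slot and returns a placeholder literal that
      // EmitJitRootPatches() fills in once the root table is allocated.
      return jit_patches_.DeduplicateJitStringLiteral(
          dex_file, string_index, handle, data);
    }

    void Finalize(uint8_t* code,
                  const uint8_t* roots_data,
                  const CodeGenerationData& data) {
      jit_patches_.EmitJitRootPatches(code, roots_data, data);
    }

   private:
    JitPatchesARM64 jit_patches_;
  };
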
Test: test.py
Change-Id: I56ab018364c94f394bb406c51f1e54dc72252dd8
diff --git a/compiler/optimizing/code_generation_data.h b/compiler/optimizing/code_generation_data.h
index 0c6b483..e78ba8f 100644
--- a/compiler/optimizing/code_generation_data.h
+++ b/compiler/optimizing/code_generation_data.h
@@ -17,9 +17,12 @@
#ifndef ART_COMPILER_OPTIMIZING_CODE_GENERATION_DATA_H_
#define ART_COMPILER_OPTIMIZING_CODE_GENERATION_DATA_H_
+#include <memory>
+
#include "arch/instruction_set.h"
#include "base/scoped_arena_allocator.h"
#include "base/scoped_arena_containers.h"
+#include "code_generator.h"
#include "dex/string_reference.h"
#include "dex/type_reference.h"
#include "handle.h"
@@ -30,8 +33,6 @@
namespace art HIDDEN {
-class SlowPathCode;
-
class CodeGenerationData : public DeletableArenaObject<kArenaAllocCodeGenerator> {
public:
static std::unique_ptr<CodeGenerationData> Create(ArenaStack* arena_stack,
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index bcbffe4..a171b74 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -812,6 +812,10 @@
StackMapStream* GetStackMapStream();
+ CodeGenerationData* GetCodeGenerationData() {
+ return code_generation_data_.get();
+ }
+
void ReserveJitStringRoot(StringReference string_reference, Handle<mirror::String> string);
uint64_t GetJitStringRootIndex(StringReference string_reference);
void ReserveJitClassRoot(TypeReference type_reference, Handle<mirror::Class> klass);
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 130d9bb..8ebfbfc 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -999,14 +999,7 @@
boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
call_entrypoint_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
baker_read_barrier_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
- uint32_literals_(std::less<uint32_t>(),
- graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
- uint64_literals_(std::less<uint64_t>(),
- graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
- jit_string_patches_(StringReferenceValueComparator(),
- graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
- jit_class_patches_(TypeReferenceValueComparator(),
- graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+ jit_patches_(&assembler_, graph->GetAllocator()),
jit_baker_read_barrier_slow_paths_(std::less<uint32_t>(),
graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
// Save the link register (containing the return address) to mimic Quick.
@@ -1304,7 +1297,7 @@
UseScratchRegisterScope temps(masm);
Register temp = temps.AcquireX();
Register counter = temps.AcquireW();
- __ Ldr(temp, DeduplicateUint64Literal(address));
+ __ Ldr(temp, jit_patches_.DeduplicateUint64Literal(address));
__ Ldrh(counter, MemOperand(temp, ProfilingInfo::BaselineHotnessCountOffset().Int32Value()));
__ Cbz(counter, slow_path->GetEntryLabel());
__ Add(counter, counter, -1);
@@ -4788,7 +4781,8 @@
case MethodLoadKind::kJitDirectAddress: {
// Load method address from literal pool.
__ Ldr(XRegisterFrom(temp),
- DeduplicateUint64Literal(reinterpret_cast<uint64_t>(invoke->GetResolvedMethod())));
+ jit_patches_.DeduplicateUint64Literal(
+ reinterpret_cast<uint64_t>(invoke->GetResolvedMethod())));
break;
}
case MethodLoadKind::kRuntimeCall: {
@@ -5129,25 +5123,8 @@
return label;
}
-vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
- uint64_t address) {
- return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address));
-}
-
-vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitStringLiteral(
- const DexFile& dex_file, dex::StringIndex string_index, Handle<mirror::String> handle) {
- ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
- return jit_string_patches_.GetOrCreate(
- StringReference(&dex_file, string_index),
- [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u); });
-}
-
-vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitClassLiteral(
- const DexFile& dex_file, dex::TypeIndex type_index, Handle<mirror::Class> handle) {
- ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
- return jit_class_patches_.GetOrCreate(
- TypeReference(&dex_file, type_index),
- [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u); });
+void CodeGeneratorARM64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
+ jit_patches_.EmitJitRootPatches(code, roots_data, *GetCodeGenerationData());
}
void CodeGeneratorARM64::EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label,
@@ -5374,18 +5351,6 @@
assembler.CopyInstructions(code_region);
}
-vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value) {
- return uint32_literals_.GetOrCreate(
- value,
- [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
-}
-
-vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
- return uint64_literals_.GetOrCreate(
- value,
- [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
-}
-
void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
// Explicit clinit checks triggered by static invokes must have been pruned by
// art::PrepareForRegisterAllocation.
@@ -7056,32 +7021,6 @@
}
}
-static void PatchJitRootUse(uint8_t* code,
- const uint8_t* roots_data,
- vixl::aarch64::Literal<uint32_t>* literal,
- uint64_t index_in_table) {
- uint32_t literal_offset = literal->GetOffset();
- uintptr_t address =
- reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
- uint8_t* data = code + literal_offset;
- reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
-}
-
-void CodeGeneratorARM64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
- for (const auto& entry : jit_string_patches_) {
- const StringReference& string_reference = entry.first;
- vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
- uint64_t index_in_table = GetJitStringRootIndex(string_reference);
- PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
- }
- for (const auto& entry : jit_class_patches_) {
- const TypeReference& type_reference = entry.first;
- vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
- uint64_t index_in_table = GetJitClassRootIndex(type_reference);
- PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
- }
-}
-
MemOperand InstructionCodeGeneratorARM64::VecNEONAddress(
HVecMemoryOperation* instruction,
UseScratchRegisterScope* temps_scope,
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index 52c5377..c1b9ff1 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -26,6 +26,7 @@
#include "dex/string_reference.h"
#include "dex/type_reference.h"
#include "driver/compiler_options.h"
+#include "jit_patches_arm64.h"
#include "nodes.h"
#include "parallel_move_resolver.h"
#include "utils/arm64/assembler_arm64.h"
@@ -836,13 +837,21 @@
// the associated patch for AOT or slow path for JIT.
void EmitBakerReadBarrierCbnz(uint32_t custom_data);
- vixl::aarch64::Literal<uint32_t>* DeduplicateBootImageAddressLiteral(uint64_t address);
+ vixl::aarch64::Literal<uint32_t>* DeduplicateBootImageAddressLiteral(uint64_t address) {
+ return jit_patches_.DeduplicateBootImageAddressLiteral(address);
+ }
vixl::aarch64::Literal<uint32_t>* DeduplicateJitStringLiteral(const DexFile& dex_file,
dex::StringIndex string_index,
- Handle<mirror::String> handle);
+ Handle<mirror::String> handle) {
+ return jit_patches_.DeduplicateJitStringLiteral(
+ dex_file, string_index, handle, GetCodeGenerationData());
+ }
vixl::aarch64::Literal<uint32_t>* DeduplicateJitClassLiteral(const DexFile& dex_file,
- dex::TypeIndex string_index,
- Handle<mirror::Class> handle);
+ dex::TypeIndex class_index,
+ Handle<mirror::Class> handle) {
+ return jit_patches_.DeduplicateJitClassLiteral(
+ dex_file, class_index, handle, GetCodeGenerationData());
+ }
void EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label, vixl::aarch64::Register reg);
void EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
@@ -1072,18 +1081,6 @@
uint32_t encoded_data,
/*out*/ std::string* debug_name);
- using Uint64ToLiteralMap = ArenaSafeMap<uint64_t, vixl::aarch64::Literal<uint64_t>*>;
- using Uint32ToLiteralMap = ArenaSafeMap<uint32_t, vixl::aarch64::Literal<uint32_t>*>;
- using StringToLiteralMap = ArenaSafeMap<StringReference,
- vixl::aarch64::Literal<uint32_t>*,
- StringReferenceValueComparator>;
- using TypeToLiteralMap = ArenaSafeMap<TypeReference,
- vixl::aarch64::Literal<uint32_t>*,
- TypeReferenceValueComparator>;
-
- vixl::aarch64::Literal<uint32_t>* DeduplicateUint32Literal(uint32_t value);
- vixl::aarch64::Literal<uint64_t>* DeduplicateUint64Literal(uint64_t value);
-
// The PcRelativePatchInfo is used for PC-relative addressing of methods/strings/types,
// whether through .data.bimg.rel.ro, .bss, or directly in the boot image.
struct PcRelativePatchInfo : PatchInfo<vixl::aarch64::Label> {
@@ -1156,14 +1153,7 @@
// Baker read barrier patch info.
ArenaDeque<BakerReadBarrierPatchInfo> baker_read_barrier_patches_;
- // Deduplication map for 32-bit literals, used for JIT for boot image addresses.
- Uint32ToLiteralMap uint32_literals_;
- // Deduplication map for 64-bit literals, used for JIT for method address or method code.
- Uint64ToLiteralMap uint64_literals_;
- // Patches for string literals in JIT compiled code.
- StringToLiteralMap jit_string_patches_;
- // Patches for class literals in JIT compiled code.
- TypeToLiteralMap jit_class_patches_;
+ JitPatchesARM64 jit_patches_;
// Baker read barrier slow paths, mapping custom data (uint32_t) to label.
// Wrap the label to work around vixl::aarch64::Label being non-copyable
diff --git a/compiler/optimizing/jit_patches_arm64.cc b/compiler/optimizing/jit_patches_arm64.cc
new file mode 100644
index 0000000..76ba182
--- /dev/null
+++ b/compiler/optimizing/jit_patches_arm64.cc
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "code_generation_data.h"
+#include "gc_root.h"
+#include "jit_patches_arm64.h"
+
+namespace art HIDDEN {
+
+namespace arm64 {
+
+vixl::aarch64::Literal<uint32_t>* JitPatchesARM64::DeduplicateUint32Literal(
+ uint32_t value) {
+ return uint32_literals_.GetOrCreate(
+ value,
+ [this, value]() {
+ return GetVIXLAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(value);
+ });
+}
+
+vixl::aarch64::Literal<uint64_t>* JitPatchesARM64::DeduplicateUint64Literal(
+ uint64_t value) {
+ return uint64_literals_.GetOrCreate(
+ value,
+ [this, value]() {
+ return GetVIXLAssembler()->CreateLiteralDestroyedWithPool<uint64_t>(value);
+ });
+}
+
+static void PatchJitRootUse(uint8_t* code,
+ const uint8_t* roots_data,
+ vixl::aarch64::Literal<uint32_t>* literal,
+ uint64_t index_in_table) {
+ uint32_t literal_offset = literal->GetOffset();
+ uintptr_t address =
+ reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
+ uint8_t* data = code + literal_offset;
+ reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
+}
+
+void JitPatchesARM64::EmitJitRootPatches(
+ uint8_t* code,
+ const uint8_t* roots_data,
+ const CodeGenerationData& code_generation_data) const {
+ for (const auto& entry : jit_string_patches_) {
+ const StringReference& string_reference = entry.first;
+ vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
+ uint64_t index_in_table = code_generation_data.GetJitStringRootIndex(string_reference);
+ PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
+ }
+ for (const auto& entry : jit_class_patches_) {
+ const TypeReference& type_reference = entry.first;
+ vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
+ uint64_t index_in_table = code_generation_data.GetJitClassRootIndex(type_reference);
+ PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
+ }
+}
+
+vixl::aarch64::Literal<uint32_t>* JitPatchesARM64::DeduplicateBootImageAddressLiteral(
+ uint64_t address) {
+ return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address));
+}
+
+vixl::aarch64::Literal<uint32_t>* JitPatchesARM64::DeduplicateJitStringLiteral(
+ const DexFile& dex_file,
+ dex::StringIndex string_index,
+ Handle<mirror::String> handle,
+ CodeGenerationData* code_generation_data) {
+ code_generation_data->ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
+ return jit_string_patches_.GetOrCreate(
+ StringReference(&dex_file, string_index),
+ [this]() {
+ return GetVIXLAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u);
+ });
+}
+
+vixl::aarch64::Literal<uint32_t>* JitPatchesARM64::DeduplicateJitClassLiteral(
+ const DexFile& dex_file,
+ dex::TypeIndex type_index,
+ Handle<mirror::Class> handle,
+ CodeGenerationData* code_generation_data) {
+ code_generation_data->ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
+ return jit_class_patches_.GetOrCreate(
+ TypeReference(&dex_file, type_index),
+ [this]() {
+ return GetVIXLAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u);
+ });
+}
+
+} // namespace arm64
+} // namespace art
diff --git a/compiler/optimizing/jit_patches_arm64.h b/compiler/optimizing/jit_patches_arm64.h
new file mode 100644
index 0000000..f928723
--- /dev/null
+++ b/compiler/optimizing/jit_patches_arm64.h
@@ -0,0 +1,107 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_COMPILER_OPTIMIZING_JIT_PATCHES_ARM64_H_
+#define ART_COMPILER_OPTIMIZING_JIT_PATCHES_ARM64_H_
+
+#include "base/arena_allocator.h"
+#include "base/arena_containers.h"
+#include "dex/dex_file.h"
+#include "dex/string_reference.h"
+#include "dex/type_reference.h"
+#include "handle.h"
+#include "mirror/class.h"
+#include "mirror/string.h"
+#include "utils/arm64/assembler_arm64.h"
+
+// TODO(VIXL): Make VIXL compile with -Wshadow.
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wshadow"
+#include "aarch64/disasm-aarch64.h"
+#include "aarch64/macro-assembler-aarch64.h"
+#pragma GCC diagnostic pop
+
+namespace art HIDDEN {
+
+class CodeGenerationData;
+
+namespace arm64 {
+
+/**
+ * Helper for deduplicating literals and patching string or class GcRoots in
+ * JIT-generated code, which can be shared between different compilers.
+ */
+class JitPatchesARM64 {
+ public:
+ JitPatchesARM64(Arm64Assembler* assembler, ArenaAllocator* allocator) :
+ assembler_(assembler),
+ uint32_literals_(std::less<uint32_t>(),
+ allocator->Adapter(kArenaAllocCodeGenerator)),
+ uint64_literals_(std::less<uint64_t>(),
+ allocator->Adapter(kArenaAllocCodeGenerator)),
+ jit_string_patches_(StringReferenceValueComparator(),
+ allocator->Adapter(kArenaAllocCodeGenerator)),
+ jit_class_patches_(TypeReferenceValueComparator(),
+ allocator->Adapter(kArenaAllocCodeGenerator)) {
+ }
+
+ using Uint64ToLiteralMap = ArenaSafeMap<uint64_t, vixl::aarch64::Literal<uint64_t>*>;
+ using Uint32ToLiteralMap = ArenaSafeMap<uint32_t, vixl::aarch64::Literal<uint32_t>*>;
+ using StringToLiteralMap = ArenaSafeMap<StringReference,
+ vixl::aarch64::Literal<uint32_t>*,
+ StringReferenceValueComparator>;
+ using TypeToLiteralMap = ArenaSafeMap<TypeReference,
+ vixl::aarch64::Literal<uint32_t>*,
+ TypeReferenceValueComparator>;
+
+ vixl::aarch64::Literal<uint32_t>* DeduplicateUint32Literal(uint32_t value);
+ vixl::aarch64::Literal<uint64_t>* DeduplicateUint64Literal(uint64_t value);
+ vixl::aarch64::Literal<uint32_t>* DeduplicateBootImageAddressLiteral(uint64_t address);
+ vixl::aarch64::Literal<uint32_t>* DeduplicateJitStringLiteral(
+ const DexFile& dex_file,
+ dex::StringIndex string_index,
+ Handle<mirror::String> handle,
+ CodeGenerationData* code_generation_data);
+ vixl::aarch64::Literal<uint32_t>* DeduplicateJitClassLiteral(
+ const DexFile& dex_file,
+ dex::TypeIndex type_index,
+ Handle<mirror::Class> handle,
+ CodeGenerationData* code_generation_data);
+
+ void EmitJitRootPatches(uint8_t* code,
+ const uint8_t* roots_data,
+ const CodeGenerationData& code_generation_data) const;
+
+ Arm64Assembler* GetAssembler() const { return assembler_; }
+ vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }
+
+ private:
+ Arm64Assembler* assembler_;
+ // Deduplication map for 32-bit literals, used for JIT for boot image addresses.
+ Uint32ToLiteralMap uint32_literals_;
+ // Deduplication map for 64-bit literals, used for JIT for method address or method code.
+ Uint64ToLiteralMap uint64_literals_;
+ // Patches for string literals in JIT compiled code.
+ StringToLiteralMap jit_string_patches_;
+ // Patches for class literals in JIT compiled code.
+ TypeToLiteralMap jit_class_patches_;
+};
+
+} // namespace arm64
+
+} // namespace art
+
+#endif // ART_COMPILER_OPTIMIZING_JIT_PATCHES_ARM64_H_