author Vladimir Marko <vmarko@google.com> 2017-09-20 13:37:47 +0100
committer Vladimir Marko <vmarko@google.com> 2017-09-20 15:55:10 +0100
commit d8dbc8da0e5cc6b5c2176ce2d3877e6194d72c0c (patch)
tree af6e9fb02471d75ebdea46190a0aa3e9dbdb892d /compiler/optimizing
parent 93780a60090356921b844dbefdc13442c9f18b52 (diff)
Refactor compiled_method.h .
Move LinkerPatch to compiler/linker/linker_patch.h .
Move SrcMapElem to compiler/debug/src_map_elem.h .
Introduce compiled_method-inl.h to reduce the number of `#include`s
in compiled_method.h .

Test: m test-art-host-gtest
Test: testrunner.py --host
Change-Id: Id211cdf94a63ad265bf4709f1a5e06dffbe30f64
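The compiled_method-inl.h split mentioned above follows the usual declare-in-.h, define-in--inl.h idiom: the main header keeps only declarations, and the template/inline definitions (with whatever extra headers they need) move to a companion -inl.h that only the few .cc files calling them include. The sketch below illustrates that idiom only, with hypothetical names (PatchTable, ForEachOffset); the actual CompiledMethod members are not shown in this diff.

// sketch.h -- declarations only, so it stays cheap to include widely.
#include <cstddef>
#include <cstdint>

class PatchTable {
 public:
  // Declared here; the definition lives in the matching -inl.h and is
  // pulled in only by the .cc files that actually call it.
  template <typename Visitor>
  void ForEachOffset(Visitor&& visitor) const;

 private:
  const uint32_t* offsets_ = nullptr;
  size_t count_ = 0u;
};

// sketch-inl.h -- template/inline definitions, plus any heavier #includes
// they need, kept out of every transitive user of sketch.h.
template <typename Visitor>
inline void PatchTable::ForEachOffset(Visitor&& visitor) const {
  for (size_t i = 0u; i != count_; ++i) {
    visitor(offsets_[i]);
  }
}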
Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/code_generator.cc          |  3
-rw-r--r--  compiler/optimizing/code_generator.h           |  7
-rw-r--r--  compiler/optimizing/code_generator_arm64.cc    | 45
-rw-r--r--  compiler/optimizing/code_generator_arm64.h     |  6
-rw-r--r--  compiler/optimizing/code_generator_arm_vixl.cc | 45
-rw-r--r--  compiler/optimizing/code_generator_arm_vixl.h  |  6
-rw-r--r--  compiler/optimizing/code_generator_mips.cc     | 41
-rw-r--r--  compiler/optimizing/code_generator_mips.h      |  6
-rw-r--r--  compiler/optimizing/code_generator_mips64.cc   | 41
-rw-r--r--  compiler/optimizing/code_generator_mips64.h    |  6
-rw-r--r--  compiler/optimizing/code_generator_x86.cc      | 40
-rw-r--r--  compiler/optimizing/code_generator_x86.h       |  6
-rw-r--r--  compiler/optimizing/code_generator_x86_64.cc   | 40
-rw-r--r--  compiler/optimizing/code_generator_x86_64.h    |  6
-rw-r--r--  compiler/optimizing/optimizing_compiler.cc     | 11
15 files changed, 161 insertions, 148 deletions
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 1e5f1ec00f..6533e2b9f7 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -288,7 +288,8 @@ void CodeGenerator::Finalize(CodeAllocator* allocator) {
GetAssembler()->FinalizeInstructions(code);
}
-void CodeGenerator::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches ATTRIBUTE_UNUSED) {
+void CodeGenerator::EmitLinkerPatches(
+ ArenaVector<linker::LinkerPatch>* linker_patches ATTRIBUTE_UNUSED) {
// No linker patches by default.
}
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index 30c2b52242..4b4abdfaa3 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -61,9 +61,12 @@ class Assembler;
class CodeGenerator;
class CompilerDriver;
class CompilerOptions;
-class LinkerPatch;
class ParallelMoveResolver;
+namespace linker {
+class LinkerPatch;
+} // namespace linker
+
class CodeAllocator {
public:
CodeAllocator() {}
@@ -205,7 +208,7 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
virtual void Initialize() = 0;
virtual void Finalize(CodeAllocator* allocator);
- virtual void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches);
+ virtual void EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches);
virtual void GenerateFrameEntry() = 0;
virtual void GenerateFrameExit() = 0;
virtual void Bind(HBasicBlock* block) = 0;
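The header now forward-declares LinkerPatch inside its own namespace instead of relying on a global-namespace declaration; this is enough because a pointer or reference parameter only needs an incomplete type, while the .cc files that actually build patches include linker/linker_patch.h. A minimal sketch of the same idiom outside of ART, with a hypothetical Widget class standing in for CodeGenerator and std::vector standing in for ArenaVector:

// widget.h
#include <vector>

namespace linker {
class LinkerPatch;  // Forward declaration: no #include needed in this header.
}  // namespace linker

class Widget {
 public:
  virtual ~Widget() {}
  // An incomplete linker::LinkerPatch is fine here: the parameter is only a
  // pointer to a vector of them and is never dereferenced in this header.
  virtual void EmitLinkerPatches(std::vector<linker::LinkerPatch>* patches) = 0;
};

// widget_impl.cc would then do:
//   #include "linker/linker_patch.h"  // Full definition needed to push_back().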
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 8814cfc251..aaea7c1025 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -31,6 +31,7 @@
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "linker/arm64/relative_patcher_arm64.h"
+#include "linker/linker_patch.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
@@ -4754,10 +4755,10 @@ void CodeGeneratorARM64::EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_la
__ ldr(out, MemOperand(base, /* offset placeholder */ 0));
}
-template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
+template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches(
const ArenaDeque<PcRelativePatchInfo>& infos,
- ArenaVector<LinkerPatch>* linker_patches) {
+ ArenaVector<linker::LinkerPatch>* linker_patches) {
for (const PcRelativePatchInfo& info : infos) {
linker_patches->push_back(Factory(info.label.GetLocation(),
&info.target_dex_file,
@@ -4766,7 +4767,7 @@ inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches(
}
}
-void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
+void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
DCHECK(linker_patches->empty());
size_t size =
pc_relative_method_patches_.size() +
@@ -4778,28 +4779,28 @@ void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patc
baker_read_barrier_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage()) {
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(pc_relative_method_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
- linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
+ pc_relative_method_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
+ pc_relative_type_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
+ pc_relative_string_patches_, linker_patches);
} else {
DCHECK(pc_relative_method_patches_.empty());
- EmitPcRelativeLinkerPatches<LinkerPatch::TypeClassTablePatch>(pc_relative_type_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(pc_relative_string_patches_,
- linker_patches);
- }
- EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
- linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeClassTablePatch>(
+ pc_relative_type_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringInternTablePatch>(
+ pc_relative_string_patches_, linker_patches);
+ }
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
+ method_bss_entry_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
+ type_bss_entry_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
+ string_bss_entry_patches_, linker_patches);
for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
- linker_patches->push_back(LinkerPatch::BakerReadBarrierBranchPatch(info.label.GetLocation(),
- info.custom_data));
+ linker_patches->push_back(linker::LinkerPatch::BakerReadBarrierBranchPatch(
+ info.label.GetLocation(), info.custom_data));
}
DCHECK_EQ(size, linker_patches->size());
}
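The per-kind calls above all funnel through one helper that takes the LinkerPatch factory as a non-type template parameter, so each instantiation bakes in a different static factory such as linker::LinkerPatch::RelativeMethodPatch. A stripped-down sketch of that pattern, with hypothetical Patch and PatchInfo types in place of the real ART classes:

#include <cstddef>
#include <cstdint>
#include <vector>

// Hypothetical stand-ins for linker::LinkerPatch and PcRelativePatchInfo.
struct Patch {
  size_t literal_offset;
  uint32_t target;
  static Patch MethodPatch(size_t offset, uint32_t target) { return {offset, target}; }
  static Patch TypePatch(size_t offset, uint32_t target) { return {offset, target + 1u}; }
};

struct PatchInfo {
  size_t label_location;
  uint32_t index;
};

// The factory is a function pointer fixed at compile time, mirroring
// EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(...).
template <Patch (*Factory)(size_t, uint32_t)>
void EmitPatches(const std::vector<PatchInfo>& infos, std::vector<Patch>* out) {
  for (const PatchInfo& info : infos) {
    out->push_back(Factory(info.label_location, info.index));
  }
}

// Usage: one instantiation per patch kind.
//   EmitPatches<Patch::MethodPatch>(method_infos, &patches);
//   EmitPatches<Patch::TypePatch>(type_infos, &patches);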
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index 69c511907e..cebdaa102c 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -627,7 +627,7 @@ class CodeGeneratorARM64 : public CodeGenerator {
vixl::aarch64::Register out,
vixl::aarch64::Register base);
- void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;
+ void EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) OVERRIDE;
void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) OVERRIDE;
@@ -805,9 +805,9 @@ class CodeGeneratorARM64 : public CodeGenerator {
void EmitJumpTables();
- template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
+ template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
static void EmitPcRelativeLinkerPatches(const ArenaDeque<PcRelativePatchInfo>& infos,
- ArenaVector<LinkerPatch>* linker_patches);
+ ArenaVector<linker::LinkerPatch>* linker_patches);
// Labels for each block that will be compiled.
// We use a deque so that the `vixl::aarch64::Label` objects do not move in memory.
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index baf68c4e11..e1ea08073f 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -30,6 +30,7 @@
#include "heap_poisoning.h"
#include "intrinsics_arm_vixl.h"
#include "linker/arm/relative_patcher_thumb2.h"
+#include "linker/linker_patch.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "thread.h"
@@ -9191,10 +9192,10 @@ VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateJitClassLiteral(const DexFil
});
}
-template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
+template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorARMVIXL::EmitPcRelativeLinkerPatches(
const ArenaDeque<PcRelativePatchInfo>& infos,
- ArenaVector<LinkerPatch>* linker_patches) {
+ ArenaVector<linker::LinkerPatch>* linker_patches) {
for (const PcRelativePatchInfo& info : infos) {
const DexFile& dex_file = info.target_dex_file;
size_t offset_or_index = info.offset_or_index;
@@ -9211,7 +9212,7 @@ inline void CodeGeneratorARMVIXL::EmitPcRelativeLinkerPatches(
}
}
-void CodeGeneratorARMVIXL::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
+void CodeGeneratorARMVIXL::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
DCHECK(linker_patches->empty());
size_t size =
/* MOVW+MOVT for each entry */ 2u * pc_relative_method_patches_.size() +
@@ -9223,28 +9224,28 @@ void CodeGeneratorARMVIXL::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pa
baker_read_barrier_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage()) {
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(pc_relative_method_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
- linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
+ pc_relative_method_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
+ pc_relative_type_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
+ pc_relative_string_patches_, linker_patches);
} else {
DCHECK(pc_relative_method_patches_.empty());
- EmitPcRelativeLinkerPatches<LinkerPatch::TypeClassTablePatch>(pc_relative_type_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(pc_relative_string_patches_,
- linker_patches);
- }
- EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
- linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeClassTablePatch>(
+ pc_relative_type_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringInternTablePatch>(
+ pc_relative_string_patches_, linker_patches);
+ }
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
+ method_bss_entry_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
+ type_bss_entry_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
+ string_bss_entry_patches_, linker_patches);
for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
- linker_patches->push_back(LinkerPatch::BakerReadBarrierBranchPatch(info.label.GetLocation(),
- info.custom_data));
+ linker_patches->push_back(linker::LinkerPatch::BakerReadBarrierBranchPatch(
+ info.label.GetLocation(), info.custom_data));
}
DCHECK_EQ(size, linker_patches->size());
}
diff --git a/compiler/optimizing/code_generator_arm_vixl.h b/compiler/optimizing/code_generator_arm_vixl.h
index e78bc15614..337ecf1163 100644
--- a/compiler/optimizing/code_generator_arm_vixl.h
+++ b/compiler/optimizing/code_generator_arm_vixl.h
@@ -594,7 +594,7 @@ class CodeGeneratorARMVIXL : public CodeGenerator {
dex::TypeIndex type_index,
Handle<mirror::Class> handle);
- void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;
+ void EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) OVERRIDE;
void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) OVERRIDE;
@@ -778,9 +778,9 @@ class CodeGeneratorARMVIXL : public CodeGenerator {
PcRelativePatchInfo* NewPcRelativePatch(const DexFile& dex_file,
uint32_t offset_or_index,
ArenaDeque<PcRelativePatchInfo>* patches);
- template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
+ template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
static void EmitPcRelativeLinkerPatches(const ArenaDeque<PcRelativePatchInfo>& infos,
- ArenaVector<LinkerPatch>* linker_patches);
+ ArenaVector<linker::LinkerPatch>* linker_patches);
// Labels for each block that will be compiled.
// We use a deque so that the `vixl::aarch32::Label` objects do not move in memory.
diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc
index 6256722661..8ada76a053 100644
--- a/compiler/optimizing/code_generator_mips.cc
+++ b/compiler/optimizing/code_generator_mips.cc
@@ -29,6 +29,7 @@
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "intrinsics_mips.h"
+#include "linker/linker_patch.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
@@ -1628,10 +1629,10 @@ void CodeGeneratorMIPS::AddLocationAsTemp(Location location, LocationSummary* lo
}
}
-template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
+template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS::EmitPcRelativeLinkerPatches(
const ArenaDeque<PcRelativePatchInfo>& infos,
- ArenaVector<LinkerPatch>* linker_patches) {
+ ArenaVector<linker::LinkerPatch>* linker_patches) {
for (const PcRelativePatchInfo& info : infos) {
const DexFile& dex_file = info.target_dex_file;
size_t offset_or_index = info.offset_or_index;
@@ -1647,7 +1648,7 @@ inline void CodeGeneratorMIPS::EmitPcRelativeLinkerPatches(
}
}
-void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
+void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
DCHECK(linker_patches->empty());
size_t size =
pc_relative_method_patches_.size() +
@@ -1658,25 +1659,25 @@ void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patch
string_bss_entry_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage()) {
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(pc_relative_method_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
- linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
+ pc_relative_method_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
+ pc_relative_type_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
+ pc_relative_string_patches_, linker_patches);
} else {
DCHECK(pc_relative_method_patches_.empty());
- EmitPcRelativeLinkerPatches<LinkerPatch::TypeClassTablePatch>(pc_relative_type_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(pc_relative_string_patches_,
- linker_patches);
- }
- EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
- linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeClassTablePatch>(
+ pc_relative_type_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringInternTablePatch>(
+ pc_relative_string_patches_, linker_patches);
+ }
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
+ method_bss_entry_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
+ type_bss_entry_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
+ string_bss_entry_patches_, linker_patches);
DCHECK_EQ(size, linker_patches->size());
}
diff --git a/compiler/optimizing/code_generator_mips.h b/compiler/optimizing/code_generator_mips.h
index f15f8c672a..2b1075d12b 100644
--- a/compiler/optimizing/code_generator_mips.h
+++ b/compiler/optimizing/code_generator_mips.h
@@ -395,7 +395,7 @@ class CodeGeneratorMIPS : public CodeGenerator {
const MipsAssembler& GetAssembler() const OVERRIDE { return assembler_; }
// Emit linker patches.
- void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;
+ void EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) OVERRIDE;
void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) OVERRIDE;
// Fast path implementation of ReadBarrier::Barrier for a heap
@@ -679,9 +679,9 @@ class CodeGeneratorMIPS : public CodeGenerator {
const PcRelativePatchInfo* info_high,
ArenaDeque<PcRelativePatchInfo>* patches);
- template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
+ template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
void EmitPcRelativeLinkerPatches(const ArenaDeque<PcRelativePatchInfo>& infos,
- ArenaVector<LinkerPatch>* linker_patches);
+ ArenaVector<linker::LinkerPatch>* linker_patches);
// Labels for each block that will be compiled.
MipsLabel* block_labels_;
diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc
index e8ae2db019..119e0f6b76 100644
--- a/compiler/optimizing/code_generator_mips64.cc
+++ b/compiler/optimizing/code_generator_mips64.cc
@@ -27,6 +27,7 @@
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "intrinsics_mips64.h"
+#include "linker/linker_patch.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
@@ -1541,10 +1542,10 @@ void CodeGeneratorMIPS64::MarkGCCard(GpuRegister object,
}
}
-template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
+template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS64::EmitPcRelativeLinkerPatches(
const ArenaDeque<PcRelativePatchInfo>& infos,
- ArenaVector<LinkerPatch>* linker_patches) {
+ ArenaVector<linker::LinkerPatch>* linker_patches) {
for (const PcRelativePatchInfo& info : infos) {
const DexFile& dex_file = info.target_dex_file;
size_t offset_or_index = info.offset_or_index;
@@ -1556,7 +1557,7 @@ inline void CodeGeneratorMIPS64::EmitPcRelativeLinkerPatches(
}
}
-void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
+void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
DCHECK(linker_patches->empty());
size_t size =
pc_relative_method_patches_.size() +
@@ -1567,25 +1568,25 @@ void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pat
string_bss_entry_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage()) {
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(pc_relative_method_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
- linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
+ pc_relative_method_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
+ pc_relative_type_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
+ pc_relative_string_patches_, linker_patches);
} else {
DCHECK(pc_relative_method_patches_.empty());
- EmitPcRelativeLinkerPatches<LinkerPatch::TypeClassTablePatch>(pc_relative_type_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(pc_relative_string_patches_,
- linker_patches);
- }
- EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
- linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeClassTablePatch>(
+ pc_relative_type_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringInternTablePatch>(
+ pc_relative_string_patches_, linker_patches);
+ }
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
+ method_bss_entry_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
+ type_bss_entry_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
+ string_bss_entry_patches_, linker_patches);
DCHECK_EQ(size, linker_patches->size());
}
diff --git a/compiler/optimizing/code_generator_mips64.h b/compiler/optimizing/code_generator_mips64.h
index 3035621972..9fe47ee297 100644
--- a/compiler/optimizing/code_generator_mips64.h
+++ b/compiler/optimizing/code_generator_mips64.h
@@ -374,7 +374,7 @@ class CodeGeneratorMIPS64 : public CodeGenerator {
const Mips64Assembler& GetAssembler() const OVERRIDE { return assembler_; }
// Emit linker patches.
- void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;
+ void EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) OVERRIDE;
void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) OVERRIDE;
// Fast path implementation of ReadBarrier::Barrier for a heap
@@ -643,9 +643,9 @@ class CodeGeneratorMIPS64 : public CodeGenerator {
const PcRelativePatchInfo* info_high,
ArenaDeque<PcRelativePatchInfo>* patches);
- template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
+ template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
void EmitPcRelativeLinkerPatches(const ArenaDeque<PcRelativePatchInfo>& infos,
- ArenaVector<LinkerPatch>* linker_patches);
+ ArenaVector<linker::LinkerPatch>* linker_patches);
// Labels for each block that will be compiled.
Mips64Label* block_labels_; // Indexed by block id.
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 0b9130fa5a..99581ee9b8 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -26,6 +26,7 @@
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "intrinsics_x86.h"
+#include "linker/linker_patch.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
@@ -4675,10 +4676,10 @@ Label* CodeGeneratorX86::NewStringBssEntryPatch(HLoadString* load_string) {
// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
-template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
+template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorX86::EmitPcRelativeLinkerPatches(
const ArenaDeque<X86PcRelativePatchInfo>& infos,
- ArenaVector<LinkerPatch>* linker_patches) {
+ ArenaVector<linker::LinkerPatch>* linker_patches) {
for (const X86PcRelativePatchInfo& info : infos) {
uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
linker_patches->push_back(Factory(
@@ -4686,7 +4687,7 @@ inline void CodeGeneratorX86::EmitPcRelativeLinkerPatches(
}
}
-void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
+void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
DCHECK(linker_patches->empty());
size_t size =
boot_image_method_patches_.size() +
@@ -4697,24 +4698,25 @@ void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patche
string_bss_entry_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage()) {
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(boot_image_method_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(boot_image_type_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(string_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
+ boot_image_method_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
+ boot_image_type_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
+ string_patches_, linker_patches);
} else {
DCHECK(boot_image_method_patches_.empty());
- EmitPcRelativeLinkerPatches<LinkerPatch::TypeClassTablePatch>(boot_image_type_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(string_patches_,
- linker_patches);
- }
- EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
- linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeClassTablePatch>(
+ boot_image_type_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringInternTablePatch>(
+ string_patches_, linker_patches);
+ }
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
+ method_bss_entry_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
+ type_bss_entry_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
+ string_bss_entry_patches_, linker_patches);
DCHECK_EQ(size, linker_patches->size());
}
diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h
index b32d57a774..e8f919d122 100644
--- a/compiler/optimizing/code_generator_x86.h
+++ b/compiler/optimizing/code_generator_x86.h
@@ -431,7 +431,7 @@ class CodeGeneratorX86 : public CodeGenerator {
void MoveFromReturnRegister(Location trg, Primitive::Type type) OVERRIDE;
// Emit linker patches.
- void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;
+ void EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) OVERRIDE;
void PatchJitRootUse(uint8_t* code,
const uint8_t* roots_data,
@@ -617,9 +617,9 @@ class CodeGeneratorX86 : public CodeGenerator {
HX86ComputeBaseMethodAddress* method_address;
};
- template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
+ template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
void EmitPcRelativeLinkerPatches(const ArenaDeque<X86PcRelativePatchInfo>& infos,
- ArenaVector<LinkerPatch>* linker_patches);
+ ArenaVector<linker::LinkerPatch>* linker_patches);
Register GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke, Register temp);
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 39a65806a4..65b3f62104 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -25,6 +25,7 @@
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "intrinsics_x86_64.h"
+#include "linker/linker_patch.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
@@ -1106,10 +1107,10 @@ Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
-template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
+template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
const ArenaDeque<PatchInfo<Label>>& infos,
- ArenaVector<LinkerPatch>* linker_patches) {
+ ArenaVector<linker::LinkerPatch>* linker_patches) {
for (const PatchInfo<Label>& info : infos) {
uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
linker_patches->push_back(
@@ -1117,7 +1118,7 @@ inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
}
}
-void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
+void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
DCHECK(linker_patches->empty());
size_t size =
boot_image_method_patches_.size() +
@@ -1128,24 +1129,25 @@ void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pat
string_bss_entry_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage()) {
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(boot_image_method_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(boot_image_type_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(string_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
+ boot_image_method_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
+ boot_image_type_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
+ string_patches_, linker_patches);
} else {
DCHECK(boot_image_method_patches_.empty());
- EmitPcRelativeLinkerPatches<LinkerPatch::TypeClassTablePatch>(boot_image_type_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(string_patches_,
- linker_patches);
- }
- EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
- linker_patches);
- EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
- linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeClassTablePatch>(
+ boot_image_type_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringInternTablePatch>(
+ string_patches_, linker_patches);
+ }
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
+ method_bss_entry_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
+ type_bss_entry_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
+ string_bss_entry_patches_, linker_patches);
DCHECK_EQ(size, linker_patches->size());
}
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index f5fa86bf23..8e8e695a64 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -424,7 +424,7 @@ class CodeGeneratorX86_64 : public CodeGenerator {
void MoveFromReturnRegister(Location trg, Primitive::Type type) OVERRIDE;
- void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;
+ void EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) OVERRIDE;
void PatchJitRootUse(uint8_t* code,
const uint8_t* roots_data,
@@ -586,9 +586,9 @@ class CodeGeneratorX86_64 : public CodeGenerator {
static constexpr int32_t kDummy32BitOffset = 256;
private:
- template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
+ template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
static void EmitPcRelativeLinkerPatches(const ArenaDeque<PatchInfo<Label>>& infos,
- ArenaVector<LinkerPatch>* linker_patches);
+ ArenaVector<linker::LinkerPatch>* linker_patches);
// Labels for each block that will be compiled.
Label* block_labels_; // Indexed by block id.
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 8dd2762a75..7451196677 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -77,6 +77,7 @@
#include "jit/jit_logger.h"
#include "jni/quick/jni_compiler.h"
#include "licm.h"
+#include "linker/linker_patch.h"
#include "load_store_analysis.h"
#include "load_store_elimination.h"
#include "loop_optimization.h"
@@ -833,13 +834,13 @@ void OptimizingCompiler::RunOptimizations(HGraph* graph,
RunArchOptimizations(driver->GetInstructionSet(), graph, codegen, pass_observer);
}
-static ArenaVector<LinkerPatch> EmitAndSortLinkerPatches(CodeGenerator* codegen) {
- ArenaVector<LinkerPatch> linker_patches(codegen->GetGraph()->GetArena()->Adapter());
+static ArenaVector<linker::LinkerPatch> EmitAndSortLinkerPatches(CodeGenerator* codegen) {
+ ArenaVector<linker::LinkerPatch> linker_patches(codegen->GetGraph()->GetArena()->Adapter());
codegen->EmitLinkerPatches(&linker_patches);
// Sort patches by literal offset. Required for .oat_patches encoding.
std::sort(linker_patches.begin(), linker_patches.end(),
- [](const LinkerPatch& lhs, const LinkerPatch& rhs) {
+ [](const linker::LinkerPatch& lhs, const linker::LinkerPatch& rhs) {
return lhs.LiteralOffset() < rhs.LiteralOffset();
});
@@ -851,7 +852,7 @@ CompiledMethod* OptimizingCompiler::Emit(ArenaAllocator* arena,
CodeGenerator* codegen,
CompilerDriver* compiler_driver,
const DexFile::CodeItem* code_item) const {
- ArenaVector<LinkerPatch> linker_patches = EmitAndSortLinkerPatches(codegen);
+ ArenaVector<linker::LinkerPatch> linker_patches = EmitAndSortLinkerPatches(codegen);
ArenaVector<uint8_t> stack_map(arena->Adapter(kArenaAllocStackMaps));
ArenaVector<uint8_t> method_info(arena->Adapter(kArenaAllocStackMaps));
size_t stack_map_size = 0;
@@ -876,7 +877,7 @@ CompiledMethod* OptimizingCompiler::Emit(ArenaAllocator* arena,
ArrayRef<const uint8_t>(method_info),
ArrayRef<const uint8_t>(stack_map),
ArrayRef<const uint8_t>(*codegen->GetAssembler()->cfi().data()),
- ArrayRef<const LinkerPatch>(linker_patches));
+ ArrayRef<const linker::LinkerPatch>(linker_patches));
return compiled_method;
}