/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM64_H_
#define ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM64_H_

#include "code_generator.h"
#include "nodes.h"
#include "parallel_move_resolver.h"
#include "utils/arm64/assembler_arm64.h"
#include "a64/disasm-a64.h"
#include "a64/macro-assembler-a64.h"
#include "arch/arm64/quick_method_frame_info_arm64.h"

namespace art {
namespace arm64 {

class CodeGeneratorARM64;

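// ARM64 is a 64-bit architecture: registers and pointers are 8 bytes wide.
// Argument registers for the Dex calling convention; x0 is not used for arguments,
// as it holds the ArtMethod* being invoked.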
static constexpr size_t kArm64WordSize = 8;
static const vixl::Register kParameterCoreRegisters[] = {
  vixl::x1, vixl::x2, vixl::x3, vixl::x4, vixl::x5, vixl::x6, vixl::x7
};
static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);
static const vixl::FPRegister kParameterFPRegisters[] = {
  vixl::d0, vixl::d1, vixl::d2, vixl::d3, vixl::d4, vixl::d5, vixl::d6, vixl::d7
};
static constexpr size_t kParameterFPRegistersLength = arraysize(kParameterFPRegisters);

const vixl::Register tr = vixl::x18;        // Thread Register
const vixl::Register wSuspend = vixl::w19;  // Suspend Register
const vixl::Register xSuspend = vixl::x19;

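// ip0 and ip1 are used as scratch registers by the VIXL macro-assembler, and tr, xSuspend
// and lr are reserved for the runtime; none of these are available to the register allocator.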
const vixl::CPURegList vixl_reserved_core_registers(vixl::ip0, vixl::ip1);
const vixl::CPURegList runtime_reserved_core_registers(tr, xSuspend, vixl::lr);
const vixl::CPURegList quick_callee_saved_registers(vixl::CPURegister::kRegister,
                                                    vixl::kXRegSize,
                                                    kArm64CalleeSaveRefSpills);

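// Calling convention used when compiled code invokes a Dex method: core arguments go in
// kParameterCoreRegisters, floating-point arguments in kParameterFPRegisters, and the
// (non floating-point) result is returned in x0.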
class InvokeDexCallingConvention : public CallingConvention<vixl::Register, vixl::FPRegister> {
 public:
  InvokeDexCallingConvention()
      : CallingConvention(kParameterCoreRegisters,
                          kParameterCoreRegistersLength,
                          kParameterFPRegisters,
                          kParameterFPRegistersLength) {}

  Location GetReturnLocation(Primitive::Type return_type) {
    DCHECK_NE(return_type, Primitive::kPrimVoid);
    if (return_type == Primitive::kPrimFloat || return_type == Primitive::kPrimDouble) {
      LOG(FATAL) << "Unimplemented return type " << return_type;
    }
    return Location::RegisterLocation(X0);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConvention);
};

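// Assigns a Location (register or stack slot) to each argument of an invoke, walking the
// arguments in order and tracking how many core registers and stack slots have been used.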
class InvokeDexCallingConventionVisitor {
 public:
  InvokeDexCallingConventionVisitor() : gp_index_(0), stack_index_(0) {}

  Location GetNextLocation(Primitive::Type type);
  Location GetReturnLocation(Primitive::Type return_type) {
    return calling_convention.GetReturnLocation(return_type);
  }

 private:
  InvokeDexCallingConvention calling_convention;
  // The current index for core registers.
  uint32_t gp_index_;
  // The current stack index.
  uint32_t stack_index_;

  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionVisitor);
};

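// Visitor that emits ARM64 code for each HInstruction, using the locations chosen by
// LocationsBuilderARM64.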
class InstructionCodeGeneratorARM64 : public HGraphVisitor {
 public:
  InstructionCodeGeneratorARM64(HGraph* graph, CodeGeneratorARM64* codegen);

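  // Declare one visit method per concrete HIR instruction.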
#define DECLARE_VISIT_INSTRUCTION(name, super) \
  virtual void Visit##name(H##name* instr);
  FOR_EACH_CONCRETE_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)
#undef DECLARE_VISIT_INSTRUCTION

  void LoadCurrentMethod(XRegister reg);

  Arm64Assembler* GetAssembler() const { return assembler_; }

 private:
  void HandleAddSub(HBinaryOperation* instr);

  Arm64Assembler* const assembler_;
  CodeGeneratorARM64* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(InstructionCodeGeneratorARM64);
};

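// Visitor that allocates a LocationSummary for each instruction, describing where its
// inputs and output should live before code generation runs.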
class LocationsBuilderARM64 : public HGraphVisitor {
 public:
  explicit LocationsBuilderARM64(HGraph* graph, CodeGeneratorARM64* codegen)
      : HGraphVisitor(graph), codegen_(codegen) {}

#define DECLARE_VISIT_INSTRUCTION(name, super) \
  virtual void Visit##name(H##name* instr);
  FOR_EACH_CONCRETE_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)
#undef DECLARE_VISIT_INSTRUCTION

 private:
  void HandleAddSub(HBinaryOperation* instr);
  void HandleInvoke(HInvoke* instr);

  CodeGeneratorARM64* const codegen_;
  InvokeDexCallingConventionVisitor parameter_visitor_;

  DISALLOW_COPY_AND_ASSIGN(LocationsBuilderARM64);
};

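// ARM64 backend of the optimizing compiler: owns the assembler, the locations builder and
// the instruction visitor, and implements the architecture-specific CodeGenerator interface.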
class CodeGeneratorARM64 : public CodeGenerator {
 public:
  explicit CodeGeneratorARM64(HGraph* graph);
  virtual ~CodeGeneratorARM64() { }

  virtual void GenerateFrameEntry() OVERRIDE;
  virtual void GenerateFrameExit() OVERRIDE;

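  // Registers preserved by the frame entry/exit sequences. For now only lr is saved.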
  static const vixl::CPURegList& GetFramePreservedRegisters() {
    static const vixl::CPURegList frame_preserved_regs =
        vixl::CPURegList(vixl::CPURegister::kRegister, vixl::kXRegSize, vixl::lr.Bit());
    return frame_preserved_regs;
  }
  static int GetFramePreservedRegistersSize() {
    return GetFramePreservedRegisters().TotalSizeInBytes();
  }

  virtual void Bind(HBasicBlock* block) OVERRIDE;

  vixl::Label* GetLabelOf(HBasicBlock* block) const {
    return block_labels_ + block->GetBlockId();
  }

  virtual void Move(HInstruction* instruction, Location location, HInstruction* move_for) OVERRIDE;

  virtual size_t GetWordSize() const OVERRIDE {
    return kArm64WordSize;
  }

  virtual size_t FrameEntrySpillSize() const OVERRIDE;

  virtual HGraphVisitor* GetLocationBuilder() OVERRIDE { return &location_builder_; }
  virtual HGraphVisitor* GetInstructionVisitor() OVERRIDE { return &instruction_visitor_; }
  virtual Arm64Assembler* GetAssembler() OVERRIDE { return &assembler_; }

  // Emit a write barrier.
  void MarkGCCard(vixl::Register object, vixl::Register value);

  // Register allocation.

  virtual void SetupBlockedRegisters() const OVERRIDE;
  // AllocateFreeRegister() is only used when allocating registers locally
  // during CompileBaseline().
  virtual Location AllocateFreeRegister(Primitive::Type type) const OVERRIDE;

  virtual Location GetStackLocation(HLoadLocal* load) const OVERRIDE;

  virtual size_t SaveCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE {
    UNIMPLEMENTED(INFO) << "TODO: SaveCoreRegister";
    return 0;
  }

  virtual size_t RestoreCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE {
    UNIMPLEMENTED(INFO) << "TODO: RestoreCoreRegister";
    return 0;
  }

  // The number of registers that can be allocated. The register allocator may
  // decide to reserve and not use a few of them.
  // We do not consider registers sp, xzr, wzr: they are either not allocatable
  // (xzr, wzr) or make for poor allocatable registers (sp has alignment
  // requirements, etc.). This also simplifies our task, as all other registers
  // can easily be mapped to or from their type and index or code.
  static const int kNumberOfAllocatableCoreRegisters = vixl::kNumberOfRegisters - 1;
  static const int kNumberOfAllocatableFloatingPointRegisters = vixl::kNumberOfFPRegisters;
  static const int kNumberOfAllocatableRegisters =
      kNumberOfAllocatableCoreRegisters + kNumberOfAllocatableFloatingPointRegisters;
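  // ARM64 does not use register pairs: long and double values fit in a single register.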
  static constexpr int kNumberOfAllocatableRegisterPairs = 0;

  virtual void DumpCoreRegister(std::ostream& stream, int reg) const OVERRIDE;
  virtual void DumpFloatingPointRegister(std::ostream& stream, int reg) const OVERRIDE;

  virtual InstructionSet GetInstructionSet() const OVERRIDE {
    return InstructionSet::kArm64;
  }

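  // Moves a value of the given type between two locations, using register moves or stack
  // loads/stores of the appropriate width.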
  void MoveHelper(Location destination, Location source, Primitive::Type type);

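  // Allocates one vixl::Label per basic block; Bind() and GetLabelOf() use these labels
  // to resolve branch targets within the compiled method.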
  virtual void Initialize() OVERRIDE {
    HGraph* graph = GetGraph();
    size_t length = graph->GetBlocks().Size();
    block_labels_ = graph->GetArena()->AllocArray<vixl::Label>(length);
    for (size_t i = 0; i < length; ++i) {
      new(block_labels_ + i) vixl::Label();
    }
  }

 private:
  // Labels for each block that will be compiled.
  vixl::Label* block_labels_;

  LocationsBuilderARM64 location_builder_;
  InstructionCodeGeneratorARM64 instruction_visitor_;
  Arm64Assembler assembler_;

  DISALLOW_COPY_AND_ASSIGN(CodeGeneratorARM64);
};

}  // namespace arm64
}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM64_H_