/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM_VIXL_H_
#define ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM_VIXL_H_

#include "code_generator_arm.h"
#include "utils/arm/assembler_arm_vixl.h"

// TODO(VIXL): make vixl clean wrt -Wshadow.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wshadow"
#include "aarch32/constants-aarch32.h"
#include "aarch32/instructions-aarch32.h"
#include "aarch32/macro-assembler-aarch32.h"
#pragma GCC diagnostic pop

// True if VIXL32 should be used for codegen on ARM.
#ifdef ART_USE_VIXL_ARM_BACKEND
static constexpr bool kArmUseVIXL32 = true;
#else
static constexpr bool kArmUseVIXL32 = false;
#endif

namespace art {
namespace arm {

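// Managed-code arguments are passed in r1-r3; r0 is not an argument register
// because it carries the ArtMethod* being invoked (see kMethodRegister below).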
static const vixl::aarch32::Register kParameterCoreRegistersVIXL[] = {
    vixl::aarch32::r1,
    vixl::aarch32::r2,
    vixl::aarch32::r3
};
static const size_t kParameterCoreRegistersLengthVIXL = arraysize(kParameterCoreRegistersVIXL);
static const vixl::aarch32::SRegister kParameterFpuRegistersVIXL[] = {
    vixl::aarch32::s0,
    vixl::aarch32::s1,
    vixl::aarch32::s2,
    vixl::aarch32::s3,
    vixl::aarch32::s4,
    vixl::aarch32::s5,
    vixl::aarch32::s6,
    vixl::aarch32::s7,
    vixl::aarch32::s8,
    vixl::aarch32::s9,
    vixl::aarch32::s10,
    vixl::aarch32::s11,
    vixl::aarch32::s12,
    vixl::aarch32::s13,
    vixl::aarch32::s14,
    vixl::aarch32::s15
};
static const size_t kParameterFpuRegistersLengthVIXL = arraysize(kParameterFpuRegistersVIXL);

static const vixl::aarch32::Register kMethodRegister = vixl::aarch32::r0;

static const vixl::aarch32::Register kCoreAlwaysSpillRegister = vixl::aarch32::r5;

// Callee saves core registers r5, r6, r7, r8, r10, r11, and lr.
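// (r9 is deliberately absent from this list: ART reserves it as the thread
// register, so it is never handed out by the register allocator.)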
static const vixl::aarch32::RegisterList kCoreCalleeSaves = vixl::aarch32::RegisterList::Union(
    vixl::aarch32::RegisterList(vixl::aarch32::r5,
                                vixl::aarch32::r6,
                                vixl::aarch32::r7,
                                vixl::aarch32::r8),
    vixl::aarch32::RegisterList(vixl::aarch32::r10,
                                vixl::aarch32::r11,
                                vixl::aarch32::lr));

// Callee saves FP registers s16 to s31 inclusive.
static const vixl::aarch32::SRegisterList kFpuCalleeSaves =
    vixl::aarch32::SRegisterList(vixl::aarch32::s16, 16);

static const vixl::aarch32::Register kRuntimeParameterCoreRegistersVIXL[] = {
    vixl::aarch32::r0,
    vixl::aarch32::r1,
    vixl::aarch32::r2,
    vixl::aarch32::r3
};
static const size_t kRuntimeParameterCoreRegistersLengthVIXL =
    arraysize(kRuntimeParameterCoreRegistersVIXL);
static const vixl::aarch32::SRegister kRuntimeParameterFpuRegistersVIXL[] = {
    vixl::aarch32::s0,
    vixl::aarch32::s1,
    vixl::aarch32::s2,
    vixl::aarch32::s3
};
static const size_t kRuntimeParameterFpuRegistersLengthVIXL =
    arraysize(kRuntimeParameterFpuRegistersVIXL);

class LoadClassSlowPathARMVIXL;

#define FOR_EACH_IMPLEMENTED_INSTRUCTION(M) \
  M(Above) \
  M(AboveOrEqual) \
  M(Add) \
  M(And) \
  M(ArrayGet) \
  M(ArrayLength) \
  M(ArraySet) \
  M(Below) \
  M(BelowOrEqual) \
  M(BooleanNot) \
  M(BoundsCheck) \
  M(CheckCast) \
  M(ClearException) \
  M(ClinitCheck) \
  M(Compare) \
  M(CurrentMethod) \
  M(Deoptimize) \
  M(Div) \
  M(DivZeroCheck) \
  M(DoubleConstant) \
  M(Equal) \
  M(Exit) \
  M(FloatConstant) \
  M(Goto) \
  M(GreaterThan) \
  M(GreaterThanOrEqual) \
  M(If) \
  M(InstanceFieldGet) \
  M(InstanceFieldSet) \
  M(IntConstant) \
  M(InvokeStaticOrDirect) \
  M(InvokeVirtual) \
  M(LessThan) \
  M(LessThanOrEqual) \
  M(LoadClass) \
  M(LoadException) \
  M(LoadString) \
  M(LongConstant) \
  M(MemoryBarrier) \
  M(Mul) \
  M(Neg) \
  M(NewArray) \
  M(NewInstance) \
  M(Not) \
  M(NotEqual) \
  M(NullCheck) \
  M(NullConstant) \
  M(Or) \
  M(ParallelMove) \
  M(ParameterValue) \
  M(Phi) \
  M(Return) \
  M(ReturnVoid) \
  M(Ror) \
  M(Select) \
  M(Shl) \
  M(Shr) \
  M(StaticFieldGet) \
  M(StaticFieldSet) \
  M(Sub) \
  M(SuspendCheck) \
  M(Throw) \
  M(TryBoundary) \
  M(TypeConversion) \
  M(UShr) \
  M(Xor) \

// TODO: Remove once the VIXL32 backend is implemented completely.
#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
  M(BoundType) \
  M(ClassTableGet) \
  M(InstanceOf) \
  M(InvokeInterface) \
  M(InvokeUnresolved) \
  M(MonitorOperation) \
  M(NativeDebugInfo) \
  M(PackedSwitch) \
  M(Rem) \
  M(UnresolvedInstanceFieldGet) \
  M(UnresolvedInstanceFieldSet) \
  M(UnresolvedStaticFieldGet) \
  M(UnresolvedStaticFieldSet) \

class CodeGeneratorARMVIXL;

class InvokeRuntimeCallingConventionARMVIXL
    : public CallingConvention<vixl::aarch32::Register, vixl::aarch32::SRegister> {
 public:
  InvokeRuntimeCallingConventionARMVIXL()
      : CallingConvention(kRuntimeParameterCoreRegistersVIXL,
                          kRuntimeParameterCoreRegistersLengthVIXL,
                          kRuntimeParameterFpuRegistersVIXL,
                          kRuntimeParameterFpuRegistersLengthVIXL,
                          kArmPointerSize) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConventionARMVIXL);
};

class InvokeDexCallingConventionARMVIXL
    : public CallingConvention<vixl::aarch32::Register, vixl::aarch32::SRegister> {
 public:
  InvokeDexCallingConventionARMVIXL()
      : CallingConvention(kParameterCoreRegistersVIXL,
                          kParameterCoreRegistersLengthVIXL,
                          kParameterFpuRegistersVIXL,
                          kParameterFpuRegistersLengthVIXL,
                          kArmPointerSize) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionARMVIXL);
};

class SlowPathCodeARMVIXL : public SlowPathCode {
 public:
  explicit SlowPathCodeARMVIXL(HInstruction* instruction)
      : SlowPathCode(instruction), entry_label_(), exit_label_() {}

  vixl::aarch32::Label* GetEntryLabel() { return &entry_label_; }
  vixl::aarch32::Label* GetExitLabel() { return &exit_label_; }

  void SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) OVERRIDE;
  void RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) OVERRIDE;

 private:
  vixl::aarch32::Label entry_label_;
  vixl::aarch32::Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARMVIXL);
};

class ParallelMoveResolverARMVIXL : public ParallelMoveResolverWithSwap {
 public:
  ParallelMoveResolverARMVIXL(ArenaAllocator* allocator, CodeGeneratorARMVIXL* codegen)
      : ParallelMoveResolverWithSwap(allocator), codegen_(codegen) {}

  void EmitMove(size_t index) OVERRIDE;
  void EmitSwap(size_t index) OVERRIDE;
  void SpillScratch(int reg) OVERRIDE;
  void RestoreScratch(int reg) OVERRIDE;

  ArmVIXLAssembler* GetAssembler() const;

 private:
  void Exchange(vixl::aarch32::Register reg, int mem);
  void Exchange(int mem1, int mem2);

  CodeGeneratorARMVIXL* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(ParallelMoveResolverARMVIXL);
};

#define DEFINE_IMPLEMENTED_INSTRUCTION_VISITOR(Name) \
  void Visit##Name(H##Name*) OVERRIDE;

#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITOR(Name) \
  void Visit##Name(H##Name* instr) OVERRIDE { \
    VisitUnimplementedInstruction(instr); }
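// For example, DEFINE_IMPLEMENTED_INSTRUCTION_VISITOR(Add) expands to
//   void VisitAdd(HAdd*) OVERRIDE;
// while the unimplemented variant expands to an override whose body simply
// calls VisitUnimplementedInstruction().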

class LocationsBuilderARMVIXL : public HGraphVisitor {
 public:
  LocationsBuilderARMVIXL(HGraph* graph, CodeGeneratorARMVIXL* codegen)
      : HGraphVisitor(graph), codegen_(codegen) {}

  FOR_EACH_IMPLEMENTED_INSTRUCTION(DEFINE_IMPLEMENTED_INSTRUCTION_VISITOR)

  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITOR)

 private:
  void VisitUnimplementedInstruction(HInstruction* instruction) {
    LOG(FATAL) << "Unimplemented Instruction: " << instruction->DebugName();
  }

  void HandleInvoke(HInvoke* invoke);
  void HandleBitwiseOperation(HBinaryOperation* operation, Opcode opcode);
  void HandleCondition(HCondition* condition);
  void HandleIntegerRotate(LocationSummary* locations);
  void HandleLongRotate(LocationSummary* locations);
  void HandleShift(HBinaryOperation* operation);
  void HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info);
  void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);

  Location ArithmeticZeroOrFpuRegister(HInstruction* input);
  Location ArmEncodableConstantOrRegister(HInstruction* constant, Opcode opcode);
  bool CanEncodeConstantAsImmediate(HConstant* input_cst, Opcode opcode);
  bool CanEncodeConstantAsImmediate(uint32_t value, Opcode opcode, SetCc set_cc = kCcDontCare);

  CodeGeneratorARMVIXL* const codegen_;
  InvokeDexCallingConventionVisitorARM parameter_visitor_;

  DISALLOW_COPY_AND_ASSIGN(LocationsBuilderARMVIXL);
};

class InstructionCodeGeneratorARMVIXL : public InstructionCodeGenerator {
 public:
  InstructionCodeGeneratorARMVIXL(HGraph* graph, CodeGeneratorARMVIXL* codegen);

  FOR_EACH_IMPLEMENTED_INSTRUCTION(DEFINE_IMPLEMENTED_INSTRUCTION_VISITOR)

  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITOR)

  ArmVIXLAssembler* GetAssembler() const { return assembler_; }
  vixl::aarch32::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }

 private:
  void VisitUnimplementedInstruction(HInstruction* instruction) {
    LOG(FATAL) << "Unimplemented Instruction: " << instruction->DebugName();
  }

  // Generate code for the given suspend check. If not null, `successor`
  // is the block to branch to if the suspend check is not needed, and after
  // the suspend call.
  void GenerateSuspendCheck(HSuspendCheck* instruction, HBasicBlock* successor);
  void GenerateClassInitializationCheck(LoadClassSlowPathARMVIXL* slow_path,
                                        vixl::aarch32::Register class_reg);
  void HandleGoto(HInstruction* got, HBasicBlock* successor);
  void GenerateAndConst(vixl::aarch32::Register out, vixl::aarch32::Register first, uint32_t value);
  void GenerateOrrConst(vixl::aarch32::Register out, vixl::aarch32::Register first, uint32_t value);
  void GenerateEorConst(vixl::aarch32::Register out, vixl::aarch32::Register first, uint32_t value);
  void HandleBitwiseOperation(HBinaryOperation* operation);
  void HandleCondition(HCondition* condition);
  void HandleIntegerRotate(HRor* ror);
  void HandleLongRotate(HRor* ror);
  void HandleShift(HBinaryOperation* operation);

  void GenerateWideAtomicStore(vixl::aarch32::Register addr,
                               uint32_t offset,
                               vixl::aarch32::Register value_lo,
                               vixl::aarch32::Register value_hi,
                               vixl::aarch32::Register temp1,
                               vixl::aarch32::Register temp2,
                               HInstruction* instruction);
  void GenerateWideAtomicLoad(vixl::aarch32::Register addr,
                              uint32_t offset,
                              vixl::aarch32::Register out_lo,
                              vixl::aarch32::Register out_hi);

  void HandleFieldSet(HInstruction* instruction,
                      const FieldInfo& field_info,
                      bool value_can_be_null);
  void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);

  // Generate a heap reference load using two different registers
  // `out` and `obj`:
  //
  //   out <- *(obj + offset)
  //
  // while honoring heap poisoning and/or read barriers (if any).
  //
  // Location `maybe_temp` is used when generating a Baker's (fast
  // path) read barrier and shall be a register in that case; it may
  // be an invalid location otherwise.
  void GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                         Location out,
                                         Location obj,
                                         uint32_t offset,
                                         Location maybe_temp);

  // Generate a GC root reference load:
  //
  //   root <- *(obj + offset)
  //
  // while honoring read barriers if `requires_read_barrier` is true.
  void GenerateGcRootFieldLoad(HInstruction* instruction,
                               Location root,
                               vixl::aarch32::Register obj,
                               uint32_t offset,
                               bool requires_read_barrier);
  void GenerateTestAndBranch(HInstruction* instruction,
                             size_t condition_input_index,
                             vixl::aarch32::Label* true_target,
                             vixl::aarch32::Label* false_target);
  void GenerateCompareTestAndBranch(HCondition* condition,
                                    vixl::aarch32::Label* true_target,
                                    vixl::aarch32::Label* false_target);
  void GenerateVcmp(HInstruction* instruction);
  void GenerateFPJumps(HCondition* cond,
                       vixl::aarch32::Label* true_label,
                       vixl::aarch32::Label* false_label);
  void GenerateLongComparesAndJumps(HCondition* cond,
                                    vixl::aarch32::Label* true_label,
                                    vixl::aarch32::Label* false_label);
  void DivRemOneOrMinusOne(HBinaryOperation* instruction);
  void DivRemByPowerOfTwo(HBinaryOperation* instruction);
  void GenerateDivRemWithAnyConstant(HBinaryOperation* instruction);
  void GenerateDivRemConstantIntegral(HBinaryOperation* instruction);

  ArmVIXLAssembler* const assembler_;
  CodeGeneratorARMVIXL* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(InstructionCodeGeneratorARMVIXL);
};

class CodeGeneratorARMVIXL : public CodeGenerator {
 public:
  CodeGeneratorARMVIXL(HGraph* graph,
                       const ArmInstructionSetFeatures& isa_features,
                       const CompilerOptions& compiler_options,
                       OptimizingCompilerStats* stats = nullptr);

  virtual ~CodeGeneratorARMVIXL() {}

  void Initialize() OVERRIDE {
    block_labels_.resize(GetGraph()->GetBlocks().size());
  }

  void GenerateFrameEntry() OVERRIDE;
  void GenerateFrameExit() OVERRIDE;

  void Bind(HBasicBlock* block) OVERRIDE;

  vixl::aarch32::Label* GetLabelOf(HBasicBlock* block) {
    block = FirstNonEmptyBlock(block);
    return &(block_labels_[block->GetBlockId()]);
  }

  void MoveConstant(Location destination, int32_t value) OVERRIDE;
  void MoveLocation(Location dst, Location src, Primitive::Type dst_type) OVERRIDE;
  void AddLocationAsTemp(Location location, LocationSummary* locations) OVERRIDE;

  ArmVIXLAssembler* GetAssembler() OVERRIDE { return &assembler_; }

  const ArmVIXLAssembler& GetAssembler() const OVERRIDE { return assembler_; }

  vixl::aarch32::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }

  size_t GetWordSize() const OVERRIDE { return kArmWordSize; }

  size_t GetFloatingPointSpillSlotSize() const OVERRIDE { return vixl::aarch32::kRegSizeInBytes; }

  uintptr_t GetAddressOf(HBasicBlock* block) OVERRIDE {
    vixl::aarch32::Label* block_entry_label = GetLabelOf(block);
    DCHECK(block_entry_label->IsBound());
    return block_entry_label->GetLocation();
  }

  HGraphVisitor* GetLocationBuilder() OVERRIDE { return &location_builder_; }

  HGraphVisitor* GetInstructionVisitor() OVERRIDE { return &instruction_visitor_; }

  void GenerateMemoryBarrier(MemBarrierKind kind);
  void Finalize(CodeAllocator* allocator) OVERRIDE;
  void SetupBlockedRegisters() const OVERRIDE;

  void DumpCoreRegister(std::ostream& stream, int reg) const OVERRIDE;
  void DumpFloatingPointRegister(std::ostream& stream, int reg) const OVERRIDE;

  InstructionSet GetInstructionSet() const OVERRIDE { return InstructionSet::kThumb2; }

  // Helper method to move a 32-bit value between two locations.
  void Move32(Location destination, Location source);

  void LoadFromShiftedRegOffset(Primitive::Type type,
                                Location out_loc,
                                vixl::aarch32::Register base,
                                vixl::aarch32::Register reg_index,
                                vixl::aarch32::Condition cond = vixl::aarch32::al);
  void StoreToShiftedRegOffset(Primitive::Type type,
                               Location out_loc,
                               vixl::aarch32::Register base,
                               vixl::aarch32::Register reg_index,
                               vixl::aarch32::Condition cond = vixl::aarch32::al);

  const ArmInstructionSetFeatures& GetInstructionSetFeatures() const { return isa_features_; }

  vixl::aarch32::Label* GetFrameEntryLabel() { return &frame_entry_label_; }

  // Saves the register on the stack. Returns the size taken on the stack.
  size_t SaveCoreRegister(size_t stack_index ATTRIBUTE_UNUSED,
                          uint32_t reg_id ATTRIBUTE_UNUSED) OVERRIDE {
    UNIMPLEMENTED(INFO) << "TODO: SaveCoreRegister";
    return 0;
  }

  // Restores the register from the stack. Returns the size taken on the stack.
  size_t RestoreCoreRegister(size_t stack_index ATTRIBUTE_UNUSED,
                             uint32_t reg_id ATTRIBUTE_UNUSED) OVERRIDE {
    UNIMPLEMENTED(INFO) << "TODO: RestoreCoreRegister";
    return 0;
  }

  size_t SaveFloatingPointRegister(size_t stack_index ATTRIBUTE_UNUSED,
                                   uint32_t reg_id ATTRIBUTE_UNUSED) OVERRIDE {
    UNIMPLEMENTED(INFO) << "TODO: SaveFloatingPointRegister";
    return 0;
  }

  size_t RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;

  bool NeedsTwoRegisters(Primitive::Type type) const OVERRIDE {
    return type == Primitive::kPrimDouble || type == Primitive::kPrimLong;
  }

  void ComputeSpillMask() OVERRIDE;

  void GenerateImplicitNullCheck(HNullCheck* null_check) OVERRIDE;
  void GenerateExplicitNullCheck(HNullCheck* null_check) OVERRIDE;

  ParallelMoveResolver* GetMoveResolver() OVERRIDE {
    return &move_resolver_;
  }

  // Generate code to invoke a runtime entry point.
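  // A typical call site (illustrative, e.g. from an instruction visitor or a
  // slow path) would be:
  //   InvokeRuntime(kQuickThrowDivZero, instruction, instruction->GetDexPc(), slow_path);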
  void InvokeRuntime(QuickEntrypointEnum entrypoint,
                     HInstruction* instruction,
                     uint32_t dex_pc,
                     SlowPathCode* slow_path = nullptr) OVERRIDE;

  // Generate code to invoke a runtime entry point, but do not record
  // PC-related information in a stack map.
  void InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                           HInstruction* instruction,
                                           SlowPathCode* slow_path);

  void GenerateInvokeRuntime(int32_t entry_point_offset);

  // Emit a write barrier.
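  // (If `can_be_null` is true, the generated code is expected to test `value`
  // and skip the card mark when it is null.)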
  void MarkGCCard(vixl::aarch32::Register temp,
                  vixl::aarch32::Register card,
                  vixl::aarch32::Register object,
                  vixl::aarch32::Register value,
                  bool can_be_null);

  // Fast path implementation of ReadBarrier::Barrier for a heap
  // reference field load when Baker's read barriers are used.
  void GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                             Location ref,
                                             vixl::aarch32::Register obj,
                                             uint32_t offset,
                                             Location temp,
                                             bool needs_null_check);

  // Factored implementation, used by GenerateFieldLoadWithBakerReadBarrier,
  // GenerateArrayLoadWithBakerReadBarrier and some intrinsics.
  //
  // Load the object reference located at the address
  // `obj + offset + (index << scale_factor)`, held by object `obj`, into
  // `ref`, and mark it if needed.
  //
  // If `always_update_field` is true, the value of the reference is
  // atomically updated in the holder (`obj`). This operation
  // requires an extra temporary register, which must be provided as a
  // non-null pointer (`temp2`).
  void GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                 Location ref,
                                                 vixl::aarch32::Register obj,
                                                 uint32_t offset,
                                                 Location index,
                                                 ScaleFactor scale_factor,
                                                 Location temp,
                                                 bool needs_null_check,
                                                 bool always_update_field = false,
                                                 vixl::aarch32::Register* temp2 = nullptr);

  // Generate a read barrier for a heap reference within `instruction`
  // using a slow path.
  //
  // A read barrier for an object reference read from the heap is
  // implemented as a call to the artReadBarrierSlow runtime entry
  // point, which is passed the values in locations `ref`, `obj`, and
  // `offset`:
  //
  //   mirror::Object* artReadBarrierSlow(mirror::Object* ref,
  //                                      mirror::Object* obj,
  //                                      uint32_t offset);
  //
  // The `out` location contains the value returned by
  // artReadBarrierSlow.
  //
  // When `index` is provided (i.e. for array accesses), the offset
  // value passed to artReadBarrierSlow is adjusted to take `index`
  // into account.
  void GenerateReadBarrierSlow(HInstruction* instruction,
                               Location out,
                               Location ref,
                               Location obj,
                               uint32_t offset,
                               Location index = Location::NoLocation());

  // If read barriers are enabled, generate a read barrier for a heap
  // reference using a slow path. If heap poisoning is enabled, also
  // unpoison the reference in `out`.
  void MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                    Location out,
                                    Location ref,
                                    Location obj,
                                    uint32_t offset,
                                    Location index = Location::NoLocation());

  // Check if the desired_string_load_kind is supported. If it is, return it;
  // otherwise return a fall-back kind that should be used instead.
  HLoadString::LoadKind GetSupportedLoadStringKind(
      HLoadString::LoadKind desired_string_load_kind) OVERRIDE;

  // Check if the desired_class_load_kind is supported. If it is, return it;
  // otherwise return a fall-back kind that should be used instead.
  HLoadClass::LoadKind GetSupportedLoadClassKind(
      HLoadClass::LoadKind desired_class_load_kind) OVERRIDE;

  // Check if the desired_dispatch_info is supported. If it is, return it;
  // otherwise return a fall-back info that should be used instead.
  HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
      const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
      HInvokeStaticOrDirect* invoke) OVERRIDE;

  void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) OVERRIDE;
  void GenerateVirtualCall(HInvokeVirtual* invoke, Location temp) OVERRIDE;

  void MoveFromReturnRegister(Location trg, Primitive::Type type) OVERRIDE;

  void GenerateNop() OVERRIDE;

 private:
  vixl::aarch32::Register GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
                                                                vixl::aarch32::Register temp);

  // Labels for each block that will be compiled.
  // We use a deque so that the `vixl::aarch32::Label` objects do not move in memory.
  ArenaDeque<vixl::aarch32::Label> block_labels_;  // Indexed by block id.
  vixl::aarch32::Label frame_entry_label_;

  LocationsBuilderARMVIXL location_builder_;
  InstructionCodeGeneratorARMVIXL instruction_visitor_;
  ParallelMoveResolverARMVIXL move_resolver_;

  ArmVIXLAssembler assembler_;
  const ArmInstructionSetFeatures& isa_features_;

  DISALLOW_COPY_AND_ASSIGN(CodeGeneratorARMVIXL);
};

#undef FOR_EACH_IMPLEMENTED_INSTRUCTION
#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
#undef DEFINE_IMPLEMENTED_INSTRUCTION_VISITOR
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITOR

}  // namespace arm
}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM_VIXL_H_