/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_CODE_GENERATOR_MIPS64_H_
#define ART_COMPILER_OPTIMIZING_CODE_GENERATOR_MIPS64_H_

#include "code_generator.h"
#include "driver/compiler_options.h"
#include "nodes.h"
#include "parallel_move_resolver.h"
#include "type_reference.h"
#include "utils/mips64/assembler_mips64.h"

namespace art {
namespace mips64 {

// InvokeDexCallingConvention registers
// Registers used to pass arguments to managed (dex) code.

static constexpr GpuRegister kParameterCoreRegisters[] =
    { A1, A2, A3, A4, A5, A6, A7 };
static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);

static constexpr FpuRegister kParameterFpuRegisters[] =
    { F13, F14, F15, F16, F17, F18, F19 };
static constexpr size_t kParameterFpuRegistersLength = arraysize(kParameterFpuRegisters);


// InvokeRuntimeCallingConvention registers
// Registers used to pass arguments to runtime entry points.  Unlike the
// managed convention above, A0/F12 are available as argument registers.

static constexpr GpuRegister kRuntimeParameterCoreRegisters[] =
    { A0, A1, A2, A3, A4, A5, A6, A7 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);

static constexpr FpuRegister kRuntimeParameterFpuRegisters[] =
    { F12, F13, F14, F15, F16, F17, F18, F19 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);


// Registers preserved across calls (saved/restored in the frame prologue/
// epilogue when live).
static constexpr GpuRegister kCoreCalleeSaves[] =
    { S0, S1, S2, S3, S4, S5, S6, S7, GP, S8, RA };
static constexpr FpuRegister kFpuCalleeSaves[] =
    { F24, F25, F26, F27, F28, F29, F30, F31 };


class CodeGeneratorMIPS64;

// Returns the MSA vector register overlapping the FPU register held by
// `location`.
VectorRegister VectorRegisterFrom(Location location);
63
// Calling convention for calls into managed (dex) code: wires the
// parameter register arrays above into the generic CallingConvention
// helper.
class InvokeDexCallingConvention : public CallingConvention<GpuRegister, FpuRegister> {
 public:
  InvokeDexCallingConvention()
      : CallingConvention(kParameterCoreRegisters,
                          kParameterCoreRegistersLength,
                          kParameterFpuRegisters,
                          kParameterFpuRegistersLength,
                          kMips64PointerSize) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConvention);
};
76
// Assigns argument/return locations for managed calls, walking the
// parameter list one argument at a time via GetNextLocation().
class InvokeDexCallingConventionVisitorMIPS64 : public InvokeDexCallingConventionVisitor {
 public:
  InvokeDexCallingConventionVisitorMIPS64() {}
  virtual ~InvokeDexCallingConventionVisitorMIPS64() {}

  // Location (register or stack slot) of the next argument of `type`.
  Location GetNextLocation(Primitive::Type type) OVERRIDE;
  // Location in which a value of `type` is returned.
  Location GetReturnLocation(Primitive::Type type) const OVERRIDE;
  // Location of the implicit ArtMethod* argument.
  Location GetMethodLocation() const OVERRIDE;

 private:
  InvokeDexCallingConvention calling_convention;

  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionVisitorMIPS64);
};
91
// Calling convention for calls into runtime entry points (uses the
// kRuntimeParameter* register arrays, which include A0/F12).
class InvokeRuntimeCallingConvention : public CallingConvention<GpuRegister, FpuRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                         kRuntimeParameterCoreRegistersLength,
                         kRuntimeParameterFpuRegisters,
                         kRuntimeParameterFpuRegistersLength,
                         kMips64PointerSize) {}

  // Location in which the runtime returns a value of `return_type`.
  Location GetReturnLocation(Primitive::Type return_type);

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
106
// Fixed register assignments used by the unresolved field access
// runtime entry points on MIPS64.
class FieldAccessCallingConventionMIPS64 : public FieldAccessCallingConvention {
 public:
  FieldAccessCallingConventionMIPS64() {}

  // Receiver object for instance field accesses.
  Location GetObjectLocation() const OVERRIDE {
    return Location::RegisterLocation(A1);
  }
  // Field index argument.
  Location GetFieldIndexLocation() const OVERRIDE {
    return Location::RegisterLocation(A0);
  }
  // Field get results always come back in V0, regardless of type.
  Location GetReturnLocation(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE {
    return Location::RegisterLocation(V0);
  }
  // Value to store: A2 for instance sets (A1 holds the receiver),
  // A1 for static sets.
  Location GetSetValueLocation(Primitive::Type type ATTRIBUTE_UNUSED,
                               bool is_instance) const OVERRIDE {
    return is_instance
        ? Location::RegisterLocation(A2)
        : Location::RegisterLocation(A1);
  }
  // Floating-point values are passed/returned in F0.
  Location GetFpuLocation(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE {
    return Location::FpuRegisterLocation(F0);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(FieldAccessCallingConventionMIPS64);
};
133
// Resolves parallel moves (e.g. at block boundaries and call sites) by
// emitting MIPS64 move/swap sequences.  Uses the swap-based resolution
// strategy (ParallelMoveResolverWithSwap).
class ParallelMoveResolverMIPS64 : public ParallelMoveResolverWithSwap {
 public:
  ParallelMoveResolverMIPS64(ArenaAllocator* allocator, CodeGeneratorMIPS64* codegen)
      : ParallelMoveResolverWithSwap(allocator), codegen_(codegen) {}

  // Emit the move/swap at position `index` of the current move list.
  void EmitMove(size_t index) OVERRIDE;
  void EmitSwap(size_t index) OVERRIDE;
  // Spill/restore a core register around scratch-register use.
  void SpillScratch(int reg) OVERRIDE;
  void RestoreScratch(int reg) OVERRIDE;

  // Exchange two stack slots; `double_slot` selects 64-bit vs 32-bit
  // slot width.
  void Exchange(int index1, int index2, bool double_slot);

  Mips64Assembler* GetAssembler() const;

 private:
  CodeGeneratorMIPS64* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(ParallelMoveResolverMIPS64);
};
153
// Base class for MIPS64 slow paths.  Extends SlowPathCode with
// MIPS64-specific entry/exit labels (Mips64Label).
class SlowPathCodeMIPS64 : public SlowPathCode {
 public:
  explicit SlowPathCodeMIPS64(HInstruction* instruction)
      : SlowPathCode(instruction), entry_label_(), exit_label_() {}

  // Label bound at the start of the slow-path code.
  Mips64Label* GetEntryLabel() { return &entry_label_; }
  // Label the slow path branches to when returning to the fast path.
  Mips64Label* GetExitLabel() { return &exit_label_; }

 private:
  Mips64Label entry_label_;
  Mips64Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeMIPS64);
};
168
// First code-generation pass: visits every HInstruction and creates its
// LocationSummary (register/stack constraints for inputs, outputs and
// temps) ahead of register allocation.
class LocationsBuilderMIPS64 : public HGraphVisitor {
 public:
  LocationsBuilderMIPS64(HGraph* graph, CodeGeneratorMIPS64* codegen)
      : HGraphVisitor(graph), codegen_(codegen) {}

#define DECLARE_VISIT_INSTRUCTION(name, super) \
  void Visit##name(H##name* instr) OVERRIDE;

  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

  // Any instruction without a dedicated visitor is a bug in the macro
  // lists above, not a supported case.
  void VisitInstruction(HInstruction* instruction) OVERRIDE {
    LOG(FATAL) << "Unreachable instruction " << instruction->DebugName()
               << " (id " << instruction->GetId() << ")";
  }

 private:
  // Shared helpers used by several Visit* methods with common
  // location requirements.
  void HandleInvoke(HInvoke* invoke);
  void HandleBinaryOp(HBinaryOperation* operation);
  void HandleCondition(HCondition* instruction);
  void HandleShift(HBinaryOperation* operation);
  void HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info);
  void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);
  // Location helpers that allow the zero register / FPU immediates to
  // stand in for constant inputs where the stores support them.
  Location RegisterOrZeroConstant(HInstruction* instruction);
  Location FpuRegisterOrConstantForStore(HInstruction* instruction);

  InvokeDexCallingConventionVisitorMIPS64 parameter_visitor_;

  CodeGeneratorMIPS64* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(LocationsBuilderMIPS64);
};
203
// Second code-generation pass: visits every HInstruction and emits the
// actual MIPS64 machine code through the assembler, honoring the
// locations chosen by LocationsBuilderMIPS64.
class InstructionCodeGeneratorMIPS64 : public InstructionCodeGenerator {
 public:
  InstructionCodeGeneratorMIPS64(HGraph* graph, CodeGeneratorMIPS64* codegen);

#define DECLARE_VISIT_INSTRUCTION(name, super) \
  void Visit##name(H##name* instr) OVERRIDE;

  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

  // Any instruction without a dedicated visitor is a bug in the macro
  // lists above, not a supported case.
  void VisitInstruction(HInstruction* instruction) OVERRIDE {
    LOG(FATAL) << "Unreachable instruction " << instruction->DebugName()
               << " (id " << instruction->GetId() << ")";
  }

  Mips64Assembler* GetAssembler() const { return assembler_; }

  // Compare-and-jump packed switch generates approx. 3 + 2.5 * N 32-bit
  // instructions for N cases.
  // Table-based packed switch generates approx. 11 32-bit instructions
  // and N 32-bit data words for N cases.
  // At N = 6 they come out as 18 and 17 32-bit words respectively.
  // We switch to the table-based method starting with 7 cases.
  static constexpr uint32_t kPackedSwitchJumpTableThreshold = 6;

  // Emit the memory barrier instruction(s) required for `kind`.
  void GenerateMemoryBarrier(MemBarrierKind kind);

 private:
  void GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path, GpuRegister class_reg);
  void GenerateSuspendCheck(HSuspendCheck* check, HBasicBlock* successor);
  // Shared emitters for families of instructions with common codegen.
  void HandleBinaryOp(HBinaryOperation* operation);
  void HandleCondition(HCondition* instruction);
  void HandleShift(HBinaryOperation* operation);
  void HandleFieldSet(HInstruction* instruction,
                      const FieldInfo& field_info,
                      bool value_can_be_null);
  void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);

  // Generate a heap reference load using one register `out`:
  //
  //   out <- *(out + offset)
  //
  // while honoring heap poisoning and/or read barriers (if any).
  //
  // Location `maybe_temp` is used when generating a read barrier and
  // shall be a register in that case; it may be an invalid location
  // otherwise.
  void GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                        Location out,
                                        uint32_t offset,
                                        Location maybe_temp,
                                        ReadBarrierOption read_barrier_option);
  // Generate a heap reference load using two different registers
  // `out` and `obj`:
  //
  //   out <- *(obj + offset)
  //
  // while honoring heap poisoning and/or read barriers (if any).
  //
  // Location `maybe_temp` is used when generating a Baker's (fast
  // path) read barrier and shall be a register in that case; it may
  // be an invalid location otherwise.
  void GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                         Location out,
                                         Location obj,
                                         uint32_t offset,
                                         Location maybe_temp,
                                         ReadBarrierOption read_barrier_option);

  // Generate a GC root reference load:
  //
  //   root <- *(obj + offset)
  //
  // while honoring read barriers (if any).
  void GenerateGcRootFieldLoad(HInstruction* instruction,
                               Location root,
                               GpuRegister obj,
                               uint32_t offset,
                               ReadBarrierOption read_barrier_option);

  // Emit the branch(es) for an HIf/HSelect-style condition; either
  // target label may be null when the corresponding successor falls
  // through.
  void GenerateTestAndBranch(HInstruction* instruction,
                             size_t condition_input_index,
                             Mips64Label* true_target,
                             Mips64Label* false_target);
  // Strength-reduced div/rem for constant divisors, plus the generic
  // integral path.
  void DivRemOneOrMinusOne(HBinaryOperation* instruction);
  void DivRemByPowerOfTwo(HBinaryOperation* instruction);
  void GenerateDivRemWithAnyConstant(HBinaryOperation* instruction);
  void GenerateDivRemIntegral(HBinaryOperation* instruction);
  // Integer and floating-point compares, materialized or branching.
  void GenerateIntLongCompare(IfCondition cond, bool is64bit, LocationSummary* locations);
  void GenerateIntLongCompareAndBranch(IfCondition cond,
                                       bool is64bit,
                                       LocationSummary* locations,
                                       Mips64Label* label);
  void GenerateFpCompare(IfCondition cond,
                         bool gt_bias,
                         Primitive::Type type,
                         LocationSummary* locations);
  void GenerateFpCompareAndBranch(IfCondition cond,
                                  bool gt_bias,
                                  Primitive::Type type,
                                  LocationSummary* locations,
                                  Mips64Label* label);
  void HandleGoto(HInstruction* got, HBasicBlock* successor);
  // Packed-switch lowering; see kPackedSwitchJumpTableThreshold above
  // for the choice between the two strategies.
  void GenPackedSwitchWithCompares(GpuRegister value_reg,
                                   int32_t lower_bound,
                                   uint32_t num_entries,
                                   HBasicBlock* switch_block,
                                   HBasicBlock* default_block);
  void GenTableBasedPackedSwitch(GpuRegister value_reg,
                                 int32_t lower_bound,
                                 uint32_t num_entries,
                                 HBasicBlock* switch_block,
                                 HBasicBlock* default_block);
  // Compute the address for a SIMD memory access; returns the offset to
  // use with `*adjusted_base`.
  int32_t VecAddress(LocationSummary* locations,
                     size_t size,
                     /* out */ GpuRegister* adjusted_base);

  Mips64Assembler* const assembler_;
  CodeGeneratorMIPS64* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(InstructionCodeGeneratorMIPS64);
};
328
// MIPS64 backend of the optimizing compiler's code generator: owns the
// assembler, the two visitor passes, the parallel move resolver, and all
// PC-relative/JIT patch bookkeeping for linker fixups.
class CodeGeneratorMIPS64 : public CodeGenerator {
 public:
  CodeGeneratorMIPS64(HGraph* graph,
                      const Mips64InstructionSetFeatures& isa_features,
                      const CompilerOptions& compiler_options,
                      OptimizingCompilerStats* stats = nullptr);
  virtual ~CodeGeneratorMIPS64() {}

  void GenerateFrameEntry() OVERRIDE;
  void GenerateFrameExit() OVERRIDE;

  void Bind(HBasicBlock* block) OVERRIDE;

  // Stack slots are 64-bit wide on MIPS64.
  size_t GetWordSize() const OVERRIDE { return kMips64DoublewordSize; }

  size_t GetFloatingPointSpillSlotSize() const OVERRIDE {
    return GetGraph()->HasSIMD()
        ? 2 * kMips64DoublewordSize   // 16 bytes for each spill.
        : 1 * kMips64DoublewordSize;  //  8 bytes for each spill.
  }

  uintptr_t GetAddressOf(HBasicBlock* block) OVERRIDE {
    return assembler_.GetLabelLocation(GetLabelOf(block));
  }

  HGraphVisitor* GetLocationBuilder() OVERRIDE { return &location_builder_; }
  HGraphVisitor* GetInstructionVisitor() OVERRIDE { return &instruction_visitor_; }
  Mips64Assembler* GetAssembler() OVERRIDE { return &assembler_; }
  const Mips64Assembler& GetAssembler() const OVERRIDE { return assembler_; }

  // Emit linker patches.
  void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;
  void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) OVERRIDE;

  // Fast path implementation of ReadBarrier::Barrier for a heap
  // reference field load when Baker's read barriers are used.
  void GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                             Location ref,
                                             GpuRegister obj,
                                             uint32_t offset,
                                             Location temp,
                                             bool needs_null_check);
  // Fast path implementation of ReadBarrier::Barrier for a heap
  // reference array load when Baker's read barriers are used.
  void GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                             Location ref,
                                             GpuRegister obj,
                                             uint32_t data_offset,
                                             Location index,
                                             Location temp,
                                             bool needs_null_check);

  // Factored implementation, used by GenerateFieldLoadWithBakerReadBarrier,
  // GenerateArrayLoadWithBakerReadBarrier and some intrinsics.
  //
  // Load the object reference located at the address
  // `obj + offset + (index << scale_factor)`, held by object `obj`, into
  // `ref`, and mark it if needed.
  //
  // If `always_update_field` is true, the value of the reference is
  // atomically updated in the holder (`obj`).
  void GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                 Location ref,
                                                 GpuRegister obj,
                                                 uint32_t offset,
                                                 Location index,
                                                 ScaleFactor scale_factor,
                                                 Location temp,
                                                 bool needs_null_check,
                                                 bool always_update_field = false);

  // Generate a read barrier for a heap reference within `instruction`
  // using a slow path.
  //
  // A read barrier for an object reference read from the heap is
  // implemented as a call to the artReadBarrierSlow runtime entry
  // point, which is passed the values in locations `ref`, `obj`, and
  // `offset`:
  //
  //   mirror::Object* artReadBarrierSlow(mirror::Object* ref,
  //                                      mirror::Object* obj,
  //                                      uint32_t offset);
  //
  // The `out` location contains the value returned by
  // artReadBarrierSlow.
  //
  // When `index` is provided (i.e. for array accesses), the offset
  // value passed to artReadBarrierSlow is adjusted to take `index`
  // into account.
  void GenerateReadBarrierSlow(HInstruction* instruction,
                               Location out,
                               Location ref,
                               Location obj,
                               uint32_t offset,
                               Location index = Location::NoLocation());

  // If read barriers are enabled, generate a read barrier for a heap
  // reference using a slow path. If heap poisoning is enabled, also
  // unpoison the reference in `out`.
  void MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                    Location out,
                                    Location ref,
                                    Location obj,
                                    uint32_t offset,
                                    Location index = Location::NoLocation());

  // Generate a read barrier for a GC root within `instruction` using
  // a slow path.
  //
  // A read barrier for an object reference GC root is implemented as
  // a call to the artReadBarrierForRootSlow runtime entry point,
  // which is passed the value in location `root`:
  //
  //   mirror::Object* artReadBarrierForRootSlow(GcRoot<mirror::Object>* root);
  //
  // The `out` location contains the value returned by
  // artReadBarrierForRootSlow.
  void GenerateReadBarrierForRootSlow(HInstruction* instruction, Location out, Location root);

  // Card-table write barrier for a reference store into `object`.
  void MarkGCCard(GpuRegister object, GpuRegister value, bool value_can_be_null);

  // Register allocation.

  void SetupBlockedRegisters() const OVERRIDE;

  // Spill/restore of a single register to/from the stack frame; each
  // returns the size consumed at `stack_index`.
  size_t SaveCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
  size_t RestoreCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
  size_t SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
  size_t RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;

  void DumpCoreRegister(std::ostream& stream, int reg) const OVERRIDE;
  void DumpFloatingPointRegister(std::ostream& stream, int reg) const OVERRIDE;

  InstructionSet GetInstructionSet() const OVERRIDE { return InstructionSet::kMips64; }

  const Mips64InstructionSetFeatures& GetInstructionSetFeatures() const {
    return isa_features_;
  }

  Mips64Label* GetLabelOf(HBasicBlock* block) const {
    return CommonGetLabelOf<Mips64Label>(block_labels_, block);
  }

  void Initialize() OVERRIDE {
    block_labels_ = CommonInitializeLabels<Mips64Label>();
  }

  // We prefer aligned loads and stores (less code), so spill and restore registers in slow paths
  // at aligned locations.
  uint32_t GetPreferredSlotsAlignment() const OVERRIDE { return kMips64DoublewordSize; }

  void Finalize(CodeAllocator* allocator) OVERRIDE;

  // Code generation helpers.
  void MoveLocation(Location dst, Location src, Primitive::Type dst_type) OVERRIDE;

  void MoveConstant(Location destination, int32_t value) OVERRIDE;

  void AddLocationAsTemp(Location location, LocationSummary* locations) OVERRIDE;


  void SwapLocations(Location loc1, Location loc2, Primitive::Type type);

  // Generate code to invoke a runtime entry point.
  void InvokeRuntime(QuickEntrypointEnum entrypoint,
                     HInstruction* instruction,
                     uint32_t dex_pc,
                     SlowPathCode* slow_path = nullptr) OVERRIDE;

  // Generate code to invoke a runtime entry point, but do not record
  // PC-related information in a stack map.
  void InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                           HInstruction* instruction,
                                           SlowPathCode* slow_path);

  void GenerateInvokeRuntime(int32_t entry_point_offset);

  ParallelMoveResolver* GetMoveResolver() OVERRIDE { return &move_resolver_; }

  // All primitive types fit in a single 64-bit register on MIPS64.
  bool NeedsTwoRegisters(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE { return false; }

  // Check if the desired_string_load_kind is supported. If it is, return it,
  // otherwise return a fall-back kind that should be used instead.
  HLoadString::LoadKind GetSupportedLoadStringKind(
      HLoadString::LoadKind desired_string_load_kind) OVERRIDE;

  // Check if the desired_class_load_kind is supported. If it is, return it,
  // otherwise return a fall-back kind that should be used instead.
  HLoadClass::LoadKind GetSupportedLoadClassKind(
      HLoadClass::LoadKind desired_class_load_kind) OVERRIDE;

  // Check if the desired_dispatch_info is supported. If it is, return it,
  // otherwise return a fall-back info that should be used instead.
  HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
      const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
      HInvokeStaticOrDirect* invoke) OVERRIDE;

  void GenerateStaticOrDirectCall(
      HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path = nullptr) OVERRIDE;
  void GenerateVirtualCall(
      HInvokeVirtual* invoke, Location temp, SlowPathCode* slow_path = nullptr) OVERRIDE;

  void MoveFromReturnRegister(Location trg ATTRIBUTE_UNUSED,
                              Primitive::Type type ATTRIBUTE_UNUSED) OVERRIDE {
    UNIMPLEMENTED(FATAL) << "Not implemented on MIPS64";
  }

  void GenerateNop() OVERRIDE;
  void GenerateImplicitNullCheck(HNullCheck* instruction) OVERRIDE;
  void GenerateExplicitNullCheck(HNullCheck* instruction) OVERRIDE;

  // The PcRelativePatchInfo is used for PC-relative addressing of dex cache arrays,
  // boot image strings and method calls. The only difference is the interpretation of
  // the offset_or_index.
  // The 16-bit halves of the 32-bit PC-relative offset are patched separately, necessitating
  // two patches/infos. There can be more than two patches/infos if the instruction supplying
  // the high half is shared with e.g. a slow path, while the low half is supplied by separate
  // instructions, e.g.:
  //     auipc r1, high       // patch
  //     lwu   r2, low(r1)    // patch
  //     beqzc r2, slow_path
  //   back:
  //     ...
  //   slow_path:
  //     ...
  //     sw    r2, low(r1)    // patch
  //     bc    back
  struct PcRelativePatchInfo {
    PcRelativePatchInfo(const DexFile& dex_file,
                        uint32_t off_or_idx,
                        const PcRelativePatchInfo* info_high)
        : target_dex_file(dex_file),
          offset_or_index(off_or_idx),
          label(),
          patch_info_high(info_high) { }

    const DexFile& target_dex_file;
    // Either the dex cache array element offset or the string/type/method index.
    uint32_t offset_or_index;
    // Label for the instruction to patch.
    Mips64Label label;
    // Pointer to the info for the high half patch or nullptr if this is the high half patch info.
    const PcRelativePatchInfo* patch_info_high;

   private:
    PcRelativePatchInfo(PcRelativePatchInfo&& other) = delete;
    DISALLOW_COPY_AND_ASSIGN(PcRelativePatchInfo);
  };

  // Allocate patch records for the various PC-relative load kinds.  Pass
  // `info_high` when creating the low-half record of a high/low pair.
  PcRelativePatchInfo* NewPcRelativeMethodPatch(MethodReference target_method,
                                                const PcRelativePatchInfo* info_high = nullptr);
  PcRelativePatchInfo* NewMethodBssEntryPatch(MethodReference target_method,
                                              const PcRelativePatchInfo* info_high = nullptr);
  PcRelativePatchInfo* NewPcRelativeTypePatch(const DexFile& dex_file,
                                              dex::TypeIndex type_index,
                                              const PcRelativePatchInfo* info_high = nullptr);
  PcRelativePatchInfo* NewTypeBssEntryPatch(const DexFile& dex_file,
                                            dex::TypeIndex type_index,
                                            const PcRelativePatchInfo* info_high = nullptr);
  PcRelativePatchInfo* NewPcRelativeStringPatch(const DexFile& dex_file,
                                                dex::StringIndex string_index,
                                                const PcRelativePatchInfo* info_high = nullptr);
  Literal* DeduplicateBootImageAddressLiteral(uint64_t address);

  // Emit the auipc-style high-half instruction for `info_high`, binding
  // labels so both halves can be patched later.
  void EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
                                            GpuRegister out,
                                            PcRelativePatchInfo* info_low);

  // JIT root (string/class literal) patching support.
  void PatchJitRootUse(uint8_t* code,
                       const uint8_t* roots_data,
                       const Literal* literal,
                       uint64_t index_in_table) const;
  Literal* DeduplicateJitStringLiteral(const DexFile& dex_file,
                                       dex::StringIndex string_index,
                                       Handle<mirror::String> handle);
  Literal* DeduplicateJitClassLiteral(const DexFile& dex_file,
                                      dex::TypeIndex type_index,
                                      Handle<mirror::Class> handle);

 private:
  using Uint32ToLiteralMap = ArenaSafeMap<uint32_t, Literal*>;
  using Uint64ToLiteralMap = ArenaSafeMap<uint64_t, Literal*>;
  using StringToLiteralMap = ArenaSafeMap<StringReference,
                                          Literal*,
                                          StringReferenceValueComparator>;
  using TypeToLiteralMap = ArenaSafeMap<TypeReference,
                                        Literal*,
                                        TypeReferenceValueComparator>;

  Literal* DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map);
  Literal* DeduplicateUint64Literal(uint64_t value);

  // Common factory for the New*Patch methods above.
  PcRelativePatchInfo* NewPcRelativePatch(const DexFile& dex_file,
                                          uint32_t offset_or_index,
                                          const PcRelativePatchInfo* info_high,
                                          ArenaDeque<PcRelativePatchInfo>* patches);

  template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
  void EmitPcRelativeLinkerPatches(const ArenaDeque<PcRelativePatchInfo>& infos,
                                   ArenaVector<LinkerPatch>* linker_patches);

  // Labels for each block that will be compiled.
  Mips64Label* block_labels_;  // Indexed by block id.
  Mips64Label frame_entry_label_;
  LocationsBuilderMIPS64 location_builder_;
  InstructionCodeGeneratorMIPS64 instruction_visitor_;
  ParallelMoveResolverMIPS64 move_resolver_;
  Mips64Assembler assembler_;
  const Mips64InstructionSetFeatures& isa_features_;

  // Deduplication map for 32-bit literals, used for non-patchable boot image addresses.
  Uint32ToLiteralMap uint32_literals_;
  // Deduplication map for 64-bit literals, used for non-patchable method address or method code
  // address.
  Uint64ToLiteralMap uint64_literals_;
  // PC-relative method patch info for kBootImageLinkTimePcRelative.
  ArenaDeque<PcRelativePatchInfo> pc_relative_method_patches_;
  // PC-relative method patch info for kBssEntry.
  ArenaDeque<PcRelativePatchInfo> method_bss_entry_patches_;
  // PC-relative type patch info for kBootImageLinkTimePcRelative.
  ArenaDeque<PcRelativePatchInfo> pc_relative_type_patches_;
  // PC-relative type patch info for kBssEntry.
  ArenaDeque<PcRelativePatchInfo> type_bss_entry_patches_;
  // PC-relative String patch info; type depends on configuration (app .bss or boot image PIC).
  ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;

  // Patches for string root accesses in JIT compiled code.
  StringToLiteralMap jit_string_patches_;
  // Patches for class root accesses in JIT compiled code.
  TypeToLiteralMap jit_class_patches_;

  DISALLOW_COPY_AND_ASSIGN(CodeGeneratorMIPS64);
};

}  // namespace mips64
}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_CODE_GENERATOR_MIPS64_H_