/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_CODE_GENERATOR_MIPS64_H_
#define ART_COMPILER_OPTIMIZING_CODE_GENERATOR_MIPS64_H_

#include "code_generator.h"
#include "driver/compiler_options.h"
#include "nodes.h"
#include "parallel_move_resolver.h"
#include "type_reference.h"
#include "utils/mips64/assembler_mips64.h"

namespace art {
namespace mips64 {

// InvokeDexCallingConvention registers

static constexpr GpuRegister kParameterCoreRegisters[] =
    { A1, A2, A3, A4, A5, A6, A7 };
static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);

static constexpr FpuRegister kParameterFpuRegisters[] =
    { F13, F14, F15, F16, F17, F18, F19 };
static constexpr size_t kParameterFpuRegistersLength = arraysize(kParameterFpuRegisters);


// InvokeRuntimeCallingConvention registers

static constexpr GpuRegister kRuntimeParameterCoreRegisters[] =
    { A0, A1, A2, A3, A4, A5, A6, A7 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);

static constexpr FpuRegister kRuntimeParameterFpuRegisters[] =
    { F12, F13, F14, F15, F16, F17, F18, F19 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);


static constexpr GpuRegister kCoreCalleeSaves[] =
    { S0, S1, S2, S3, S4, S5, S6, S7, GP, S8, RA };
static constexpr FpuRegister kFpuCalleeSaves[] =
    { F24, F25, F26, F27, F28, F29, F30, F31 };


class CodeGeneratorMIPS64;

class InvokeDexCallingConvention : public CallingConvention<GpuRegister, FpuRegister> {
 public:
  InvokeDexCallingConvention()
      : CallingConvention(kParameterCoreRegisters,
                          kParameterCoreRegistersLength,
                          kParameterFpuRegisters,
                          kParameterFpuRegistersLength,
                          kMips64PointerSize) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConvention);
};

class InvokeDexCallingConventionVisitorMIPS64 : public InvokeDexCallingConventionVisitor {
 public:
  InvokeDexCallingConventionVisitorMIPS64() {}
  virtual ~InvokeDexCallingConventionVisitorMIPS64() {}

  Location GetNextLocation(Primitive::Type type) OVERRIDE;
  Location GetReturnLocation(Primitive::Type type) const OVERRIDE;
  Location GetMethodLocation() const OVERRIDE;

 private:
  InvokeDexCallingConvention calling_convention;

  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionVisitorMIPS64);
};
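
// A minimal sketch of how this visitor is typically driven when building the
// LocationSummary of an invoke (an assumed caller loop, shown for illustration
// only; the authoritative version lives in the shared code generator):
//
//   InvokeDexCallingConventionVisitorMIPS64 visitor;
//   for (size_t i = 0; i < invoke->GetNumberOfArguments(); ++i) {
//     locations->SetInAt(i, visitor.GetNextLocation(invoke->InputAt(i)->GetType()));
//   }
//   locations->SetOut(visitor.GetReturnLocation(invoke->GetType()));
//
// GetNextLocation() hands out the kParameterCoreRegisters/kParameterFpuRegisters
// entries above in order and falls back to stack slots once they run out.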

class InvokeRuntimeCallingConvention : public CallingConvention<GpuRegister, FpuRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength,
                          kMips64PointerSize) {}

  Location GetReturnLocation(Primitive::Type return_type);

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

class FieldAccessCallingConventionMIPS64 : public FieldAccessCallingConvention {
 public:
  FieldAccessCallingConventionMIPS64() {}

  Location GetObjectLocation() const OVERRIDE {
    return Location::RegisterLocation(A1);
  }
  Location GetFieldIndexLocation() const OVERRIDE {
    return Location::RegisterLocation(A0);
  }
  Location GetReturnLocation(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE {
    return Location::RegisterLocation(V0);
  }
  Location GetSetValueLocation(Primitive::Type type ATTRIBUTE_UNUSED,
                               bool is_instance) const OVERRIDE {
    return is_instance
        ? Location::RegisterLocation(A2)
        : Location::RegisterLocation(A1);
  }
  Location GetFpuLocation(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE {
    return Location::FpuRegisterLocation(F0);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(FieldAccessCallingConventionMIPS64);
};

class ParallelMoveResolverMIPS64 : public ParallelMoveResolverWithSwap {
 public:
  ParallelMoveResolverMIPS64(ArenaAllocator* allocator, CodeGeneratorMIPS64* codegen)
      : ParallelMoveResolverWithSwap(allocator), codegen_(codegen) {}

  void EmitMove(size_t index) OVERRIDE;
  void EmitSwap(size_t index) OVERRIDE;
  void SpillScratch(int reg) OVERRIDE;
  void RestoreScratch(int reg) OVERRIDE;

  void Exchange(int index1, int index2, bool double_slot);

  Mips64Assembler* GetAssembler() const;

 private:
  CodeGeneratorMIPS64* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(ParallelMoveResolverMIPS64);
};

class SlowPathCodeMIPS64 : public SlowPathCode {
 public:
  explicit SlowPathCodeMIPS64(HInstruction* instruction)
      : SlowPathCode(instruction), entry_label_(), exit_label_() {}

  Mips64Label* GetEntryLabel() { return &entry_label_; }
  Mips64Label* GetExitLabel() { return &exit_label_; }

 private:
  Mips64Label entry_label_;
  Mips64Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeMIPS64);
};

class LocationsBuilderMIPS64 : public HGraphVisitor {
 public:
  LocationsBuilderMIPS64(HGraph* graph, CodeGeneratorMIPS64* codegen)
      : HGraphVisitor(graph), codegen_(codegen) {}

#define DECLARE_VISIT_INSTRUCTION(name, super) \
  void Visit##name(H##name* instr) OVERRIDE;

  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

  void VisitInstruction(HInstruction* instruction) OVERRIDE {
    LOG(FATAL) << "Unreachable instruction " << instruction->DebugName()
               << " (id " << instruction->GetId() << ")";
  }

 private:
  void HandleInvoke(HInvoke* invoke);
  void HandleBinaryOp(HBinaryOperation* operation);
  void HandleCondition(HCondition* instruction);
  void HandleShift(HBinaryOperation* operation);
  void HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info);
  void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);
  Location RegisterOrZeroConstant(HInstruction* instruction);
  Location FpuRegisterOrConstantForStore(HInstruction* instruction);

  InvokeDexCallingConventionVisitorMIPS64 parameter_visitor_;

  CodeGeneratorMIPS64* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(LocationsBuilderMIPS64);
};

class InstructionCodeGeneratorMIPS64 : public InstructionCodeGenerator {
 public:
  InstructionCodeGeneratorMIPS64(HGraph* graph, CodeGeneratorMIPS64* codegen);

#define DECLARE_VISIT_INSTRUCTION(name, super) \
  void Visit##name(H##name* instr) OVERRIDE;

  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

  void VisitInstruction(HInstruction* instruction) OVERRIDE {
    LOG(FATAL) << "Unreachable instruction " << instruction->DebugName()
               << " (id " << instruction->GetId() << ")";
  }

  Mips64Assembler* GetAssembler() const { return assembler_; }

  // Compare-and-jump packed switch generates approx. 3 + 2.5 * N 32-bit
  // instructions for N cases.
  // Table-based packed switch generates approx. 11 32-bit instructions
  // and N 32-bit data words for N cases.
  // At N = 6 they come out as 18 and 17 32-bit words respectively.
  // We switch to the table-based method starting with 7 cases.
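  // (Sanity check, plain arithmetic on the estimates above: at N = 7 the
  // compare-and-jump form costs roughly 3 + 2.5 * 7 = 20.5, i.e. 21 words,
  // while the table-based form costs roughly 11 + 7 = 18 words, so the table
  // wins from 7 cases on.)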
  static constexpr uint32_t kPackedSwitchJumpTableThreshold = 6;

  void GenerateMemoryBarrier(MemBarrierKind kind);

 private:
  void GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path, GpuRegister class_reg);
  void GenerateSuspendCheck(HSuspendCheck* check, HBasicBlock* successor);
  void HandleBinaryOp(HBinaryOperation* operation);
  void HandleCondition(HCondition* instruction);
  void HandleShift(HBinaryOperation* operation);
  void HandleFieldSet(HInstruction* instruction,
                      const FieldInfo& field_info,
                      bool value_can_be_null);
  void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);

  // Generate a heap reference load using one register `out`:
  //
  //   out <- *(out + offset)
  //
  // while honoring heap poisoning and/or read barriers (if any).
  //
  // Location `maybe_temp` is used when generating a read barrier and
  // shall be a register in that case; it may be an invalid location
  // otherwise.
  void GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                        Location out,
                                        uint32_t offset,
                                        Location maybe_temp,
                                        ReadBarrierOption read_barrier_option);
  // Generate a heap reference load using two different registers
  // `out` and `obj`:
  //
  //   out <- *(obj + offset)
  //
  // while honoring heap poisoning and/or read barriers (if any).
  //
  // Location `maybe_temp` is used when generating a Baker's (fast
  // path) read barrier and shall be a register in that case; it may
  // be an invalid location otherwise.
  void GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                         Location out,
                                         Location obj,
                                         uint32_t offset,
                                         Location maybe_temp,
                                         ReadBarrierOption read_barrier_option);

  // Generate a GC root reference load:
  //
  //   root <- *(obj + offset)
  //
  // while honoring read barriers (if any).
  void GenerateGcRootFieldLoad(HInstruction* instruction,
                               Location root,
                               GpuRegister obj,
                               uint32_t offset,
                               ReadBarrierOption read_barrier_option);

  void GenerateTestAndBranch(HInstruction* instruction,
                             size_t condition_input_index,
                             Mips64Label* true_target,
                             Mips64Label* false_target);
  void DivRemOneOrMinusOne(HBinaryOperation* instruction);
  void DivRemByPowerOfTwo(HBinaryOperation* instruction);
  void GenerateDivRemWithAnyConstant(HBinaryOperation* instruction);
  void GenerateDivRemIntegral(HBinaryOperation* instruction);
  void GenerateIntLongCompare(IfCondition cond, bool is64bit, LocationSummary* locations);
  void GenerateIntLongCompareAndBranch(IfCondition cond,
                                       bool is64bit,
                                       LocationSummary* locations,
                                       Mips64Label* label);
  void GenerateFpCompare(IfCondition cond,
                         bool gt_bias,
                         Primitive::Type type,
                         LocationSummary* locations);
  void GenerateFpCompareAndBranch(IfCondition cond,
                                  bool gt_bias,
                                  Primitive::Type type,
                                  LocationSummary* locations,
                                  Mips64Label* label);
  void HandleGoto(HInstruction* got, HBasicBlock* successor);
  void GenPackedSwitchWithCompares(GpuRegister value_reg,
                                   int32_t lower_bound,
                                   uint32_t num_entries,
                                   HBasicBlock* switch_block,
                                   HBasicBlock* default_block);
  void GenTableBasedPackedSwitch(GpuRegister value_reg,
                                 int32_t lower_bound,
                                 uint32_t num_entries,
                                 HBasicBlock* switch_block,
                                 HBasicBlock* default_block);
  int32_t VecAddress(LocationSummary* locations,
                     size_t size,
                     /* out */ GpuRegister* adjusted_base);

  Mips64Assembler* const assembler_;
  CodeGeneratorMIPS64* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(InstructionCodeGeneratorMIPS64);
};

class CodeGeneratorMIPS64 : public CodeGenerator {
 public:
  CodeGeneratorMIPS64(HGraph* graph,
                      const Mips64InstructionSetFeatures& isa_features,
                      const CompilerOptions& compiler_options,
                      OptimizingCompilerStats* stats = nullptr);
  virtual ~CodeGeneratorMIPS64() {}

  void GenerateFrameEntry() OVERRIDE;
  void GenerateFrameExit() OVERRIDE;

  void Bind(HBasicBlock* block) OVERRIDE;

  size_t GetWordSize() const OVERRIDE { return kMips64DoublewordSize; }

  size_t GetFloatingPointSpillSlotSize() const OVERRIDE {
    return GetGraph()->HasSIMD()
        ? 2 * kMips64DoublewordSize   // 16 bytes for each spill.
        : 1 * kMips64DoublewordSize;  //  8 bytes for each spill.
  }

  uintptr_t GetAddressOf(HBasicBlock* block) OVERRIDE {
    return assembler_.GetLabelLocation(GetLabelOf(block));
  }

  HGraphVisitor* GetLocationBuilder() OVERRIDE { return &location_builder_; }
  HGraphVisitor* GetInstructionVisitor() OVERRIDE { return &instruction_visitor_; }
  Mips64Assembler* GetAssembler() OVERRIDE { return &assembler_; }
  const Mips64Assembler& GetAssembler() const OVERRIDE { return assembler_; }

  // Emit linker patches.
  void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;
  void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) OVERRIDE;

  // Fast path implementation of ReadBarrier::Barrier for a heap
  // reference field load when Baker's read barriers are used.
  void GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                             Location ref,
                                             GpuRegister obj,
                                             uint32_t offset,
                                             Location temp,
                                             bool needs_null_check);
  // Fast path implementation of ReadBarrier::Barrier for a heap
  // reference array load when Baker's read barriers are used.
  void GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                             Location ref,
                                             GpuRegister obj,
                                             uint32_t data_offset,
                                             Location index,
                                             Location temp,
                                             bool needs_null_check);

  // Factored implementation, used by GenerateFieldLoadWithBakerReadBarrier,
  // GenerateArrayLoadWithBakerReadBarrier and some intrinsics.
  //
  // Load the object reference located at the address
  // `obj + offset + (index << scale_factor)`, held by object `obj`, into
  // `ref`, and mark it if needed.
  //
  // If `always_update_field` is true, the value of the reference is
  // atomically updated in the holder (`obj`).
  void GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                 Location ref,
                                                 GpuRegister obj,
                                                 uint32_t offset,
                                                 Location index,
                                                 ScaleFactor scale_factor,
                                                 Location temp,
                                                 bool needs_null_check,
                                                 bool always_update_field = false);

  // Generate a read barrier for a heap reference within `instruction`
  // using a slow path.
  //
  // A read barrier for an object reference read from the heap is
  // implemented as a call to the artReadBarrierSlow runtime entry
  // point, which is passed the values in locations `ref`, `obj`, and
  // `offset`:
  //
  //   mirror::Object* artReadBarrierSlow(mirror::Object* ref,
  //                                      mirror::Object* obj,
  //                                      uint32_t offset);
  //
  // The `out` location contains the value returned by
  // artReadBarrierSlow.
  //
  // When `index` is provided (i.e. for array accesses), the offset
  // value passed to artReadBarrierSlow is adjusted to take `index`
  // into account.
  void GenerateReadBarrierSlow(HInstruction* instruction,
                               Location out,
                               Location ref,
                               Location obj,
                               uint32_t offset,
                               Location index = Location::NoLocation());

  // If read barriers are enabled, generate a read barrier for a heap
  // reference using a slow path. If heap poisoning is enabled, also
  // unpoison the reference in `out`.
  void MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                    Location out,
                                    Location ref,
                                    Location obj,
                                    uint32_t offset,
                                    Location index = Location::NoLocation());

  // Generate a read barrier for a GC root within `instruction` using
  // a slow path.
  //
  // A read barrier for an object reference GC root is implemented as
  // a call to the artReadBarrierForRootSlow runtime entry point,
  // which is passed the value in location `root`:
  //
  //   mirror::Object* artReadBarrierForRootSlow(GcRoot<mirror::Object>* root);
  //
  // The `out` location contains the value returned by
  // artReadBarrierForRootSlow.
  void GenerateReadBarrierForRootSlow(HInstruction* instruction, Location out, Location root);

  void MarkGCCard(GpuRegister object, GpuRegister value, bool value_can_be_null);

  // Register allocation.

  void SetupBlockedRegisters() const OVERRIDE;

  size_t SaveCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
  size_t RestoreCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
  size_t SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
  size_t RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;

  void DumpCoreRegister(std::ostream& stream, int reg) const OVERRIDE;
  void DumpFloatingPointRegister(std::ostream& stream, int reg) const OVERRIDE;

  InstructionSet GetInstructionSet() const OVERRIDE { return InstructionSet::kMips64; }

  const Mips64InstructionSetFeatures& GetInstructionSetFeatures() const {
    return isa_features_;
  }

  Mips64Label* GetLabelOf(HBasicBlock* block) const {
    return CommonGetLabelOf<Mips64Label>(block_labels_, block);
  }

  void Initialize() OVERRIDE {
    block_labels_ = CommonInitializeLabels<Mips64Label>();
  }

  // We prefer aligned loads and stores (less code), so spill and restore registers in slow paths
  // at aligned locations.
  uint32_t GetPreferredSlotsAlignment() const OVERRIDE { return kMips64DoublewordSize; }

  void Finalize(CodeAllocator* allocator) OVERRIDE;

  // Code generation helpers.
  void MoveLocation(Location dst, Location src, Primitive::Type dst_type) OVERRIDE;

  void MoveConstant(Location destination, int32_t value) OVERRIDE;

  void AddLocationAsTemp(Location location, LocationSummary* locations) OVERRIDE;


  void SwapLocations(Location loc1, Location loc2, Primitive::Type type);

  // Generate code to invoke a runtime entry point.
  void InvokeRuntime(QuickEntrypointEnum entrypoint,
                     HInstruction* instruction,
                     uint32_t dex_pc,
                     SlowPathCode* slow_path = nullptr) OVERRIDE;

  // Generate code to invoke a runtime entry point, but do not record
  // PC-related information in a stack map.
  void InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                           HInstruction* instruction,
                                           SlowPathCode* slow_path);

  void GenerateInvokeRuntime(int32_t entry_point_offset);

  ParallelMoveResolver* GetMoveResolver() OVERRIDE { return &move_resolver_; }

  bool NeedsTwoRegisters(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE { return false; }

  // Check if the desired_string_load_kind is supported. If it is, return it,
  // otherwise return a fall-back kind that should be used instead.
  HLoadString::LoadKind GetSupportedLoadStringKind(
      HLoadString::LoadKind desired_string_load_kind) OVERRIDE;

  // Check if the desired_class_load_kind is supported. If it is, return it,
  // otherwise return a fall-back kind that should be used instead.
  HLoadClass::LoadKind GetSupportedLoadClassKind(
      HLoadClass::LoadKind desired_class_load_kind) OVERRIDE;

  // Check if the desired_dispatch_info is supported. If it is, return it,
  // otherwise return a fall-back info that should be used instead.
  HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
      const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
      HInvokeStaticOrDirect* invoke) OVERRIDE;

  void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) OVERRIDE;
  void GenerateVirtualCall(HInvokeVirtual* invoke, Location temp) OVERRIDE;

  void MoveFromReturnRegister(Location trg ATTRIBUTE_UNUSED,
                              Primitive::Type type ATTRIBUTE_UNUSED) OVERRIDE {
    UNIMPLEMENTED(FATAL) << "Not implemented on MIPS64";
  }

  void GenerateNop() OVERRIDE;
  void GenerateImplicitNullCheck(HNullCheck* instruction) OVERRIDE;
  void GenerateExplicitNullCheck(HNullCheck* instruction) OVERRIDE;

  // The PcRelativePatchInfo is used for PC-relative addressing of dex cache arrays,
  // boot image strings and method calls. The only difference is the interpretation of
  // the offset_or_index.
  struct PcRelativePatchInfo {
    PcRelativePatchInfo(const DexFile& dex_file, uint32_t off_or_idx)
        : target_dex_file(dex_file), offset_or_index(off_or_idx) { }
    PcRelativePatchInfo(PcRelativePatchInfo&& other) = default;

    const DexFile& target_dex_file;
    // Either the dex cache array element offset or the string/type/method index.
    uint32_t offset_or_index;
    // Label for the auipc instruction.
    Mips64Label pc_rel_label;
  };
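
  // Assumed usage pattern (a sketch only; the matching .cc file is authoritative):
  // a New*Patch() call below records a PcRelativePatchInfo,
  // EmitPcRelativeAddressPlaceholderHigh() binds pc_rel_label to the auipc that
  // materializes the high half of the PC-relative offset, the caller then emits
  // the instruction carrying the low half, and EmitLinkerPatches() later turns
  // the recorded infos into LinkerPatch entries so both halves get fixed up.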

  PcRelativePatchInfo* NewPcRelativeMethodPatch(MethodReference target_method);
  PcRelativePatchInfo* NewPcRelativeTypePatch(const DexFile& dex_file, dex::TypeIndex type_index);
  PcRelativePatchInfo* NewTypeBssEntryPatch(const DexFile& dex_file, dex::TypeIndex type_index);
  PcRelativePatchInfo* NewPcRelativeStringPatch(const DexFile& dex_file,
                                                dex::StringIndex string_index);
  PcRelativePatchInfo* NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
                                                       uint32_t element_offset);
  PcRelativePatchInfo* NewPcRelativeCallPatch(const DexFile& dex_file,
                                              uint32_t method_index);
  Literal* DeduplicateBootImageAddressLiteral(uint64_t address);

  void EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info, GpuRegister out);

  void PatchJitRootUse(uint8_t* code,
                       const uint8_t* roots_data,
                       const Literal* literal,
                       uint64_t index_in_table) const;
  Literal* DeduplicateJitStringLiteral(const DexFile& dex_file,
                                       dex::StringIndex string_index,
                                       Handle<mirror::String> handle);
  Literal* DeduplicateJitClassLiteral(const DexFile& dex_file,
                                      dex::TypeIndex type_index,
                                      Handle<mirror::Class> handle);

 private:
  using Uint32ToLiteralMap = ArenaSafeMap<uint32_t, Literal*>;
  using Uint64ToLiteralMap = ArenaSafeMap<uint64_t, Literal*>;
  using StringToLiteralMap = ArenaSafeMap<StringReference,
                                          Literal*,
                                          StringReferenceValueComparator>;
  using TypeToLiteralMap = ArenaSafeMap<TypeReference,
                                        Literal*,
                                        TypeReferenceValueComparator>;

  Literal* DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map);
  Literal* DeduplicateUint64Literal(uint64_t value);

  PcRelativePatchInfo* NewPcRelativePatch(const DexFile& dex_file,
                                          uint32_t offset_or_index,
                                          ArenaDeque<PcRelativePatchInfo>* patches);

  template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
  void EmitPcRelativeLinkerPatches(const ArenaDeque<PcRelativePatchInfo>& infos,
                                   ArenaVector<LinkerPatch>* linker_patches);

  // Labels for each block that will be compiled.
  Mips64Label* block_labels_;  // Indexed by block id.
  Mips64Label frame_entry_label_;
  LocationsBuilderMIPS64 location_builder_;
  InstructionCodeGeneratorMIPS64 instruction_visitor_;
  ParallelMoveResolverMIPS64 move_resolver_;
  Mips64Assembler assembler_;
  const Mips64InstructionSetFeatures& isa_features_;

  // Deduplication map for 32-bit literals, used for non-patchable boot image addresses.
  Uint32ToLiteralMap uint32_literals_;
  // Deduplication map for 64-bit literals, used for non-patchable method address or method code
  // address.
  Uint64ToLiteralMap uint64_literals_;
  // PC-relative patch info.
  ArenaDeque<PcRelativePatchInfo> pc_relative_dex_cache_patches_;
  // PC-relative method patch info for kBootImageLinkTimePcRelative.
  ArenaDeque<PcRelativePatchInfo> pc_relative_method_patches_;
  // PC-relative type patch info for kBootImageLinkTimePcRelative.
  ArenaDeque<PcRelativePatchInfo> pc_relative_type_patches_;
  // PC-relative type patch info for kBssEntry.
  ArenaDeque<PcRelativePatchInfo> type_bss_entry_patches_;
  // PC-relative String patch info; type depends on configuration (app .bss or boot image PIC).
  ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;

  // Patches for string root accesses in JIT compiled code.
  StringToLiteralMap jit_string_patches_;
  // Patches for class root accesses in JIT compiled code.
  TypeToLiteralMap jit_class_patches_;

  DISALLOW_COPY_AND_ASSIGN(CodeGeneratorMIPS64);
};

}  // namespace mips64
}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_CODE_GENERATOR_MIPS64_H_