blob: 44d37599780c39ea6207f14242d1495847745422 [file] [log] [blame]
Alexey Frunze4dda3372015-06-01 18:31:49 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_mips64.h"
18
Alexey Frunzec857c742015-09-23 15:12:39 -070019#include "art_method.h"
20#include "code_generator_utils.h"
Alexey Frunze19f6c692016-11-30 19:19:55 -080021#include "compiled_method.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070022#include "entrypoints/quick/quick_entrypoints.h"
23#include "entrypoints/quick/quick_entrypoints_enum.h"
24#include "gc/accounting/card_table.h"
25#include "intrinsics.h"
Chris Larsen3039e382015-08-26 07:54:08 -070026#include "intrinsics_mips64.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070027#include "mirror/array-inl.h"
28#include "mirror/class-inl.h"
29#include "offsets.h"
30#include "thread.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070031#include "utils/assembler.h"
Alexey Frunzea0e87b02015-09-24 22:57:20 -070032#include "utils/mips64/assembler_mips64.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070033#include "utils/stack_checks.h"
34
35namespace art {
36namespace mips64 {
37
38static constexpr int kCurrentMethodStackOffset = 0;
39static constexpr GpuRegister kMethodRegisterArgument = A0;
40
Alexey Frunze4dda3372015-06-01 18:31:49 -070041Location Mips64ReturnLocation(Primitive::Type return_type) {
42 switch (return_type) {
43 case Primitive::kPrimBoolean:
44 case Primitive::kPrimByte:
45 case Primitive::kPrimChar:
46 case Primitive::kPrimShort:
47 case Primitive::kPrimInt:
48 case Primitive::kPrimNot:
49 case Primitive::kPrimLong:
50 return Location::RegisterLocation(V0);
51
52 case Primitive::kPrimFloat:
53 case Primitive::kPrimDouble:
54 return Location::FpuRegisterLocation(F0);
55
56 case Primitive::kPrimVoid:
57 return Location();
58 }
59 UNREACHABLE();
60}
61
62Location InvokeDexCallingConventionVisitorMIPS64::GetReturnLocation(Primitive::Type type) const {
63 return Mips64ReturnLocation(type);
64}
65
66Location InvokeDexCallingConventionVisitorMIPS64::GetMethodLocation() const {
67 return Location::RegisterLocation(kMethodRegisterArgument);
68}
69
70Location InvokeDexCallingConventionVisitorMIPS64::GetNextLocation(Primitive::Type type) {
71 Location next_location;
72 if (type == Primitive::kPrimVoid) {
73 LOG(FATAL) << "Unexpected parameter type " << type;
74 }
75
76 if (Primitive::IsFloatingPointType(type) &&
77 (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
78 next_location = Location::FpuRegisterLocation(
79 calling_convention.GetFpuRegisterAt(float_index_++));
80 gp_index_++;
81 } else if (!Primitive::IsFloatingPointType(type) &&
82 (gp_index_ < calling_convention.GetNumberOfRegisters())) {
83 next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index_++));
84 float_index_++;
85 } else {
86 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
87 next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
88 : Location::StackSlot(stack_offset);
89 }
90
91 // Space on the stack is reserved for all arguments.
92 stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
93
94 // TODO: review
95
96 // TODO: shouldn't we use a whole machine word per argument on the stack?
97 // Implicit 4-byte method pointer (and such) will cause misalignment.
98
99 return next_location;
100}
101
102Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type type) {
103 return Mips64ReturnLocation(type);
104}
105
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100106// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
107#define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -0700108#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700109
110class BoundsCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
111 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000112 explicit BoundsCheckSlowPathMIPS64(HBoundsCheck* instruction) : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700113
114 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100115 LocationSummary* locations = instruction_->GetLocations();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700116 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
117 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000118 if (instruction_->CanThrowIntoCatchBlock()) {
119 // Live registers will be restored in the catch block if caught.
120 SaveLiveRegisters(codegen, instruction_->GetLocations());
121 }
Alexey Frunze4dda3372015-06-01 18:31:49 -0700122 // We're moving two locations to locations that could overlap, so we need a parallel
123 // move resolver.
124 InvokeRuntimeCallingConvention calling_convention;
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100125 codegen->EmitParallelMoves(locations->InAt(0),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700126 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
127 Primitive::kPrimInt,
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100128 locations->InAt(1),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700129 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
130 Primitive::kPrimInt);
Serban Constantinescufc734082016-07-19 17:18:07 +0100131 QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
132 ? kQuickThrowStringBounds
133 : kQuickThrowArrayBounds;
134 mips64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +0100135 CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700136 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
137 }
138
Alexandre Rames8158f282015-08-07 10:26:17 +0100139 bool IsFatal() const OVERRIDE { return true; }
140
Roland Levillain46648892015-06-19 16:07:18 +0100141 const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS64"; }
142
Alexey Frunze4dda3372015-06-01 18:31:49 -0700143 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700144 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS64);
145};
146
147class DivZeroCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
148 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000149 explicit DivZeroCheckSlowPathMIPS64(HDivZeroCheck* instruction) : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700150
151 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
152 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
153 __ Bind(GetEntryLabel());
Serban Constantinescufc734082016-07-19 17:18:07 +0100154 mips64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700155 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
156 }
157
Alexandre Rames8158f282015-08-07 10:26:17 +0100158 bool IsFatal() const OVERRIDE { return true; }
159
Roland Levillain46648892015-06-19 16:07:18 +0100160 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS64"; }
161
Alexey Frunze4dda3372015-06-01 18:31:49 -0700162 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700163 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS64);
164};
165
166class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
167 public:
168 LoadClassSlowPathMIPS64(HLoadClass* cls,
169 HInstruction* at,
170 uint32_t dex_pc,
171 bool do_clinit)
David Srbecky9cd6d372016-02-09 15:24:47 +0000172 : SlowPathCodeMIPS64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
Alexey Frunze4dda3372015-06-01 18:31:49 -0700173 DCHECK(at->IsLoadClass() || at->IsClinitCheck());
174 }
175
176 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
177 LocationSummary* locations = at_->GetLocations();
178 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
179
180 __ Bind(GetEntryLabel());
181 SaveLiveRegisters(codegen, locations);
182
183 InvokeRuntimeCallingConvention calling_convention;
Andreas Gampea5b09a62016-11-17 15:21:22 -0800184 __ LoadConst32(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex().index_);
Serban Constantinescufc734082016-07-19 17:18:07 +0100185 QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
186 : kQuickInitializeType;
187 mips64_codegen->InvokeRuntime(entrypoint, at_, dex_pc_, this);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700188 if (do_clinit_) {
189 CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
190 } else {
191 CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
192 }
193
194 // Move the class to the desired location.
195 Location out = locations->Out();
196 if (out.IsValid()) {
197 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
198 Primitive::Type type = at_->GetType();
199 mips64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
200 }
201
202 RestoreLiveRegisters(codegen, locations);
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700203 __ Bc(GetExitLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -0700204 }
205
Roland Levillain46648892015-06-19 16:07:18 +0100206 const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS64"; }
207
Alexey Frunze4dda3372015-06-01 18:31:49 -0700208 private:
209 // The class this slow path will load.
210 HLoadClass* const cls_;
211
212 // The instruction where this slow path is happening.
213 // (Might be the load class or an initialization check).
214 HInstruction* const at_;
215
216 // The dex PC of `at_`.
217 const uint32_t dex_pc_;
218
219 // Whether to initialize the class.
220 const bool do_clinit_;
221
222 DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS64);
223};
224
225class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
226 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000227 explicit LoadStringSlowPathMIPS64(HLoadString* instruction) : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700228
229 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
230 LocationSummary* locations = instruction_->GetLocations();
231 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
232 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
233
234 __ Bind(GetEntryLabel());
235 SaveLiveRegisters(codegen, locations);
236
237 InvokeRuntimeCallingConvention calling_convention;
Andreas Gampe8a0128a2016-11-28 07:38:35 -0800238 const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex().index_;
David Srbecky9cd6d372016-02-09 15:24:47 +0000239 __ LoadConst32(calling_convention.GetRegisterAt(0), string_index);
Serban Constantinescufc734082016-07-19 17:18:07 +0100240 mips64_codegen->InvokeRuntime(kQuickResolveString,
Alexey Frunze4dda3372015-06-01 18:31:49 -0700241 instruction_,
242 instruction_->GetDexPc(),
243 this);
244 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
245 Primitive::Type type = instruction_->GetType();
246 mips64_codegen->MoveLocation(locations->Out(),
247 calling_convention.GetReturnLocation(type),
248 type);
249
250 RestoreLiveRegisters(codegen, locations);
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700251 __ Bc(GetExitLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -0700252 }
253
Roland Levillain46648892015-06-19 16:07:18 +0100254 const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS64"; }
255
Alexey Frunze4dda3372015-06-01 18:31:49 -0700256 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700257 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS64);
258};
259
260class NullCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
261 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000262 explicit NullCheckSlowPathMIPS64(HNullCheck* instr) : SlowPathCodeMIPS64(instr) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700263
264 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
265 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
266 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000267 if (instruction_->CanThrowIntoCatchBlock()) {
268 // Live registers will be restored in the catch block if caught.
269 SaveLiveRegisters(codegen, instruction_->GetLocations());
270 }
Serban Constantinescufc734082016-07-19 17:18:07 +0100271 mips64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Alexey Frunze4dda3372015-06-01 18:31:49 -0700272 instruction_,
273 instruction_->GetDexPc(),
274 this);
275 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
276 }
277
Alexandre Rames8158f282015-08-07 10:26:17 +0100278 bool IsFatal() const OVERRIDE { return true; }
279
Roland Levillain46648892015-06-19 16:07:18 +0100280 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS64"; }
281
Alexey Frunze4dda3372015-06-01 18:31:49 -0700282 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700283 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS64);
284};
285
286class SuspendCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
287 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100288 SuspendCheckSlowPathMIPS64(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000289 : SlowPathCodeMIPS64(instruction), successor_(successor) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700290
291 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
292 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
293 __ Bind(GetEntryLabel());
Serban Constantinescufc734082016-07-19 17:18:07 +0100294 mips64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700295 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700296 if (successor_ == nullptr) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700297 __ Bc(GetReturnLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -0700298 } else {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700299 __ Bc(mips64_codegen->GetLabelOf(successor_));
Alexey Frunze4dda3372015-06-01 18:31:49 -0700300 }
301 }
302
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700303 Mips64Label* GetReturnLabel() {
Alexey Frunze4dda3372015-06-01 18:31:49 -0700304 DCHECK(successor_ == nullptr);
305 return &return_label_;
306 }
307
Roland Levillain46648892015-06-19 16:07:18 +0100308 const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS64"; }
309
Alexey Frunze4dda3372015-06-01 18:31:49 -0700310 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700311 // If not null, the block to branch to after the suspend check.
312 HBasicBlock* const successor_;
313
314 // If `successor_` is null, the label to branch to after the suspend check.
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700315 Mips64Label return_label_;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700316
317 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS64);
318};
319
320class TypeCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
321 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000322 explicit TypeCheckSlowPathMIPS64(HInstruction* instruction) : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700323
324 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
325 LocationSummary* locations = instruction_->GetLocations();
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800326
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100327 uint32_t dex_pc = instruction_->GetDexPc();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700328 DCHECK(instruction_->IsCheckCast()
329 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
330 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
331
332 __ Bind(GetEntryLabel());
333 SaveLiveRegisters(codegen, locations);
334
335 // We're moving two locations to locations that could overlap, so we need a parallel
336 // move resolver.
337 InvokeRuntimeCallingConvention calling_convention;
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800338 codegen->EmitParallelMoves(locations->InAt(0),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700339 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
340 Primitive::kPrimNot,
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800341 locations->InAt(1),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700342 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
343 Primitive::kPrimNot);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700344 if (instruction_->IsInstanceOf()) {
Serban Constantinescufc734082016-07-19 17:18:07 +0100345 mips64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800346 CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700347 Primitive::Type ret_type = instruction_->GetType();
348 Location ret_loc = calling_convention.GetReturnLocation(ret_type);
349 mips64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700350 } else {
351 DCHECK(instruction_->IsCheckCast());
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800352 mips64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
353 CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700354 }
355
356 RestoreLiveRegisters(codegen, locations);
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700357 __ Bc(GetExitLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -0700358 }
359
Roland Levillain46648892015-06-19 16:07:18 +0100360 const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS64"; }
361
Alexey Frunze4dda3372015-06-01 18:31:49 -0700362 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700363 DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS64);
364};
365
366class DeoptimizationSlowPathMIPS64 : public SlowPathCodeMIPS64 {
367 public:
Aart Bik42249c32016-01-07 15:33:50 -0800368 explicit DeoptimizationSlowPathMIPS64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000369 : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700370
371 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Aart Bik42249c32016-01-07 15:33:50 -0800372 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700373 __ Bind(GetEntryLabel());
Serban Constantinescufc734082016-07-19 17:18:07 +0100374 mips64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000375 CheckEntrypointTypes<kQuickDeoptimize, void, void>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700376 }
377
Roland Levillain46648892015-06-19 16:07:18 +0100378 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS64"; }
379
Alexey Frunze4dda3372015-06-01 18:31:49 -0700380 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700381 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS64);
382};
383
384CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
385 const Mips64InstructionSetFeatures& isa_features,
Serban Constantinescuecc43662015-08-13 13:33:12 +0100386 const CompilerOptions& compiler_options,
387 OptimizingCompilerStats* stats)
Alexey Frunze4dda3372015-06-01 18:31:49 -0700388 : CodeGenerator(graph,
389 kNumberOfGpuRegisters,
390 kNumberOfFpuRegisters,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000391 /* number_of_register_pairs */ 0,
Alexey Frunze4dda3372015-06-01 18:31:49 -0700392 ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
393 arraysize(kCoreCalleeSaves)),
394 ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
395 arraysize(kFpuCalleeSaves)),
Serban Constantinescuecc43662015-08-13 13:33:12 +0100396 compiler_options,
397 stats),
Vladimir Marko225b6462015-09-28 12:17:40 +0100398 block_labels_(nullptr),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700399 location_builder_(graph, this),
400 instruction_visitor_(graph, this),
401 move_resolver_(graph->GetArena(), this),
Vladimir Marko93205e32016-04-13 11:59:46 +0100402 assembler_(graph->GetArena()),
Alexey Frunze19f6c692016-11-30 19:19:55 -0800403 isa_features_(isa_features),
404 uint64_literals_(std::less<uint64_t>(),
405 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
406 method_patches_(MethodReferenceComparator(),
407 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
408 call_patches_(MethodReferenceComparator(),
409 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
410 pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
411 relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -0700412 // Save RA (containing the return address) to mimic Quick.
413 AddAllocatedRegister(Location::RegisterLocation(RA));
414}
415
416#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100417// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
418#define __ down_cast<Mips64Assembler*>(GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -0700419#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700420
421void CodeGeneratorMIPS64::Finalize(CodeAllocator* allocator) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700422 // Ensure that we fix up branches.
423 __ FinalizeCode();
424
425 // Adjust native pc offsets in stack maps.
426 for (size_t i = 0, num = stack_map_stream_.GetNumberOfStackMaps(); i != num; ++i) {
427 uint32_t old_position = stack_map_stream_.GetStackMap(i).native_pc_offset;
428 uint32_t new_position = __ GetAdjustedPosition(old_position);
429 DCHECK_GE(new_position, old_position);
430 stack_map_stream_.SetStackMapNativePcOffset(i, new_position);
431 }
432
433 // Adjust pc offsets for the disassembly information.
434 if (disasm_info_ != nullptr) {
435 GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
436 frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
437 frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
438 for (auto& it : *disasm_info_->GetInstructionIntervals()) {
439 it.second.start = __ GetAdjustedPosition(it.second.start);
440 it.second.end = __ GetAdjustedPosition(it.second.end);
441 }
442 for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
443 it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
444 it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
445 }
446 }
447
Alexey Frunze4dda3372015-06-01 18:31:49 -0700448 CodeGenerator::Finalize(allocator);
449}
450
451Mips64Assembler* ParallelMoveResolverMIPS64::GetAssembler() const {
452 return codegen_->GetAssembler();
453}
454
455void ParallelMoveResolverMIPS64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +0100456 MoveOperands* move = moves_[index];
Alexey Frunze4dda3372015-06-01 18:31:49 -0700457 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
458}
459
460void ParallelMoveResolverMIPS64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +0100461 MoveOperands* move = moves_[index];
Alexey Frunze4dda3372015-06-01 18:31:49 -0700462 codegen_->SwapLocations(move->GetDestination(), move->GetSource(), move->GetType());
463}
464
465void ParallelMoveResolverMIPS64::RestoreScratch(int reg) {
466 // Pop reg
467 __ Ld(GpuRegister(reg), SP, 0);
Lazar Trsicd9672662015-09-03 17:33:01 +0200468 __ DecreaseFrameSize(kMips64DoublewordSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700469}
470
471void ParallelMoveResolverMIPS64::SpillScratch(int reg) {
472 // Push reg
Lazar Trsicd9672662015-09-03 17:33:01 +0200473 __ IncreaseFrameSize(kMips64DoublewordSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700474 __ Sd(GpuRegister(reg), SP, 0);
475}
476
477void ParallelMoveResolverMIPS64::Exchange(int index1, int index2, bool double_slot) {
478 LoadOperandType load_type = double_slot ? kLoadDoubleword : kLoadWord;
479 StoreOperandType store_type = double_slot ? kStoreDoubleword : kStoreWord;
480 // Allocate a scratch register other than TMP, if available.
481 // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
482 // automatically unspilled when the scratch scope object is destroyed).
483 ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
484 // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
Lazar Trsicd9672662015-09-03 17:33:01 +0200485 int stack_offset = ensure_scratch.IsSpilled() ? kMips64DoublewordSize : 0;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700486 __ LoadFromOffset(load_type,
487 GpuRegister(ensure_scratch.GetRegister()),
488 SP,
489 index1 + stack_offset);
490 __ LoadFromOffset(load_type,
491 TMP,
492 SP,
493 index2 + stack_offset);
494 __ StoreToOffset(store_type,
495 GpuRegister(ensure_scratch.GetRegister()),
496 SP,
497 index2 + stack_offset);
498 __ StoreToOffset(store_type, TMP, SP, index1 + stack_offset);
499}
500
501static dwarf::Reg DWARFReg(GpuRegister reg) {
502 return dwarf::Reg::Mips64Core(static_cast<int>(reg));
503}
504
David Srbeckyba702002016-02-01 18:15:29 +0000505static dwarf::Reg DWARFReg(FpuRegister reg) {
506 return dwarf::Reg::Mips64Fp(static_cast<int>(reg));
507}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700508
509void CodeGeneratorMIPS64::GenerateFrameEntry() {
510 __ Bind(&frame_entry_label_);
511
512 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kMips64) || !IsLeafMethod();
513
514 if (do_overflow_check) {
515 __ LoadFromOffset(kLoadWord,
516 ZERO,
517 SP,
518 -static_cast<int32_t>(GetStackOverflowReservedBytes(kMips64)));
519 RecordPcInfo(nullptr, 0);
520 }
521
Alexey Frunze4dda3372015-06-01 18:31:49 -0700522 if (HasEmptyFrame()) {
523 return;
524 }
525
526 // Make sure the frame size isn't unreasonably large. Per the various APIs
527 // it looks like it should always be less than 2GB in size, which allows
528 // us using 32-bit signed offsets from the stack pointer.
529 if (GetFrameSize() > 0x7FFFFFFF)
530 LOG(FATAL) << "Stack frame larger than 2GB";
531
532 // Spill callee-saved registers.
533 // Note that their cumulative size is small and they can be indexed using
534 // 16-bit offsets.
535
536 // TODO: increment/decrement SP in one step instead of two or remove this comment.
537
538 uint32_t ofs = FrameEntrySpillSize();
539 __ IncreaseFrameSize(ofs);
540
541 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
542 GpuRegister reg = kCoreCalleeSaves[i];
543 if (allocated_registers_.ContainsCoreRegister(reg)) {
Lazar Trsicd9672662015-09-03 17:33:01 +0200544 ofs -= kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700545 __ Sd(reg, SP, ofs);
546 __ cfi().RelOffset(DWARFReg(reg), ofs);
547 }
548 }
549
550 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
551 FpuRegister reg = kFpuCalleeSaves[i];
552 if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
Lazar Trsicd9672662015-09-03 17:33:01 +0200553 ofs -= kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700554 __ Sdc1(reg, SP, ofs);
David Srbeckyba702002016-02-01 18:15:29 +0000555 __ cfi().RelOffset(DWARFReg(reg), ofs);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700556 }
557 }
558
559 // Allocate the rest of the frame and store the current method pointer
560 // at its end.
561
562 __ IncreaseFrameSize(GetFrameSize() - FrameEntrySpillSize());
563
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +0100564 // Save the current method if we need it. Note that we do not
565 // do this in HCurrentMethod, as the instruction might have been removed
566 // in the SSA graph.
567 if (RequiresCurrentMethod()) {
568 static_assert(IsInt<16>(kCurrentMethodStackOffset),
569 "kCurrentMethodStackOffset must fit into int16_t");
570 __ Sd(kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
571 }
Goran Jakovljevicc6418422016-12-05 16:31:55 +0100572
573 if (GetGraph()->HasShouldDeoptimizeFlag()) {
574 // Initialize should_deoptimize flag to 0.
575 __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
576 }
Alexey Frunze4dda3372015-06-01 18:31:49 -0700577}
578
579void CodeGeneratorMIPS64::GenerateFrameExit() {
580 __ cfi().RememberState();
581
Alexey Frunze4dda3372015-06-01 18:31:49 -0700582 if (!HasEmptyFrame()) {
583 // Deallocate the rest of the frame.
584
585 __ DecreaseFrameSize(GetFrameSize() - FrameEntrySpillSize());
586
587 // Restore callee-saved registers.
588 // Note that their cumulative size is small and they can be indexed using
589 // 16-bit offsets.
590
591 // TODO: increment/decrement SP in one step instead of two or remove this comment.
592
593 uint32_t ofs = 0;
594
595 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
596 FpuRegister reg = kFpuCalleeSaves[i];
597 if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
598 __ Ldc1(reg, SP, ofs);
Lazar Trsicd9672662015-09-03 17:33:01 +0200599 ofs += kMips64DoublewordSize;
David Srbeckyba702002016-02-01 18:15:29 +0000600 __ cfi().Restore(DWARFReg(reg));
Alexey Frunze4dda3372015-06-01 18:31:49 -0700601 }
602 }
603
604 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
605 GpuRegister reg = kCoreCalleeSaves[i];
606 if (allocated_registers_.ContainsCoreRegister(reg)) {
607 __ Ld(reg, SP, ofs);
Lazar Trsicd9672662015-09-03 17:33:01 +0200608 ofs += kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700609 __ cfi().Restore(DWARFReg(reg));
610 }
611 }
612
613 DCHECK_EQ(ofs, FrameEntrySpillSize());
614 __ DecreaseFrameSize(ofs);
615 }
616
617 __ Jr(RA);
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700618 __ Nop();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700619
620 __ cfi().RestoreState();
621 __ cfi().DefCFAOffset(GetFrameSize());
622}
623
624void CodeGeneratorMIPS64::Bind(HBasicBlock* block) {
625 __ Bind(GetLabelOf(block));
626}
627
// Emits code to move a value from `source` to `destination`. Handles every
// combination of GPR/FPR/stack-slot/constant except those ruled out by the
// DCHECKs below. `dst_type` selects 32- vs 64-bit moves; kPrimVoid (an
// unspecified type) is asserted against but the inference logic is kept for
// reference.
void CodeGeneratorMIPS64::MoveLocation(Location destination,
                                       Location source,
                                       Primitive::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves.
  bool unspecified_type = (dst_type == Primitive::kPrimVoid);
  DCHECK_EQ(unspecified_type, false);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we chose a 64bit type to force a 64bit
        // move.
        dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    // The destination register class must agree with the (possibly inferred) type.
    DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      // Move to GPR/FPR from stack.
      LoadOperandType load_type = source.IsStackSlot() ? kLoadWord : kLoadDoubleword;
      if (Primitive::IsFloatingPointType(dst_type)) {
        __ LoadFpuFromOffset(load_type,
                             destination.AsFpuRegister<FpuRegister>(),
                             SP,
                             source.GetStackIndex());
      } else {
        // TODO: use load_type = kLoadUnsignedWord when type == Primitive::kPrimNot.
        __ LoadFromOffset(load_type,
                          destination.AsRegister<GpuRegister>(),
                          SP,
                          source.GetStackIndex());
      }
    } else if (source.IsConstant()) {
      // Move to GPR/FPR from constant. For an FPR destination the constant is
      // first materialized in a GPR (AT) and then transferred with (d)mtc1.
      GpuRegister gpr = AT;
      if (!Primitive::IsFloatingPointType(dst_type)) {
        gpr = destination.AsRegister<GpuRegister>();
      }
      if (dst_type == Primitive::kPrimInt || dst_type == Primitive::kPrimFloat) {
        int32_t value = GetInt32ValueOf(source.GetConstant()->AsConstant());
        // A floating-point zero can be transferred straight from the ZERO
        // register, skipping the constant load.
        if (Primitive::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst32(gpr, value);
        }
      } else {
        int64_t value = GetInt64ValueOf(source.GetConstant()->AsConstant());
        if (Primitive::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst64(gpr, value);
        }
      }
      if (dst_type == Primitive::kPrimFloat) {
        __ Mtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      } else if (dst_type == Primitive::kPrimDouble) {
        __ Dmtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      }
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        // Move to GPR from GPR.
        __ Move(destination.AsRegister<GpuRegister>(), source.AsRegister<GpuRegister>());
      } else {
        DCHECK(destination.IsFpuRegister());
        if (Primitive::Is64BitType(dst_type)) {
          __ Dmtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        } else {
          __ Mtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        }
      }
    } else if (source.IsFpuRegister()) {
      if (destination.IsFpuRegister()) {
        // Move to FPR from FPR.
        if (dst_type == Primitive::kPrimFloat) {
          __ MovS(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
        } else {
          DCHECK_EQ(dst_type, Primitive::kPrimDouble);
          __ MovD(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
        }
      } else {
        DCHECK(destination.IsRegister());
        if (Primitive::Is64BitType(dst_type)) {
          __ Dmfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        } else {
          __ Mfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        }
      }
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        // Infer the width from the destination slot; FP-ness from the source
        // register class.
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
      // Move to stack from GPR/FPR.
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      if (source.IsRegister()) {
        __ StoreToOffset(store_type,
                         source.AsRegister<GpuRegister>(),
                         SP,
                         destination.GetStackIndex());
      } else {
        __ StoreFpuToOffset(store_type,
                            source.AsFpuRegister<FpuRegister>(),
                            SP,
                            destination.GetStackIndex());
      }
    } else if (source.IsConstant()) {
      // Move to stack from constant: materialize in TMP unless the constant is
      // zero, in which case ZERO is stored directly.
      HConstant* src_cst = source.GetConstant();
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      GpuRegister gpr = ZERO;
      if (destination.IsStackSlot()) {
        int32_t value = GetInt32ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst32(gpr, value);
        }
      } else {
        DCHECK(destination.IsDoubleStackSlot());
        int64_t value = GetInt64ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst64(gpr, value);
        }
      }
      __ StoreToOffset(store_type, gpr, SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK_EQ(source.IsDoubleStackSlot(), destination.IsDoubleStackSlot());
      // Move to stack from stack, bouncing through TMP.
      if (destination.IsStackSlot()) {
        __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
      } else {
        __ LoadFromOffset(kLoadDoubleword, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreDoubleword, TMP, SP, destination.GetStackIndex());
      }
    }
  }
}
790
Alexey Frunze5c75ffa2015-09-24 14:41:59 -0700791void CodeGeneratorMIPS64::SwapLocations(Location loc1, Location loc2, Primitive::Type type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -0700792 DCHECK(!loc1.IsConstant());
793 DCHECK(!loc2.IsConstant());
794
795 if (loc1.Equals(loc2)) {
796 return;
797 }
798
799 bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
800 bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
801 bool is_fp_reg1 = loc1.IsFpuRegister();
802 bool is_fp_reg2 = loc2.IsFpuRegister();
803
804 if (loc2.IsRegister() && loc1.IsRegister()) {
805 // Swap 2 GPRs
806 GpuRegister r1 = loc1.AsRegister<GpuRegister>();
807 GpuRegister r2 = loc2.AsRegister<GpuRegister>();
808 __ Move(TMP, r2);
809 __ Move(r2, r1);
810 __ Move(r1, TMP);
811 } else if (is_fp_reg2 && is_fp_reg1) {
812 // Swap 2 FPRs
813 FpuRegister r1 = loc1.AsFpuRegister<FpuRegister>();
814 FpuRegister r2 = loc2.AsFpuRegister<FpuRegister>();
Alexey Frunze5c75ffa2015-09-24 14:41:59 -0700815 if (type == Primitive::kPrimFloat) {
816 __ MovS(FTMP, r1);
817 __ MovS(r1, r2);
818 __ MovS(r2, FTMP);
819 } else {
820 DCHECK_EQ(type, Primitive::kPrimDouble);
821 __ MovD(FTMP, r1);
822 __ MovD(r1, r2);
823 __ MovD(r2, FTMP);
824 }
Alexey Frunze4dda3372015-06-01 18:31:49 -0700825 } else if (is_slot1 != is_slot2) {
826 // Swap GPR/FPR and stack slot
827 Location reg_loc = is_slot1 ? loc2 : loc1;
828 Location mem_loc = is_slot1 ? loc1 : loc2;
829 LoadOperandType load_type = mem_loc.IsStackSlot() ? kLoadWord : kLoadDoubleword;
830 StoreOperandType store_type = mem_loc.IsStackSlot() ? kStoreWord : kStoreDoubleword;
831 // TODO: use load_type = kLoadUnsignedWord when type == Primitive::kPrimNot.
832 __ LoadFromOffset(load_type, TMP, SP, mem_loc.GetStackIndex());
833 if (reg_loc.IsFpuRegister()) {
834 __ StoreFpuToOffset(store_type,
835 reg_loc.AsFpuRegister<FpuRegister>(),
836 SP,
837 mem_loc.GetStackIndex());
Alexey Frunze4dda3372015-06-01 18:31:49 -0700838 if (mem_loc.IsStackSlot()) {
839 __ Mtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
840 } else {
841 DCHECK(mem_loc.IsDoubleStackSlot());
842 __ Dmtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
843 }
844 } else {
845 __ StoreToOffset(store_type, reg_loc.AsRegister<GpuRegister>(), SP, mem_loc.GetStackIndex());
846 __ Move(reg_loc.AsRegister<GpuRegister>(), TMP);
847 }
848 } else if (is_slot1 && is_slot2) {
849 move_resolver_.Exchange(loc1.GetStackIndex(),
850 loc2.GetStackIndex(),
851 loc1.IsDoubleStackSlot());
852 } else {
853 LOG(FATAL) << "Unimplemented swap between locations " << loc1 << " and " << loc2;
854 }
855}
856
Calin Juravle175dc732015-08-25 15:42:32 +0100857void CodeGeneratorMIPS64::MoveConstant(Location location, int32_t value) {
858 DCHECK(location.IsRegister());
859 __ LoadConst32(location.AsRegister<GpuRegister>(), value);
860}
861
Calin Juravlee460d1d2015-09-29 04:52:17 +0100862void CodeGeneratorMIPS64::AddLocationAsTemp(Location location, LocationSummary* locations) {
863 if (location.IsRegister()) {
864 locations->AddTemp(location);
865 } else {
866 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
867 }
868}
869
Goran Jakovljevic8ed18262016-01-22 13:01:00 +0100870void CodeGeneratorMIPS64::MarkGCCard(GpuRegister object,
871 GpuRegister value,
872 bool value_can_be_null) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700873 Mips64Label done;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700874 GpuRegister card = AT;
875 GpuRegister temp = TMP;
Goran Jakovljevic8ed18262016-01-22 13:01:00 +0100876 if (value_can_be_null) {
877 __ Beqzc(value, &done);
878 }
Alexey Frunze4dda3372015-06-01 18:31:49 -0700879 __ LoadFromOffset(kLoadDoubleword,
880 card,
881 TR,
Andreas Gampe542451c2016-07-26 09:02:02 -0700882 Thread::CardTableOffset<kMips64PointerSize>().Int32Value());
Alexey Frunze4dda3372015-06-01 18:31:49 -0700883 __ Dsrl(temp, object, gc::accounting::CardTable::kCardShift);
884 __ Daddu(temp, card, temp);
885 __ Sb(card, temp, 0);
Goran Jakovljevic8ed18262016-01-22 13:01:00 +0100886 if (value_can_be_null) {
887 __ Bind(&done);
888 }
Alexey Frunze4dda3372015-06-01 18:31:49 -0700889}
890
Alexey Frunze19f6c692016-11-30 19:19:55 -0800891template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
892inline void CodeGeneratorMIPS64::EmitPcRelativeLinkerPatches(
893 const ArenaDeque<PcRelativePatchInfo>& infos,
894 ArenaVector<LinkerPatch>* linker_patches) {
895 for (const PcRelativePatchInfo& info : infos) {
896 const DexFile& dex_file = info.target_dex_file;
897 size_t offset_or_index = info.offset_or_index;
898 DCHECK(info.pc_rel_label.IsBound());
899 uint32_t pc_rel_offset = __ GetLabelLocation(&info.pc_rel_label);
900 linker_patches->push_back(Factory(pc_rel_offset, &dex_file, pc_rel_offset, offset_or_index));
901 }
902}
903
904void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
905 DCHECK(linker_patches->empty());
906 size_t size =
907 method_patches_.size() +
908 call_patches_.size() +
909 pc_relative_dex_cache_patches_.size() +
910 relative_call_patches_.size();
911 linker_patches->reserve(size);
912 for (const auto& entry : method_patches_) {
913 const MethodReference& target_method = entry.first;
914 Literal* literal = entry.second;
915 DCHECK(literal->GetLabel()->IsBound());
916 uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
917 linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
918 target_method.dex_file,
919 target_method.dex_method_index));
920 }
921 for (const auto& entry : call_patches_) {
922 const MethodReference& target_method = entry.first;
923 Literal* literal = entry.second;
924 DCHECK(literal->GetLabel()->IsBound());
925 uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
926 linker_patches->push_back(LinkerPatch::CodePatch(literal_offset,
927 target_method.dex_file,
928 target_method.dex_method_index));
929 }
930 EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
931 linker_patches);
932 for (const PcRelativePatchInfo& info : relative_call_patches_) {
933 const DexFile& dex_file = info.target_dex_file;
934 uint32_t method_index = info.offset_or_index;
935 DCHECK(info.pc_rel_label.IsBound());
936 uint32_t pc_rel_offset = __ GetLabelLocation(&info.pc_rel_label);
937 linker_patches->push_back(
938 LinkerPatch::RelativeCodePatch(pc_rel_offset, &dex_file, method_index));
939 }
940}
941
// Records a PC-relative access to a dex cache array element at `element_offset`.
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeDexCacheArrayPatch(
    const DexFile& dex_file, uint32_t element_offset) {
  return NewPcRelativePatch(dex_file, element_offset, &pc_relative_dex_cache_patches_);
}
946
// Records a PC-relative call to the method `method_index` in `dex_file`.
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeCallPatch(
    const DexFile& dex_file, uint32_t method_index) {
  return NewPcRelativePatch(dex_file, method_index, &relative_call_patches_);
}
951
952CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativePatch(
953 const DexFile& dex_file, uint32_t offset_or_index, ArenaDeque<PcRelativePatchInfo>* patches) {
954 patches->emplace_back(dex_file, offset_or_index);
955 return &patches->back();
956}
957
958Literal* CodeGeneratorMIPS64::DeduplicateUint64Literal(uint64_t value) {
959 return uint64_literals_.GetOrCreate(
960 value,
961 [this, value]() { return __ NewLiteral<uint64_t>(value); });
962}
963
964Literal* CodeGeneratorMIPS64::DeduplicateMethodLiteral(MethodReference target_method,
965 MethodToLiteralMap* map) {
966 return map->GetOrCreate(
967 target_method,
968 [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
969}
970
// Deduplicated literal that will hold the address of `target_method`.
Literal* CodeGeneratorMIPS64::DeduplicateMethodAddressLiteral(MethodReference target_method) {
  return DeduplicateMethodLiteral(target_method, &method_patches_);
}
974
// Deduplicated literal that will hold the code entry point of `target_method`.
Literal* CodeGeneratorMIPS64::DeduplicateMethodCodeLiteral(MethodReference target_method) {
  return DeduplicateMethodLiteral(target_method, &call_patches_);
}
978
// Emits the high half of a PC-relative address computation. The label bound
// here marks the position the linker will patch with the real 32-bit offset.
void CodeGeneratorMIPS64::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info,
                                                               GpuRegister out) {
  __ Bind(&info->pc_rel_label);
  // Add the high half of a 32-bit offset to PC.
  __ Auipc(out, /* placeholder */ 0x1234);
  // The immediately following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. ld, jialc, addiu).
}
987
David Brazdil58282f42016-01-14 12:45:10 +0000988void CodeGeneratorMIPS64::SetupBlockedRegisters() const {
Alexey Frunze4dda3372015-06-01 18:31:49 -0700989 // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
990 blocked_core_registers_[ZERO] = true;
991 blocked_core_registers_[K0] = true;
992 blocked_core_registers_[K1] = true;
993 blocked_core_registers_[GP] = true;
994 blocked_core_registers_[SP] = true;
995 blocked_core_registers_[RA] = true;
996
Lazar Trsicd9672662015-09-03 17:33:01 +0200997 // AT, TMP(T8) and TMP2(T3) are used as temporary/scratch
998 // registers (similar to how AT is used by MIPS assemblers).
Alexey Frunze4dda3372015-06-01 18:31:49 -0700999 blocked_core_registers_[AT] = true;
1000 blocked_core_registers_[TMP] = true;
Lazar Trsicd9672662015-09-03 17:33:01 +02001001 blocked_core_registers_[TMP2] = true;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001002 blocked_fpu_registers_[FTMP] = true;
1003
1004 // Reserve suspend and thread registers.
1005 blocked_core_registers_[S0] = true;
1006 blocked_core_registers_[TR] = true;
1007
1008 // Reserve T9 for function calls
1009 blocked_core_registers_[T9] = true;
1010
1011 // TODO: review; anything else?
1012
Goran Jakovljevic782be112016-06-21 12:39:04 +02001013 if (GetGraph()->IsDebuggable()) {
1014 // Stubs do not save callee-save floating point registers. If the graph
1015 // is debuggable, we need to deal with these registers differently. For
1016 // now, just block them.
1017 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1018 blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
1019 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001020 }
1021}
1022
Alexey Frunze4dda3372015-06-01 18:31:49 -07001023size_t CodeGeneratorMIPS64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1024 __ StoreToOffset(kStoreDoubleword, GpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +02001025 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001026}
1027
1028size_t CodeGeneratorMIPS64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1029 __ LoadFromOffset(kLoadDoubleword, GpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +02001030 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001031}
1032
1033size_t CodeGeneratorMIPS64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1034 __ StoreFpuToOffset(kStoreDoubleword, FpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +02001035 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001036}
1037
1038size_t CodeGeneratorMIPS64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1039 __ LoadFpuFromOffset(kLoadDoubleword, FpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +02001040 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001041}
1042
1043void CodeGeneratorMIPS64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdil9f0dece2015-09-21 18:20:26 +01001044 stream << GpuRegister(reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001045}
1046
1047void CodeGeneratorMIPS64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdil9f0dece2015-09-21 18:20:26 +01001048 stream << FpuRegister(reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001049}
1050
Calin Juravle175dc732015-08-25 15:42:32 +01001051void CodeGeneratorMIPS64::InvokeRuntime(QuickEntrypointEnum entrypoint,
Alexey Frunze4dda3372015-06-01 18:31:49 -07001052 HInstruction* instruction,
1053 uint32_t dex_pc,
1054 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001055 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Serban Constantinescufc734082016-07-19 17:18:07 +01001056 __ LoadFromOffset(kLoadDoubleword,
1057 T9,
1058 TR,
1059 GetThreadOffset<kMips64PointerSize>(entrypoint).Int32Value());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001060 __ Jalr(T9);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001061 __ Nop();
Serban Constantinescufc734082016-07-19 17:18:07 +01001062 if (EntrypointRequiresStackMap(entrypoint)) {
1063 RecordPcInfo(instruction, dex_pc, slow_path);
1064 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001065}
1066
// Branches to `slow_path` unless the class in `class_reg` is initialized,
// i.e. unless its status is >= kStatusInitialized.
void InstructionCodeGeneratorMIPS64::GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path,
                                                                      GpuRegister class_reg) {
  __ LoadFromOffset(kLoadWord, TMP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ LoadConst32(AT, mirror::Class::kStatusInitialized);
  __ Bltc(TMP, AT, slow_path->GetEntryLabel());  // status < initialized -> slow path.
  // TODO: barrier needed?
  __ Bind(slow_path->GetExitLabel());
}
1075
// The requested barrier kind is ignored: SYNC with stype 0 is a full
// completion barrier and conservatively satisfies every MemBarrierKind.
void InstructionCodeGeneratorMIPS64::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // only stype 0 is supported
}
1079
// Emits a suspend check: tests the thread flags and enters the slow path when
// any flag is set. With a `successor` block, the fast path branches straight
// to it and the slow path falls through via an unconditional branch.
void InstructionCodeGeneratorMIPS64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS64* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathMIPS64(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMips64PointerSize>().Int32Value());
  if (successor == nullptr) {
    // Slow path returns to the instruction right after the check.
    __ Bnezc(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Beqzc(TMP, codegen_->GetLabelOf(successor));
    __ Bc(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
1099
// Instruction visitor that emits MIPS64 code; shares the assembler owned by
// the enclosing CodeGeneratorMIPS64.
InstructionCodeGeneratorMIPS64::InstructionCodeGeneratorMIPS64(HGraph* graph,
                                                               CodeGeneratorMIPS64* codegen)
      : InstructionCodeGenerator(graph, codegen),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
1105
1106void LocationsBuilderMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
1107 DCHECK_EQ(instruction->InputCount(), 2U);
1108 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1109 Primitive::Type type = instruction->GetResultType();
1110 switch (type) {
1111 case Primitive::kPrimInt:
1112 case Primitive::kPrimLong: {
1113 locations->SetInAt(0, Location::RequiresRegister());
1114 HInstruction* right = instruction->InputAt(1);
1115 bool can_use_imm = false;
1116 if (right->IsConstant()) {
1117 int64_t imm = CodeGenerator::GetInt64ValueOf(right->AsConstant());
1118 if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
1119 can_use_imm = IsUint<16>(imm);
1120 } else if (instruction->IsAdd()) {
1121 can_use_imm = IsInt<16>(imm);
1122 } else {
1123 DCHECK(instruction->IsSub());
1124 can_use_imm = IsInt<16>(-imm);
1125 }
1126 }
1127 if (can_use_imm)
1128 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
1129 else
1130 locations->SetInAt(1, Location::RequiresRegister());
1131 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1132 }
1133 break;
1134
1135 case Primitive::kPrimFloat:
1136 case Primitive::kPrimDouble:
1137 locations->SetInAt(0, Location::RequiresFpuRegister());
1138 locations->SetInAt(1, Location::RequiresFpuRegister());
1139 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1140 break;
1141
1142 default:
1143 LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
1144 }
1145}
1146
// Emits code for And/Or/Xor/Add/Sub. Immediate forms rely on the encodability
// checks done in LocationsBuilderMIPS64::HandleBinaryOp.
void InstructionCodeGeneratorMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd()) {
        if (type == Primitive::kPrimInt) {
          if (use_imm)
            __ Addiu(dst, lhs, rhs_imm);
          else
            __ Addu(dst, lhs, rhs_reg);
        } else {
          if (use_imm)
            __ Daddiu(dst, lhs, rhs_imm);
          else
            __ Daddu(dst, lhs, rhs_reg);
        }
      } else {
        DCHECK(instruction->IsSub());
        // Subtraction by a constant is emitted as addition of the negated
        // immediate (the locations builder verified that -imm is encodable).
        if (type == Primitive::kPrimInt) {
          if (use_imm)
            __ Addiu(dst, lhs, -rhs_imm);
          else
            __ Subu(dst, lhs, rhs_reg);
        } else {
          if (use_imm)
            __ Daddiu(dst, lhs, -rhs_imm);
          else
            __ Dsubu(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      // Only Add and Sub reach here for floating-point types.
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (instruction->IsAdd()) {
        if (type == Primitive::kPrimFloat)
          __ AddS(dst, lhs, rhs);
        else
          __ AddD(dst, lhs, rhs);
      } else if (instruction->IsSub()) {
        if (type == Primitive::kPrimFloat)
          __ SubS(dst, lhs, rhs);
        else
          __ SubD(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
1234
1235void LocationsBuilderMIPS64::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08001236 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001237
1238 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1239 Primitive::Type type = instr->GetResultType();
1240 switch (type) {
1241 case Primitive::kPrimInt:
1242 case Primitive::kPrimLong: {
1243 locations->SetInAt(0, Location::RequiresRegister());
1244 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001245 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001246 break;
1247 }
1248 default:
1249 LOG(FATAL) << "Unexpected shift type " << type;
1250 }
1251}
1252
// Emits code for Shl/Shr/UShr/Ror on int/long. Constant shift distances are
// masked to the type width (5 bits for int, 6 for long); 64-bit immediate
// shifts of 32 or more use the dedicated "+32" instruction variants.
void InstructionCodeGeneratorMIPS64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  Primitive::Type type = instr->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (use_imm) {
        // Mask the distance to the architectural shift range for the type.
        uint32_t shift_value = rhs_imm &
            (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);

        if (shift_value == 0) {
          // A zero-distance shift degenerates to a plain register move.
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (type == Primitive::kPrimInt) {
          if (instr->IsShl()) {
            __ Sll(dst, lhs, shift_value);
          } else if (instr->IsShr()) {
            __ Sra(dst, lhs, shift_value);
          } else if (instr->IsUShr()) {
            __ Srl(dst, lhs, shift_value);
          } else {
            __ Rotr(dst, lhs, shift_value);
          }
        } else {
          if (shift_value < 32) {
            if (instr->IsShl()) {
              __ Dsll(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl(dst, lhs, shift_value);
            } else {
              __ Drotr(dst, lhs, shift_value);
            }
          } else {
            // The *32 variants encode distances 32..63 as (distance - 32).
            shift_value -= 32;
            if (instr->IsShl()) {
              __ Dsll32(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra32(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl32(dst, lhs, shift_value);
            } else {
              __ Drotr32(dst, lhs, shift_value);
            }
          }
        }
      } else {
        // Variable distances: the *v instructions use the low bits of rhs_reg.
        if (type == Primitive::kPrimInt) {
          if (instr->IsShl()) {
            __ Sllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Srav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Srlv(dst, lhs, rhs_reg);
          } else {
            __ Rotrv(dst, lhs, rhs_reg);
          }
        } else {
          if (instr->IsShl()) {
            __ Dsllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Dsrav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Dsrlv(dst, lhs, rhs_reg);
          } else {
            __ Drotrv(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
1345
// Add shares its location constraints with the other binary operations.
void LocationsBuilderMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
1349
// Add shares its code generation with the other binary operations.
void InstructionCodeGeneratorMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
1353
// And shares its location constraints with the other binary operations.
void LocationsBuilderMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
1357
// And shares its code generation with the other binary operations.
void InstructionCodeGeneratorMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
1361
1362void LocationsBuilderMIPS64::VisitArrayGet(HArrayGet* instruction) {
1363 LocationSummary* locations =
1364 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1365 locations->SetInAt(0, Location::RequiresRegister());
1366 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1367 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1368 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1369 } else {
1370 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1371 }
1372}
1373
// Emits the load of one array element.
// For each component type: a constant index is folded into the load offset
// ((index << scale) + data_offset); a register index is scaled (Dsll) and
// added to the array base in TMP before loading.
void InstructionCodeGeneratorMIPS64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location index = locations->InAt(1);
  // Offset of the first element; depends on the component size.
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);

  Primitive::Type type = instruction->GetType();
  switch (type) {
    case Primitive::kPrimBoolean: {
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        // Byte-sized elements need no index scaling.
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_2);
        __ Daddu(TMP, obj, TMP);
        __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_2);
        __ Daddu(TMP, obj, TMP);
        __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      // References are 32-bit heap offsets and must be zero-extended.
      LoadOperandType load_type = (type == Primitive::kPrimNot) ? kLoadUnsignedWord : kLoadWord;
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(load_type, out, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
        __ Daddu(TMP, obj, TMP);
        __ LoadFromOffset(load_type, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadDoubleword, out, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
        __ Daddu(TMP, obj, TMP);
        __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFpuFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
        __ Daddu(TMP, obj, TMP);
        __ LoadFpuFromOffset(kLoadWord, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFpuFromOffset(kLoadDoubleword, out, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
        __ Daddu(TMP, obj, TMP);
        __ LoadFpuFromOffset(kLoadDoubleword, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
  // Record the access so it can serve as an implicit null check of `obj`.
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
1501
// ArrayLength reads the length field from the array header: one core register
// in (the array object), one core register out.
void LocationsBuilderMIPS64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
1507
void InstructionCodeGeneratorMIPS64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  // The length is a 32-bit field in the array header.
  __ LoadFromOffset(kLoadWord, out, obj, offset);
  // Record the load so it can serve as an implicit null check of `obj`.
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
1516
1517void LocationsBuilderMIPS64::VisitArraySet(HArraySet* instruction) {
David Brazdilbb3d5052015-09-21 18:39:16 +01001518 bool needs_runtime_call = instruction->NeedsTypeCheck();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001519 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1520 instruction,
Serban Constantinescu54ff4822016-07-07 18:03:19 +01001521 needs_runtime_call ? LocationSummary::kCallOnMainOnly : LocationSummary::kNoCall);
David Brazdilbb3d5052015-09-21 18:39:16 +01001522 if (needs_runtime_call) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001523 InvokeRuntimeCallingConvention calling_convention;
1524 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1525 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1526 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1527 } else {
1528 locations->SetInAt(0, Location::RequiresRegister());
1529 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1530 if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
1531 locations->SetInAt(2, Location::RequiresFpuRegister());
1532 } else {
1533 locations->SetInAt(2, Location::RequiresRegister());
1534 }
1535 }
1536}
1537
// Emits the store of one array element. As in VisitArrayGet, a constant index
// is folded into the store offset and a register index is scaled into TMP.
// Reference stores either get a GC card mark (write barrier) or, when a type
// check is required, are routed to the aput-object runtime entrypoint.
void InstructionCodeGeneratorMIPS64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location index = locations->InAt(1);
  Primitive::Type value_type = instruction->GetComponentType();
  bool needs_runtime_call = locations->WillCall();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ StoreToOffset(kStoreByte, value, obj, offset);
      } else {
        // Byte-sized elements need no index scaling.
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ StoreToOffset(kStoreByte, value, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ StoreToOffset(kStoreHalfword, value, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_2);
        __ Daddu(TMP, obj, TMP);
        __ StoreToOffset(kStoreHalfword, value, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (!needs_runtime_call) {
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ StoreToOffset(kStoreWord, value, obj, offset);
        } else {
          DCHECK(index.IsRegister()) << index;
          __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
          __ Daddu(TMP, obj, TMP);
          __ StoreToOffset(kStoreWord, value, TMP, data_offset);
        }
        // Record the store so it can serve as an implicit null check of `obj`.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (needs_write_barrier) {
          DCHECK_EQ(value_type, Primitive::kPrimNot);
          // Mark the GC card for the stored reference.
          codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());
        }
      } else {
        // The stored reference needs a type check: defer to the runtime.
        DCHECK_EQ(value_type, Primitive::kPrimNot);
        codegen_->InvokeRuntime(kQuickAputObject, instruction, instruction->GetDexPc());
        CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreToOffset(kStoreDoubleword, value, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
        __ Daddu(TMP, obj, TMP);
        __ StoreToOffset(kStoreDoubleword, value, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      FpuRegister value = locations->InAt(2).AsFpuRegister<FpuRegister>();
      DCHECK(locations->InAt(2).IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ StoreFpuToOffset(kStoreWord, value, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
        __ Daddu(TMP, obj, TMP);
        __ StoreFpuToOffset(kStoreWord, value, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      FpuRegister value = locations->InAt(2).AsFpuRegister<FpuRegister>();
      DCHECK(locations->InAt(2).IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreFpuToOffset(kStoreDoubleword, value, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
        __ Daddu(TMP, obj, TMP);
        __ StoreFpuToOffset(kStoreDoubleword, value, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }

  // Ints and objects are handled in the switch.
  if (value_type != Primitive::kPrimInt && value_type != Primitive::kPrimNot) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
1664
1665void LocationsBuilderMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01001666 RegisterSet caller_saves = RegisterSet::Empty();
1667 InvokeRuntimeCallingConvention calling_convention;
1668 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1669 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1670 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001671 locations->SetInAt(0, Location::RequiresRegister());
1672 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001673}
1674
void InstructionCodeGeneratorMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  BoundsCheckSlowPathMIPS64* slow_path =
      new (GetGraph()->GetArena()) BoundsCheckSlowPathMIPS64(instruction);
  codegen_->AddSlowPath(slow_path);

  GpuRegister index = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister length = locations->InAt(1).AsRegister<GpuRegister>();

  // length is limited by the maximum positive signed 32-bit integer.
  // Unsigned comparison of length and index checks for index < 0
  // and for length <= index simultaneously.
  // (A negative index appears as a huge unsigned value >= length.)
  __ Bgeuc(index, length, slow_path->GetEntryLabel());
}
1689
// CheckCast takes the object and the target class in registers, plus a temp
// to hold the object's class; failure is handled on a slow path.
void LocationsBuilderMIPS64::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Note that TypeCheckSlowPathMIPS64 uses this register too.
  locations->AddTemp(Location::RequiresRegister());
}
1699
// Fast path: a null object or an exact class match passes the cast; any
// other class goes to the slow path (which handles subtyping or throws).
void InstructionCodeGeneratorMIPS64::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister obj_cls = locations->GetTemp(0).AsRegister<GpuRegister>();

  SlowPathCodeMIPS64* slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction);
  codegen_->AddSlowPath(slow_path);

  // TODO: avoid this check if we know obj is not null.
  // A null reference trivially passes the cast.
  __ Beqzc(obj, slow_path->GetExitLabel());
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadUnsignedWord, obj_cls, obj, mirror::Object::ClassOffset().Int32Value());
  __ Bnec(obj_cls, cls, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
1717
// ClinitCheck takes the class in a register; an uninitialized class is
// handled on a slow path. If the check has uses, it forwards the class.
void LocationsBuilderMIPS64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}
1726
void InstructionCodeGeneratorMIPS64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  // The trailing `true` asks the slow path to run the class initializer
  // (see LoadClassSlowPathMIPS64 — defined earlier in this file).
  SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
      check->GetLoadClass(),
      check,
      check->GetDexPc(),
      true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<GpuRegister>());
}
1738
// Compare produces an int (-1/0/1) in a core register. Integral inputs may
// use a constant right-hand side; FP inputs need two FPU registers.
void LocationsBuilderMIPS64::VisitCompare(HCompare* compare) {
  Primitive::Type in_type = compare->InputAt(0)->GetType();

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare);

  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}
1767
// Emits the three-way compare. Integral types use res = (rhs < lhs) - (lhs < rhs).
// FP types use R6 compare instructions; the result on NaN follows the bias:
// gt-bias yields 1, lt-bias yields -1 (both CmpEq and CmpLt are false on NaN).
void InstructionCodeGeneratorMIPS64::VisitCompare(HCompare* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister res = locations->Out().AsRegister<GpuRegister>();
  Primitive::Type in_type = instruction->InputAt(0)->GetType();

  // 0 if: left == right
  // 1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);
      bool use_imm = rhs_location.IsConstant();
      // A zero constant is compared directly against the ZERO register;
      // any other constant is materialized into AT.
      GpuRegister rhs = ZERO;
      if (use_imm) {
        if (in_type == Primitive::kPrimLong) {
          int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst64(rhs, value);
          }
        } else {
          int32_t value = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst32(rhs, value);
          }
        }
      } else {
        rhs = rhs_location.AsRegister<GpuRegister>();
      }
      // res = (rhs < lhs) - (lhs < rhs), i.e. -1/0/1.
      __ Slt(TMP, lhs, rhs);
      __ Slt(res, rhs, lhs);
      __ Subu(res, res, TMP);
      break;
    }

    case Primitive::kPrimFloat: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      __ CmpEqS(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        // NaN falls through both compares, leaving res = 1.
        __ CmpLtS(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        // NaN falls through both compares, leaving res = -1.
        __ CmpLtS(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    case Primitive::kPrimDouble: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      __ CmpEqD(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        __ CmpLtD(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        __ CmpLtD(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
1858
// Conditions take integral inputs (constant allowed on the right) or two FPU
// registers. The `default:` label groups all integral types with kPrimLong.
// No output is allocated when the condition is materialized at its use site.
void LocationsBuilderMIPS64::HandleCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  switch (instruction->InputAt(0)->GetType()) {
    default:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      break;
  }
  if (!instruction->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
1878
// Materializes a condition into a 0/1 value. Integral conditions are computed
// directly by GenerateIntLongCompare; FP conditions branch to `true_label`
// and the branches are then converted into 0/1 below.
void InstructionCodeGeneratorMIPS64::HandleCondition(HCondition* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    // The consumer (e.g. a branch) emits the comparison itself.
    return;
  }

  Primitive::Type type = instruction->InputAt(0)->GetType();
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  Mips64Label true_label;

  switch (type) {
    default:
      // Integer case.
      GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ false, locations);
      return;
    case Primitive::kPrimLong:
      GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ true, locations);
      return;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // TODO: don't use branches.
      GenerateFpCompareAndBranch(instruction->GetCondition(),
                                 instruction->IsGtBias(),
                                 type,
                                 locations,
                                 &true_label);
      break;
  }

  // Convert the branches into the result.
  Mips64Label done;

  // False case: result = 0.
  __ LoadConst32(dst, 0);
  __ Bc(&done);

  // True case: result = 1.
  __ Bind(&true_label);
  __ LoadConst32(dst, 1);
  __ Bind(&done);
}
1921
// Strength-reduced div/rem by +1 or -1:
//   rem => 0; div by 1 => dividend; div by -1 => -dividend.
void InstructionCodeGeneratorMIPS64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(imm == 1 || imm == -1);

  if (instruction->IsRem()) {
    // x % (+/-1) == 0.
    __ Move(out, ZERO);
  } else {
    if (imm == -1) {
      if (type == Primitive::kPrimInt) {
        __ Subu(out, ZERO, dividend);
      } else {
        DCHECK_EQ(type, Primitive::kPrimLong);
        __ Dsubu(out, ZERO, dividend);
      }
    } else if (out != dividend) {
      // x / 1 == x; just copy if not already in place.
      __ Move(out, dividend);
    }
  }
}
1950
// Strength-reduced div/rem by +/-2^k (round toward zero).
// Division: bias a negative dividend by (2^k - 1) — derived from the sign
// bits via shifts — then arithmetic-shift right by k and negate for a
// negative divisor. Remainder: apply the same bias, mask the low k bits,
// then remove the bias; the result keeps the dividend's sign.
void InstructionCodeGeneratorMIPS64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  if (instruction->IsDiv()) {
    if (type == Primitive::kPrimInt) {
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Srl(TMP, dividend, 31);
      } else {
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
      }
      __ Addu(out, dividend, TMP);
      __ Sra(out, out, ctz_imm);
      if (imm < 0) {
        __ Subu(out, ZERO, out);
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimLong);
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Dsrl32(TMP, dividend, 31);
      } else {
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
      }
      __ Daddu(out, dividend, TMP);
      if (ctz_imm < 32) {
        __ Dsra(out, out, ctz_imm);
      } else {
        __ Dsra32(out, out, ctz_imm - 32);
      }
      if (imm < 0) {
        __ Dsubu(out, ZERO, out);
      }
    }
  } else {
    if (type == Primitive::kPrimInt) {
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Sra(TMP, dividend, 31);
        __ Subu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Addu(out, out, TMP);
      } else {
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
        __ Addu(out, dividend, TMP);
        // Mask the low ctz_imm bits; Andi only takes a 16-bit immediate,
        // so larger masks are done with a shift pair.
        if (IsUint<16>(abs_imm - 1)) {
          __ Andi(out, out, abs_imm - 1);
        } else {
          __ Sll(out, out, 32 - ctz_imm);
          __ Srl(out, out, 32 - ctz_imm);
        }
        __ Subu(out, out, TMP);
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimLong);
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Dsra32(TMP, dividend, 31);
        __ Dsubu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Daddu(out, out, TMP);
      } else {
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
        __ Daddu(out, dividend, TMP);
        if (IsUint<16>(abs_imm - 1)) {
          __ Andi(out, out, abs_imm - 1);
        } else {
          if (ctz_imm > 32) {
            __ Dsll(out, out, 64 - ctz_imm);
            __ Dsrl(out, out, 64 - ctz_imm);
          } else {
            __ Dsll32(out, out, 32 - ctz_imm);
            __ Dsrl32(out, out, 32 - ctz_imm);
          }
        }
        __ Dsubu(out, out, TMP);
      }
    }
  }
}
2054
// Div/rem by an arbitrary non-trivial constant using the multiply-high
// "magic number" technique (Hacker's Delight style): multiply the dividend
// by a precomputed magic constant, correct when the signs of the divisor
// and magic differ, arithmetic-shift, then add the sign bit to round toward
// zero. For rem, the quotient is multiplied back and subtracted.
void InstructionCodeGeneratorMIPS64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << type;

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm,
                                  (type == Primitive::kPrimLong),
                                  &magic,
                                  &shift);

  if (type == Primitive::kPrimInt) {
    __ LoadConst32(TMP, magic);
    // TMP = high 32 bits of dividend * magic.
    __ MuhR6(TMP, dividend, TMP);

    // Correction when divisor and magic have opposite signs.
    if (imm > 0 && magic < 0) {
      __ Addu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Subu(TMP, TMP, dividend);
    }

    if (shift != 0) {
      __ Sra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      // Add the sign bit to round the quotient toward zero.
      __ Sra(out, TMP, 31);
      __ Subu(out, TMP, out);
    } else {
      // rem = dividend - quotient * imm.
      __ Sra(AT, TMP, 31);
      __ Subu(AT, TMP, AT);
      __ LoadConst32(TMP, imm);
      __ MulR6(TMP, AT, TMP);
      __ Subu(out, dividend, TMP);
    }
  } else {
    __ LoadConst64(TMP, magic);
    // TMP = high 64 bits of dividend * magic.
    __ Dmuh(TMP, dividend, TMP);

    if (imm > 0 && magic < 0) {
      __ Daddu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Dsubu(TMP, TMP, dividend);
    }

    if (shift >= 32) {
      __ Dsra32(TMP, TMP, shift - 32);
    } else if (shift > 0) {
      __ Dsra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      __ Dsra32(out, TMP, 31);
      __ Dsubu(out, TMP, out);
    } else {
      __ Dsra32(AT, TMP, 31);
      __ Dsubu(AT, TMP, AT);
      __ LoadConst64(TMP, imm);
      __ Dmul(TMP, AT, TMP);
      __ Dsubu(out, dividend, TMP);
    }
  }
}
2128
// Dispatches integral div/rem: constant divisors are strength-reduced
// (+/-1, powers of two, or the magic-number path); register divisors use
// the R6 div/mod instructions directly.
void InstructionCodeGeneratorMIPS64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << type;

  LocationSummary* locations = instruction->GetLocations();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  Location second = locations->InAt(1);

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());
    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code to be executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      DivRemByPowerOfTwo(instruction);
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
    GpuRegister divisor = second.AsRegister<GpuRegister>();
    if (instruction->IsDiv()) {
      if (type == Primitive::kPrimInt)
        __ DivR6(out, dividend, divisor);
      else
        __ Ddiv(out, dividend, divisor);
    } else {
      if (type == Primitive::kPrimInt)
        __ ModR6(out, dividend, divisor);
      else
        __ Dmod(out, dividend, divisor);
    }
  }
}
2166
Alexey Frunze4dda3372015-06-01 18:31:49 -07002167void LocationsBuilderMIPS64::VisitDiv(HDiv* div) {
2168 LocationSummary* locations =
2169 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
2170 switch (div->GetResultType()) {
2171 case Primitive::kPrimInt:
2172 case Primitive::kPrimLong:
2173 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07002174 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002175 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2176 break;
2177
2178 case Primitive::kPrimFloat:
2179 case Primitive::kPrimDouble:
2180 locations->SetInAt(0, Location::RequiresFpuRegister());
2181 locations->SetInAt(1, Location::RequiresFpuRegister());
2182 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2183 break;
2184
2185 default:
2186 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2187 }
2188}
2189
2190void InstructionCodeGeneratorMIPS64::VisitDiv(HDiv* instruction) {
2191 Primitive::Type type = instruction->GetType();
2192 LocationSummary* locations = instruction->GetLocations();
2193
2194 switch (type) {
2195 case Primitive::kPrimInt:
Alexey Frunzec857c742015-09-23 15:12:39 -07002196 case Primitive::kPrimLong:
2197 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002198 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002199 case Primitive::kPrimFloat:
2200 case Primitive::kPrimDouble: {
2201 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
2202 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
2203 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
2204 if (type == Primitive::kPrimFloat)
2205 __ DivS(dst, lhs, rhs);
2206 else
2207 __ DivD(dst, lhs, rhs);
2208 break;
2209 }
2210 default:
2211 LOG(FATAL) << "Unexpected div type " << type;
2212 }
2213}
2214
2215void LocationsBuilderMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002216 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002217 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002218}
2219
2220void InstructionCodeGeneratorMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2221 SlowPathCodeMIPS64* slow_path =
2222 new (GetGraph()->GetArena()) DivZeroCheckSlowPathMIPS64(instruction);
2223 codegen_->AddSlowPath(slow_path);
2224 Location value = instruction->GetLocations()->InAt(0);
2225
2226 Primitive::Type type = instruction->GetType();
2227
Nicolas Geoffraye5671612016-03-16 11:03:54 +00002228 if (!Primitive::IsIntegralType(type)) {
2229 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Serguei Katkov8c0676c2015-08-03 13:55:33 +06002230 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002231 }
2232
2233 if (value.IsConstant()) {
2234 int64_t divisor = codegen_->GetInt64ValueOf(value.GetConstant()->AsConstant());
2235 if (divisor == 0) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002236 __ Bc(slow_path->GetEntryLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002237 } else {
2238 // A division by a non-null constant is valid. We don't need to perform
2239 // any check, so simply fall through.
2240 }
2241 } else {
2242 __ Beqzc(value.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
2243 }
2244}
2245
2246void LocationsBuilderMIPS64::VisitDoubleConstant(HDoubleConstant* constant) {
2247 LocationSummary* locations =
2248 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2249 locations->SetOut(Location::ConstantLocation(constant));
2250}
2251
2252void InstructionCodeGeneratorMIPS64::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
2253 // Will be generated at use site.
2254}
2255
2256void LocationsBuilderMIPS64::VisitExit(HExit* exit) {
2257 exit->SetLocations(nullptr);
2258}
2259
2260void InstructionCodeGeneratorMIPS64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
2261}
2262
2263void LocationsBuilderMIPS64::VisitFloatConstant(HFloatConstant* constant) {
2264 LocationSummary* locations =
2265 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2266 locations->SetOut(Location::ConstantLocation(constant));
2267}
2268
2269void InstructionCodeGeneratorMIPS64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
2270 // Will be generated at use site.
2271}
2272
// Shared implementation for HGoto and HTryBoundary: emits any required
// suspend check, then an unconditional branch unless the successor is the
// next block in emission order.
void InstructionCodeGeneratorMIPS64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Loop back edge: the suspend check is given the successor and takes over
    // the control transfer, hence the early return with no branch emitted here.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    // Method entry: emit the pending suspend check before leaving the block.
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ Bc(codegen_->GetLabelOf(successor));
  }
}
2291
David Brazdilfc6a86a2015-06-26 10:33:45 +00002292void LocationsBuilderMIPS64::VisitGoto(HGoto* got) {
2293 got->SetLocations(nullptr);
2294}
2295
2296void InstructionCodeGeneratorMIPS64::VisitGoto(HGoto* got) {
2297 HandleGoto(got, got->GetSuccessor());
2298}
2299
2300void LocationsBuilderMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
2301 try_boundary->SetLocations(nullptr);
2302}
2303
2304void InstructionCodeGeneratorMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
2305 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
2306 if (!successor->IsExitBlock()) {
2307 HandleGoto(try_boundary, successor);
2308 }
2309}
2310
// Materializes the boolean result of an integer comparison into `dst` (taken
// from locations->Out()) using slt/sltu-style sequences. `is64bit` selects
// 32- vs 64-bit interpretation of a constant RHS. Immediates that fit the
// 16-bit slti/sltiu/xori forms are folded directly; otherwise the RHS is
// loaded into TMP first. Conditions the ISA lacks (sge, sle, sgt and their
// unsigned variants) are synthesized from slt/sltu plus a final Xori.
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompare(IfCondition cond,
                                                            bool is64bit,
                                                            LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }
  // Unsigned addend avoids signed-overflow UB when rhs_imm == INT64_MAX.
  int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      if (use_imm && IsUint<16>(rhs_imm)) {
        __ Xori(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Xor(dst, lhs, rhs_reg);
      }
      // After the xor, dst == 0 iff lhs == rhs; convert to 1/0.
      if (cond == kCondEQ) {
        __ Sltiu(dst, dst, 1);
      } else {
        __ Sltu(dst, ZERO, dst);
      }
      break;

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      if (cond == kCondGE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the slt instruction but no sge.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondGT) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the slti instruction but no sgti.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        if (cond == kCondLE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the slt instruction but no sle.
          __ Xori(dst, dst, 1);
        }
      }
      break;

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      if (cond == kCondAE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the sltu instruction but no sgeu.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondA) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the sltiu instruction but no sgtiu.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        if (cond == kCondBE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the sltu instruction but no sleu.
          __ Xori(dst, dst, 1);
        }
      }
      break;
  }
}
2445
// Emits a conditional branch to `label` for an integer comparison of the two
// inputs in `locations`. A constant RHS of zero uses the compact
// compare-with-zero branches; any other constant is loaded into TMP and the
// register-register compact branches are used. Conditions without a direct
// branch (le/gt and their unsigned forms) are obtained by swapping operands.
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompareAndBranch(IfCondition cond,
                                                                     bool is64bit,
                                                                     LocationSummary* locations,
                                                                     Mips64Label* label) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }

  if (use_imm && rhs_imm == 0) {
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Beqzc(lhs, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Bnezc(lhs, label);
        break;
      case kCondLT:
        __ Bltzc(lhs, label);
        break;
      case kCondGE:
        __ Bgezc(lhs, label);
        break;
      case kCondLE:
        __ Blezc(lhs, label);
        break;
      case kCondGT:
        __ Bgtzc(lhs, label);
        break;
      case kCondB:  // always false (unsigned < 0 is impossible)
        break;
      case kCondAE:  // always true (unsigned >= 0 always holds)
        __ Bc(label);
        break;
    }
  } else {
    if (use_imm) {
      rhs_reg = TMP;
      __ LoadConst64(rhs_reg, rhs_imm);
    }
    switch (cond) {
      case kCondEQ:
        __ Beqc(lhs, rhs_reg, label);
        break;
      case kCondNE:
        __ Bnec(lhs, rhs_reg, label);
        break;
      case kCondLT:
        __ Bltc(lhs, rhs_reg, label);
        break;
      case kCondGE:
        __ Bgec(lhs, rhs_reg, label);
        break;
      case kCondLE:
        // lhs <= rhs is rhs >= lhs.
        __ Bgec(rhs_reg, lhs, label);
        break;
      case kCondGT:
        // lhs > rhs is rhs < lhs.
        __ Bltc(rhs_reg, lhs, label);
        break;
      case kCondB:
        __ Bltuc(lhs, rhs_reg, label);
        break;
      case kCondAE:
        __ Bgeuc(lhs, rhs_reg, label);
        break;
      case kCondBE:
        __ Bgeuc(rhs_reg, lhs, label);
        break;
      case kCondA:
        __ Bltuc(rhs_reg, lhs, label);
        break;
    }
  }
}
2532
// Emits a conditional branch to `label` for a float/double comparison.
// The comparison result is materialized into FTMP by a cmp.cond.fmt
// instruction and then tested with bc1nez/bc1eqz. `gt_bias` selects how an
// unordered (NaN) comparison resolves: with gt_bias the ordered compare is
// used for lt/le (NaN does not branch) and the unordered compare for gt/ge
// of the swapped operands (NaN branches); without gt_bias the choices are
// mirrored.
void InstructionCodeGeneratorMIPS64::GenerateFpCompareAndBranch(IfCondition cond,
                                                                bool gt_bias,
                                                                Primitive::Type type,
                                                                LocationSummary* locations,
                                                                Mips64Label* label) {
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        // Branch when the equality compare fails (including NaN operands).
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
    }
  }
}
2633
// Emits the branches for a two-way test (HIf, HDeoptimize, HSelect). A null
// true_target or false_target means "fall through" for that outcome; the
// case analysis below avoids emitting branches to fall-through successors.
void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           Mips64Label* true_target,
                                                           Mips64Label* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ Bc(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ Bc(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Beqzc(cond_val.AsRegister<GpuRegister>(), false_target);
    } else {
      __ Bnezc(cond_val.AsRegister<GpuRegister>(), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();
    Primitive::Type type = condition->InputAt(0)->GetType();
    LocationSummary* locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    Mips64Label* branch_target = true_target;

    if (true_target == nullptr) {
      // Case (1): branch to false_target on the opposite condition instead.
      if_cond = condition->GetOppositeCondition();
      branch_target = false_target;
    }

    switch (type) {
      default:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ false, locations, branch_target);
        break;
      case Primitive::kPrimLong:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ true, locations, branch_target);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
        break;
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ Bc(false_target);
  }
}
2709
2710void LocationsBuilderMIPS64::VisitIf(HIf* if_instr) {
2711 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00002712 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002713 locations->SetInAt(0, Location::RequiresRegister());
2714 }
2715}
2716
2717void InstructionCodeGeneratorMIPS64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00002718 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
2719 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002720 Mips64Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00002721 nullptr : codegen_->GetLabelOf(true_successor);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002722 Mips64Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00002723 nullptr : codegen_->GetLabelOf(false_successor);
2724 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002725}
2726
2727void LocationsBuilderMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
2728 LocationSummary* locations = new (GetGraph()->GetArena())
2729 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01002730 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
David Brazdil0debae72015-11-12 18:37:00 +00002731 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002732 locations->SetInAt(0, Location::RequiresRegister());
2733 }
2734}
2735
2736void InstructionCodeGeneratorMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08002737 SlowPathCodeMIPS64* slow_path =
2738 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00002739 GenerateTestAndBranch(deoptimize,
2740 /* condition_input_index */ 0,
2741 slow_path->GetEntryLabel(),
2742 /* false_target */ nullptr);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002743}
2744
Goran Jakovljevicc6418422016-12-05 16:31:55 +01002745void LocationsBuilderMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
2746 LocationSummary* locations = new (GetGraph()->GetArena())
2747 LocationSummary(flag, LocationSummary::kNoCall);
2748 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07002749}
2750
Goran Jakovljevicc6418422016-12-05 16:31:55 +01002751void InstructionCodeGeneratorMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
2752 __ LoadFromOffset(kLoadWord,
2753 flag->GetLocations()->Out().AsRegister<GpuRegister>(),
2754 SP,
2755 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
Mingyao Yang063fc772016-08-02 11:02:54 -07002756}
2757
David Brazdil74eb1b22015-12-14 11:44:01 +00002758void LocationsBuilderMIPS64::VisitSelect(HSelect* select) {
2759 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
2760 if (Primitive::IsFloatingPointType(select->GetType())) {
2761 locations->SetInAt(0, Location::RequiresFpuRegister());
2762 locations->SetInAt(1, Location::RequiresFpuRegister());
2763 } else {
2764 locations->SetInAt(0, Location::RequiresRegister());
2765 locations->SetInAt(1, Location::RequiresRegister());
2766 }
2767 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
2768 locations->SetInAt(2, Location::RequiresRegister());
2769 }
2770 locations->SetOut(Location::SameAsFirstInput());
2771}
2772
2773void InstructionCodeGeneratorMIPS64::VisitSelect(HSelect* select) {
2774 LocationSummary* locations = select->GetLocations();
2775 Mips64Label false_target;
2776 GenerateTestAndBranch(select,
2777 /* condition_input_index */ 2,
2778 /* true_target */ nullptr,
2779 &false_target);
2780 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
2781 __ Bind(&false_target);
2782}
2783
David Srbecky0cf44932015-12-09 14:09:59 +00002784void LocationsBuilderMIPS64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
2785 new (GetGraph()->GetArena()) LocationSummary(info);
2786}
2787
David Srbeckyd28f4a02016-03-14 17:14:24 +00002788void InstructionCodeGeneratorMIPS64::VisitNativeDebugInfo(HNativeDebugInfo*) {
2789 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00002790}
2791
2792void CodeGeneratorMIPS64::GenerateNop() {
2793 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00002794}
2795
Alexey Frunze4dda3372015-06-01 18:31:49 -07002796void LocationsBuilderMIPS64::HandleFieldGet(HInstruction* instruction,
2797 const FieldInfo& field_info ATTRIBUTE_UNUSED) {
2798 LocationSummary* locations =
2799 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2800 locations->SetInAt(0, Location::RequiresRegister());
2801 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2802 locations->SetOut(Location::RequiresFpuRegister());
2803 } else {
2804 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2805 }
2806}
2807
2808void InstructionCodeGeneratorMIPS64::HandleFieldGet(HInstruction* instruction,
2809 const FieldInfo& field_info) {
2810 Primitive::Type type = field_info.GetFieldType();
2811 LocationSummary* locations = instruction->GetLocations();
2812 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
2813 LoadOperandType load_type = kLoadUnsignedByte;
2814 switch (type) {
2815 case Primitive::kPrimBoolean:
2816 load_type = kLoadUnsignedByte;
2817 break;
2818 case Primitive::kPrimByte:
2819 load_type = kLoadSignedByte;
2820 break;
2821 case Primitive::kPrimShort:
2822 load_type = kLoadSignedHalfword;
2823 break;
2824 case Primitive::kPrimChar:
2825 load_type = kLoadUnsignedHalfword;
2826 break;
2827 case Primitive::kPrimInt:
2828 case Primitive::kPrimFloat:
2829 load_type = kLoadWord;
2830 break;
2831 case Primitive::kPrimLong:
2832 case Primitive::kPrimDouble:
2833 load_type = kLoadDoubleword;
2834 break;
2835 case Primitive::kPrimNot:
2836 load_type = kLoadUnsignedWord;
2837 break;
2838 case Primitive::kPrimVoid:
2839 LOG(FATAL) << "Unreachable type " << type;
2840 UNREACHABLE();
2841 }
2842 if (!Primitive::IsFloatingPointType(type)) {
2843 DCHECK(locations->Out().IsRegister());
2844 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
2845 __ LoadFromOffset(load_type, dst, obj, field_info.GetFieldOffset().Uint32Value());
2846 } else {
2847 DCHECK(locations->Out().IsFpuRegister());
2848 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
2849 __ LoadFpuFromOffset(load_type, dst, obj, field_info.GetFieldOffset().Uint32Value());
2850 }
2851
2852 codegen_->MaybeRecordImplicitNullCheck(instruction);
2853 // TODO: memory barrier?
2854}
2855
2856void LocationsBuilderMIPS64::HandleFieldSet(HInstruction* instruction,
2857 const FieldInfo& field_info ATTRIBUTE_UNUSED) {
2858 LocationSummary* locations =
2859 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2860 locations->SetInAt(0, Location::RequiresRegister());
2861 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
2862 locations->SetInAt(1, Location::RequiresFpuRegister());
2863 } else {
2864 locations->SetInAt(1, Location::RequiresRegister());
2865 }
2866}
2867
2868void InstructionCodeGeneratorMIPS64::HandleFieldSet(HInstruction* instruction,
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01002869 const FieldInfo& field_info,
2870 bool value_can_be_null) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002871 Primitive::Type type = field_info.GetFieldType();
2872 LocationSummary* locations = instruction->GetLocations();
2873 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
2874 StoreOperandType store_type = kStoreByte;
2875 switch (type) {
2876 case Primitive::kPrimBoolean:
2877 case Primitive::kPrimByte:
2878 store_type = kStoreByte;
2879 break;
2880 case Primitive::kPrimShort:
2881 case Primitive::kPrimChar:
2882 store_type = kStoreHalfword;
2883 break;
2884 case Primitive::kPrimInt:
2885 case Primitive::kPrimFloat:
2886 case Primitive::kPrimNot:
2887 store_type = kStoreWord;
2888 break;
2889 case Primitive::kPrimLong:
2890 case Primitive::kPrimDouble:
2891 store_type = kStoreDoubleword;
2892 break;
2893 case Primitive::kPrimVoid:
2894 LOG(FATAL) << "Unreachable type " << type;
2895 UNREACHABLE();
2896 }
2897 if (!Primitive::IsFloatingPointType(type)) {
2898 DCHECK(locations->InAt(1).IsRegister());
2899 GpuRegister src = locations->InAt(1).AsRegister<GpuRegister>();
2900 __ StoreToOffset(store_type, src, obj, field_info.GetFieldOffset().Uint32Value());
2901 } else {
2902 DCHECK(locations->InAt(1).IsFpuRegister());
2903 FpuRegister src = locations->InAt(1).AsFpuRegister<FpuRegister>();
2904 __ StoreFpuToOffset(store_type, src, obj, field_info.GetFieldOffset().Uint32Value());
2905 }
2906
2907 codegen_->MaybeRecordImplicitNullCheck(instruction);
2908 // TODO: memory barriers?
2909 if (CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1))) {
2910 DCHECK(locations->InAt(1).IsRegister());
2911 GpuRegister src = locations->InAt(1).AsRegister<GpuRegister>();
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01002912 codegen_->MarkGCCard(obj, src, value_can_be_null);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002913 }
2914}
2915
2916void LocationsBuilderMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
2917 HandleFieldGet(instruction, instruction->GetFieldInfo());
2918}
2919
2920void InstructionCodeGeneratorMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
2921 HandleFieldGet(instruction, instruction->GetFieldInfo());
2922}
2923
2924void LocationsBuilderMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
2925 HandleFieldSet(instruction, instruction->GetFieldInfo());
2926}
2927
2928void InstructionCodeGeneratorMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01002929 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002930}
2931
2932void LocationsBuilderMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
2933 LocationSummary::CallKind call_kind =
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00002934 instruction->IsExactCheck() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002935 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
2936 locations->SetInAt(0, Location::RequiresRegister());
2937 locations->SetInAt(1, Location::RequiresRegister());
2938 // The output does overlap inputs.
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002939 // Note that TypeCheckSlowPathMIPS64 uses this register too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07002940 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2941}
2942
// Computes `obj instanceof cls` into `out`: 0 for null, an exact class
// compare for exact checks, otherwise a slow-path call on class mismatch.
void InstructionCodeGeneratorMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  Mips64Label done;

  // Return 0 if `obj` is null.
  // TODO: Avoid this check if we know `obj` is not null.
  __ Move(out, ZERO);
  __ Beqzc(obj, &done);

  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadUnsignedWord, out, obj, mirror::Object::ClassOffset().Int32Value());
  if (instruction->IsExactCheck()) {
    // Classes must be equal for the instanceof to succeed.
    // out = (out == cls) via xor + sltiu.
    __ Xor(out, out, cls);
    __ Sltiu(out, out, 1);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    SlowPathCodeMIPS64* slow_path =
        new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction);
    codegen_->AddSlowPath(slow_path);
    __ Bnec(out, cls, slow_path->GetEntryLabel());
    // Equal classes: result is true; the slow path computes the result
    // itself for the unequal case and rejoins at the exit label.
    __ LoadConst32(out, 1);
    __ Bind(slow_path->GetExitLabel());
  }

  __ Bind(&done);
}
2975
2976void LocationsBuilderMIPS64::VisitIntConstant(HIntConstant* constant) {
2977 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
2978 locations->SetOut(Location::ConstantLocation(constant));
2979}
2980
// No code emitted here: the constant is materialized at each use site.
void InstructionCodeGeneratorMIPS64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2984
2985void LocationsBuilderMIPS64::VisitNullConstant(HNullConstant* constant) {
2986 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
2987 locations->SetOut(Location::ConstantLocation(constant));
2988}
2989
// No code emitted here: the null constant is materialized at each use site.
void InstructionCodeGeneratorMIPS64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2993
// Builds locations for an invoke whose target could not be resolved at
// compile time; the call will go through a runtime trampoline.
void LocationsBuilderMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
3000
// Emits the runtime call for an unresolved invoke (shared, arch-independent path).
void InstructionCodeGeneratorMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
3004
// Common location setup for all invoke kinds: places arguments per the
// MIPS64 dex calling convention.
void LocationsBuilderMIPS64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}
3009
// Builds locations for an interface call; reserves T0 for the conflict
// trampoline's hidden argument in addition to the common invoke setup.
void LocationsBuilderMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // The register T0 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(T0));
}
3016
// Emits an interface call: loads the receiver's class, indexes its IMT,
// and jumps to the resolved ArtMethod's quick entry point through T9.
void InstructionCodeGeneratorMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  GpuRegister temp = invoke->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // Set the hidden argument (the dex method index, in the temp reserved by the
  // locations builder for the IMT conflict trampoline).
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<GpuRegister>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadUnsignedWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadUnsignedWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadUnsignedWord, temp, receiver.AsRegister<GpuRegister>(), class_offset);
  }
  // The class load above is the implicit null check for the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ LoadFromOffset(kLoadDoubleword, temp, temp,
      mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMips64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ Nop();
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
3050
3051void LocationsBuilderMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen3039e382015-08-26 07:54:08 -07003052 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
3053 if (intrinsic.TryDispatch(invoke)) {
3054 return;
3055 }
3056
Alexey Frunze4dda3372015-06-01 18:31:49 -07003057 HandleInvoke(invoke);
3058}
3059
3060void LocationsBuilderMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003061 // Explicit clinit checks triggered by static invokes must have been pruned by
3062 // art::PrepareForRegisterAllocation.
3063 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003064
Chris Larsen3039e382015-08-26 07:54:08 -07003065 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
3066 if (intrinsic.TryDispatch(invoke)) {
3067 return;
3068 }
3069
Alexey Frunze4dda3372015-06-01 18:31:49 -07003070 HandleInvoke(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003071}
3072
Chris Larsen3039e382015-08-26 07:54:08 -07003073static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003074 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen3039e382015-08-26 07:54:08 -07003075 IntrinsicCodeGeneratorMIPS64 intrinsic(codegen);
3076 intrinsic.Dispatch(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003077 return true;
3078 }
3079 return false;
3080}
3081
// Reports which HLoadString dispatch kinds this backend supports; MIPS64
// currently always falls back to the dex-cache-via-method path.
HLoadString::LoadKind CodeGeneratorMIPS64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind ATTRIBUTE_UNUSED) {
  // TODO: Implement other kinds.
  return HLoadString::LoadKind::kDexCacheViaMethod;
}
3087
// Reports which HLoadClass dispatch kinds this backend supports; MIPS64
// currently always falls back to the dex-cache-via-method path.
HLoadClass::LoadKind CodeGeneratorMIPS64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  DCHECK_NE(desired_class_load_kind, HLoadClass::LoadKind::kReferrersClass);
  // TODO: Implement other kinds.
  return HLoadClass::LoadKind::kDexCacheViaMethod;
}
3094
// Validates/adjusts the desired static-or-direct dispatch; MIPS64 supports
// every dispatch type, so the request is returned unchanged.
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  // On MIPS64 we support all dispatch types.
  return desired_dispatch_info;
}
3101
// Emits a static or direct call. First resolves where the callee ArtMethod
// comes from (method load kind), then how the code pointer is reached (code
// pointer location), and finally performs the call through T9 / a branch.
void CodeGeneratorMIPS64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
  // All registers are assumed to be correctly set up per the calling convention.
  Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
  HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
  HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();

  // For better instruction scheduling we load the direct code pointer before the method pointer.
  switch (code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // T9 = invoke->GetDirectCodePtr();
      __ LoadLiteral(T9, kLoadDoubleword, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
      // T9 = code address from literal pool with link-time patch.
      __ LoadLiteral(T9,
                     kLoadUnsignedWord,
                     DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
      break;
    default:
      break;
  }

  // Materialize the callee ArtMethod (or entrypoint) into `temp`, except for
  // kRecursive where the current method is already available as an input.
  switch (method_load_kind) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kMips64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ LoadFromOffset(kLoadDoubleword,
                        temp.AsRegister<GpuRegister>(),
                        TR,
                        offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ LoadLiteral(temp.AsRegister<GpuRegister>(),
                     kLoadDoubleword,
                     DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      __ LoadLiteral(temp.AsRegister<GpuRegister>(),
                     kLoadUnsignedWord,
                     DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      CodeGeneratorMIPS64::PcRelativePatchInfo* info =
          NewPcRelativeDexCacheArrayPatch(invoke->GetDexFile(), offset);
      // The 0x5678 low half is a placeholder patched at link time.
      EmitPcRelativeAddressPlaceholderHigh(info, AT);
      __ Ld(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      GpuRegister reg = temp.AsRegister<GpuRegister>();
      GpuRegister method_reg;
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<GpuRegister>();
      } else {
        // TODO: use the appropriate DCHECK() here if possible.
        // DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg;
        __ Ld(reg, SP, kCurrentMethodStackOffset);
      }

      // temp = temp->dex_cache_resolved_methods_;
      __ LoadFromOffset(kLoadDoubleword,
                        reg,
                        method_reg,
                        ArtMethod::DexCacheResolvedMethodsOffset(kMips64PointerSize).Int32Value());
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ LoadFromOffset(kLoadDoubleword,
                        reg,
                        reg,
                        CodeGenerator::GetCachePointerOffset(index_in_cache));
      break;
    }
  }

  // Perform the actual call.
  switch (code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ Balc(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
      // T9 prepared above for better instruction scheduling.
      // T9()
      __ Jalr(T9);
      __ Nop();
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      CodeGeneratorMIPS64::PcRelativePatchInfo* info =
          NewPcRelativeCallPatch(*invoke->GetTargetMethod().dex_file,
                                 invoke->GetTargetMethod().dex_method_index);
      EmitPcRelativeAddressPlaceholderHigh(info, AT);
      __ Jialc(AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // T9 = callee_method->entry_point_from_quick_compiled_code_;
      __ LoadFromOffset(kLoadDoubleword,
                        T9,
                        callee_method.AsRegister<GpuRegister>(),
                        ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                            kMips64PointerSize).Int32Value());
      // T9()
      __ Jalr(T9);
      __ Nop();
      break;
  }
  DCHECK(!IsLeafMethod());
}
3219
3220void InstructionCodeGeneratorMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003221 // Explicit clinit checks triggered by static invokes must have been pruned by
3222 // art::PrepareForRegisterAllocation.
3223 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003224
3225 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3226 return;
3227 }
3228
3229 LocationSummary* locations = invoke->GetLocations();
3230 codegen_->GenerateStaticOrDirectCall(invoke,
3231 locations->HasTemps()
3232 ? locations->GetTemp(0)
3233 : Location::NoLocation());
3234 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3235}
3236
// Emits a virtual call: loads the receiver's class, fetches the target
// ArtMethod from the embedded vtable, and calls its quick entry point via T9.
void CodeGeneratorMIPS64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_location) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  GpuRegister receiver = calling_convention.GetRegisterAt(0);

  GpuRegister temp = temp_location.AsRegister<GpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kMips64PointerSize).SizeValue();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // temp = object->GetClass();
  __ LoadFromOffset(kLoadUnsignedWord, temp, receiver, class_offset);
  // The class load doubles as the implicit null check for the receiver.
  MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetMethodAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ Nop();
}
3262
3263void InstructionCodeGeneratorMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
3264 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3265 return;
3266 }
3267
3268 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003269 DCHECK(!codegen_->IsLeafMethod());
3270 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3271}
3272
// Builds locations for HLoadClass using the shared, arch-independent helper;
// the current method is passed in the first runtime-calling-convention register.
void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
  InvokeRuntimeCallingConvention calling_convention;
  CodeGenerator::CreateLoadClassLocationSummary(
      cls,
      Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
      calling_convention.GetReturnLocation(cls->GetType()));
}
3280
// Emits code for HLoadClass. Three cases: (1) access check needed — call the
// runtime; (2) referrer's class — read it off the current ArtMethod; (3) look
// up the dex cache, with a slow path for resolution and/or clinit if required.
void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) {
  LocationSummary* locations = cls->GetLocations();
  if (cls->NeedsAccessCheck()) {
    codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
    codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
    CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
    return;
  }

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister current_method = locations->InAt(0).AsRegister<GpuRegister>();
  if (cls->IsReferrersClass()) {
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    // out = current_method->declaring_class_.
    __ LoadFromOffset(kLoadUnsignedWord, out, current_method,
                      ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    // out = current_method->dex_cache_resolved_types_[type_index].
    __ LoadFromOffset(kLoadDoubleword, out, current_method,
                      ArtMethod::DexCacheResolvedTypesOffset(kMips64PointerSize).Int32Value());
    __ LoadFromOffset(
        kLoadUnsignedWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_));
    // TODO: We will need a read barrier here.
    if (!cls->IsInDexCache() || cls->MustGenerateClinitCheck()) {
      DCHECK(cls->CanCallRuntime());
      SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
          cls,
          cls,
          cls->GetDexPc(),
          cls->MustGenerateClinitCheck());
      codegen_->AddSlowPath(slow_path);
      if (!cls->IsInDexCache()) {
        // Null means the class is not yet resolved: resolve on the slow path.
        __ Beqzc(out, slow_path->GetEntryLabel());
      }
      if (cls->MustGenerateClinitCheck()) {
        GenerateClassInitializationCheck(slow_path, out);
      } else {
        __ Bind(slow_path->GetExitLabel());
      }
    }
  }
}
3322
// Byte offset of the pending-exception slot in the Thread object (read via TR).
static int32_t GetExceptionTlsOffset() {
  return Thread::ExceptionOffset<kMips64PointerSize>().Int32Value();
}
3326
Alexey Frunze4dda3372015-06-01 18:31:49 -07003327void LocationsBuilderMIPS64::VisitLoadException(HLoadException* load) {
3328 LocationSummary* locations =
3329 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3330 locations->SetOut(Location::RequiresRegister());
3331}
3332
// Loads the pending exception reference from the Thread object into `out`.
void InstructionCodeGeneratorMIPS64::VisitLoadException(HLoadException* load) {
  GpuRegister out = load->GetLocations()->Out().AsRegister<GpuRegister>();
  __ LoadFromOffset(kLoadUnsignedWord, out, TR, GetExceptionTlsOffset());
}
3337
// HClearException needs no registers; only a (kNoCall) summary is created.
void LocationsBuilderMIPS64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}
3341
// Clears the pending exception by storing null (ZERO) into the Thread slot.
void InstructionCodeGeneratorMIPS64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
}
3345
// Builds locations for HLoadString; a slow path is needed whenever the load
// requires an environment (i.e. may call into the runtime).
void LocationsBuilderMIPS64::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = load->NeedsEnvironment()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}
3354
// Emits code for HLoadString. Currently always branches straight to the slow
// path (runtime resolution); no fast dex-cache lookup is generated.
void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) {
  // TODO: Re-add the compiler code to do string dex cache lookup again.
  SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathMIPS64(load);
  codegen_->AddSlowPath(slow_path);
  __ Bc(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
3362
Alexey Frunze4dda3372015-06-01 18:31:49 -07003363void LocationsBuilderMIPS64::VisitLongConstant(HLongConstant* constant) {
3364 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3365 locations->SetOut(Location::ConstantLocation(constant));
3366}
3367
// No code emitted here: the constant is materialized at each use site.
void InstructionCodeGeneratorMIPS64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3371
// Monitor enter/exit always calls the runtime; the object goes in the first
// runtime-calling-convention register.
void LocationsBuilderMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
3378
3379void InstructionCodeGeneratorMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescufc734082016-07-19 17:18:07 +01003380 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexey Frunze4dda3372015-06-01 18:31:49 -07003381 instruction,
Serban Constantinescufc734082016-07-19 17:18:07 +01003382 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003383 if (instruction->IsEnter()) {
3384 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
3385 } else {
3386 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
3387 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07003388}
3389
// Builds locations for HMul: core registers for integral types, FPU
// registers for floating-point types.
void LocationsBuilderMIPS64::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
3412
// Emits a multiply: MulR6/Dmul for 32/64-bit integers, MulS/MulD for
// single/double-precision floats.
void InstructionCodeGeneratorMIPS64::VisitMul(HMul* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
      if (type == Primitive::kPrimInt)
        __ MulR6(dst, lhs, rhs);
      else
        __ Dmul(dst, lhs, rhs);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (type == Primitive::kPrimFloat)
        __ MulS(dst, lhs, rhs);
      else
        __ MulD(dst, lhs, rhs);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected mul type " << type;
  }
}
3444
// Builds locations for HNeg: core registers for integral types, FPU
// registers for floating-point types.
void LocationsBuilderMIPS64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
3465
// Emits a negation: integral negation as a subtraction from ZERO, and
// floating-point negation with NegS/NegD.
void InstructionCodeGeneratorMIPS64::VisitNeg(HNeg* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
      if (type == Primitive::kPrimInt)
        __ Subu(dst, ZERO, src);
      else
        __ Dsubu(dst, ZERO, src);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
      if (type == Primitive::kPrimFloat)
        __ NegS(dst, src);
      else
        __ NegD(dst, src);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected neg type " << type;
  }
}
3495
// Array allocation always calls the runtime; the type index goes in a temp,
// length and method in fixed calling-convention registers.
void LocationsBuilderMIPS64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}
3505
// Emits an array allocation as a runtime call, passing the type index in the
// first argument register.
void InstructionCodeGeneratorMIPS64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  // Move a uint16_t value to a register.
  __ LoadConst32(locations->GetTemp(0).AsRegister<GpuRegister>(),
                 instruction->GetTypeIndex().index_);
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
}
3514
// Object allocation always calls the runtime. String allocation is special:
// it goes through StringFactory and only needs the method register as a temp.
void LocationsBuilderMIPS64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
  } else {
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  }
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}
3527
// Emits an object allocation. Strings call the NewEmptyString entrypoint
// read out of thread-local storage; everything else uses the instruction's
// chosen allocation entrypoint.
void InstructionCodeGeneratorMIPS64::VisitNewInstance(HNewInstance* instruction) {
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    GpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
    MemberOffset code_offset =
        ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
    // temp = thread->pNewEmptyString (an ArtMethod*), then call its quick code.
    __ LoadFromOffset(kLoadDoubleword, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
    __ LoadFromOffset(kLoadDoubleword, T9, temp, code_offset.Int32Value());
    __ Jalr(T9);
    __ Nop();
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
  }
}
3544
3545void LocationsBuilderMIPS64::VisitNot(HNot* instruction) {
3546 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
3547 locations->SetInAt(0, Location::RequiresRegister());
3548 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3549}
3550
// Emits bitwise not for 32/64-bit integers: ~src computed as src NOR ZERO.
void InstructionCodeGeneratorMIPS64::VisitNot(HNot* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
      __ Nor(dst, src, ZERO);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}
3568
void LocationsBuilderMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
  // Boolean negation reads and writes a core register; output may alias input.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
3574
3575void InstructionCodeGeneratorMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
3576 LocationSummary* locations = instruction->GetLocations();
3577 __ Xori(locations->Out().AsRegister<GpuRegister>(),
3578 locations->InAt(0).AsRegister<GpuRegister>(),
3579 1);
3580}
3581
void LocationsBuilderMIPS64::VisitNullCheck(HNullCheck* instruction) {
  // Null checks may throw; the helper picks the right call kind for a
  // throwing slow path. The object to test must be in a core register.
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
}
3586
void CodeGeneratorMIPS64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  // Implicit null check: instead of branching, perform a load through the
  // pointer and let the signal handler turn a fault into a NullPointerException.
  if (CanMoveNullCheckToUser(instruction)) {
    // A subsequent user of the object will fault on null; nothing to emit here.
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  // Load into ZERO so the result is discarded; faults iff obj is null.
  __ Lw(ZERO, obj.AsRegister<GpuRegister>(), 0);
  // Map the faulting pc back to this instruction's dex pc for stack unwinding.
  RecordPcInfo(instruction, instruction->GetDexPc());
}
3596
void CodeGeneratorMIPS64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  // Explicit null check: branch to a slow path (which throws) when the
  // object register is zero.
  SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathMIPS64(instruction);
  AddSlowPath(slow_path);

  Location obj = instruction->GetLocations()->InAt(0);

  __ Beqzc(obj.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
}
3605
void InstructionCodeGeneratorMIPS64::VisitNullCheck(HNullCheck* instruction) {
  // Dispatches to the implicit or explicit variant based on compiler options.
  codegen_->GenerateNullCheck(instruction);
}
3609
void LocationsBuilderMIPS64::VisitOr(HOr* instruction) {
  // Locations for OR are shared with the other binary arithmetic/logic ops.
  HandleBinaryOp(instruction);
}
3613
void InstructionCodeGeneratorMIPS64::VisitOr(HOr* instruction) {
  // Code generation for OR is shared with the other binary ops.
  HandleBinaryOp(instruction);
}
3617
void LocationsBuilderMIPS64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  // Parallel moves are created after register allocation; they never reach
  // the locations builder.
  LOG(FATAL) << "Unreachable";
}
3621
void InstructionCodeGeneratorMIPS64::VisitParallelMove(HParallelMove* instruction) {
  // Let the move resolver emit the (possibly cyclic) set of moves.
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
3625
3626void LocationsBuilderMIPS64::VisitParameterValue(HParameterValue* instruction) {
3627 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
3628 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
3629 if (location.IsStackSlot()) {
3630 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3631 } else if (location.IsDoubleStackSlot()) {
3632 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3633 }
3634 locations->SetOut(location);
3635}
3636
void InstructionCodeGeneratorMIPS64::VisitParameterValue(HParameterValue* instruction
                                                         ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
3641
void LocationsBuilderMIPS64::VisitCurrentMethod(HCurrentMethod* instruction) {
  // The current ArtMethod* is passed in the fixed method register (A0).
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
3647
void InstructionCodeGeneratorMIPS64::VisitCurrentMethod(HCurrentMethod* instruction
                                                        ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
3652
3653void LocationsBuilderMIPS64::VisitPhi(HPhi* instruction) {
3654 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01003655 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003656 locations->SetInAt(i, Location::Any());
3657 }
3658 locations->SetOut(Location::Any());
3659}
3660
void InstructionCodeGeneratorMIPS64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  // Phis are eliminated before code generation (resolved into moves).
  LOG(FATAL) << "Unreachable";
}
3664
3665void LocationsBuilderMIPS64::VisitRem(HRem* rem) {
3666 Primitive::Type type = rem->GetResultType();
3667 LocationSummary::CallKind call_kind =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003668 Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
3669 : LocationSummary::kNoCall;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003670 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
3671
3672 switch (type) {
3673 case Primitive::kPrimInt:
3674 case Primitive::kPrimLong:
3675 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07003676 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003677 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3678 break;
3679
3680 case Primitive::kPrimFloat:
3681 case Primitive::kPrimDouble: {
3682 InvokeRuntimeCallingConvention calling_convention;
3683 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
3684 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
3685 locations->SetOut(calling_convention.GetReturnLocation(type));
3686 break;
3687 }
3688
3689 default:
3690 LOG(FATAL) << "Unexpected rem type " << type;
3691 }
3692}
3693
void InstructionCodeGeneratorMIPS64::VisitRem(HRem* instruction) {
  // Integral remainder is emitted inline; floating-point remainder calls the
  // quick runtime's fmodf/fmod entrypoints.
  Primitive::Type type = instruction->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      GenerateDivRemIntegral(instruction);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod;
      codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
      // Debug-build check that the entrypoint signature matches usage.
      if (type == Primitive::kPrimFloat) {
        CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      } else {
        CheckEntrypointTypes<kQuickFmod, double, double, double>();
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
3718
void LocationsBuilderMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // A barrier uses no registers.
  memory_barrier->SetLocations(nullptr);
}
3722
void InstructionCodeGeneratorMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Emit the fence matching the requested barrier kind.
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
3726
void LocationsBuilderMIPS64::VisitReturn(HReturn* ret) {
  // The returned value must be in the ABI-defined return register for its type.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  Primitive::Type return_type = ret->InputAt(0)->GetType();
  locations->SetInAt(0, Mips64ReturnLocation(return_type));
}
3732
void InstructionCodeGeneratorMIPS64::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
  // Value is already in the return register; just tear down the frame.
  codegen_->GenerateFrameExit();
}
3736
void LocationsBuilderMIPS64::VisitReturnVoid(HReturnVoid* ret) {
  // No value to place anywhere.
  ret->SetLocations(nullptr);
}
3740
void InstructionCodeGeneratorMIPS64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  // Just tear down the frame.
  codegen_->GenerateFrameExit();
}
3744
void LocationsBuilderMIPS64::VisitRor(HRor* ror) {
  // Rotate shares location logic with the other shift operations.
  HandleShift(ror);
}
3748
void InstructionCodeGeneratorMIPS64::VisitRor(HRor* ror) {
  // Rotate shares code generation with the other shift operations.
  HandleShift(ror);
}
3752
void LocationsBuilderMIPS64::VisitShl(HShl* shl) {
  // Shift-left shares location logic with the other shift operations.
  HandleShift(shl);
}
3756
void InstructionCodeGeneratorMIPS64::VisitShl(HShl* shl) {
  // Shift-left shares code generation with the other shift operations.
  HandleShift(shl);
}
3760
void LocationsBuilderMIPS64::VisitShr(HShr* shr) {
  // Arithmetic shift-right shares location logic with the other shifts.
  HandleShift(shr);
}
3764
void InstructionCodeGeneratorMIPS64::VisitShr(HShr* shr) {
  // Arithmetic shift-right shares code generation with the other shifts.
  HandleShift(shr);
}
3768
void LocationsBuilderMIPS64::VisitSub(HSub* instruction) {
  // Subtraction shares location logic with the other binary ops.
  HandleBinaryOp(instruction);
}
3772
void InstructionCodeGeneratorMIPS64::VisitSub(HSub* instruction) {
  // Subtraction shares code generation with the other binary ops.
  HandleBinaryOp(instruction);
}
3776
void LocationsBuilderMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Static field reads share location logic with instance field reads.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
3780
void InstructionCodeGeneratorMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Static field reads share code generation with instance field reads.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
3784
void LocationsBuilderMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Static field writes share location logic with instance field writes.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
3788
void InstructionCodeGeneratorMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // GetValueCanBeNull() lets the helper skip the GC write barrier when the
  // stored reference is known non-null-irrelevant (e.g. provably null-free).
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
3792
void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  // Unresolved field accesses go through the runtime; set up locations per the
  // field-access calling convention.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
3799
void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  // Emit the runtime call that performs the unresolved instance field read.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
3809
void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  // Unresolved field writes also go through the runtime.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
3816
void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  // Emit the runtime call that performs the unresolved instance field write.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
3826
void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  // Unresolved static reads use the same runtime path as instance reads.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
3833
void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  // Emit the runtime call that performs the unresolved static field read.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
3843
void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  // Unresolved static writes use the same runtime path as instance writes.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
3850
void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  // Emit the runtime call that performs the unresolved static field write.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
3860
void LocationsBuilderMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
  // Suspend checks only call the runtime on the slow path; no registers need
  // to be preserved by the caller around the fast path.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
}
3866
3867void InstructionCodeGeneratorMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
3868 HBasicBlock* block = instruction->GetBlock();
3869 if (block->GetLoopInformation() != nullptr) {
3870 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
3871 // The back edge will generate the suspend check.
3872 return;
3873 }
3874 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
3875 // The goto will generate the suspend check.
3876 return;
3877 }
3878 GenerateSuspendCheck(instruction, nullptr);
3879}
3880
void LocationsBuilderMIPS64::VisitThrow(HThrow* instruction) {
  // Throw always calls the runtime; the exception object goes in the first
  // runtime-call argument register.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
3887
void InstructionCodeGeneratorMIPS64::VisitThrow(HThrow* instruction) {
  // Hand the exception object to the runtime's deliver-exception entrypoint.
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
3892
3893void LocationsBuilderMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
3894 Primitive::Type input_type = conversion->GetInputType();
3895 Primitive::Type result_type = conversion->GetResultType();
3896 DCHECK_NE(input_type, result_type);
3897
3898 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
3899 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
3900 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
3901 }
3902
Alexey Frunzebaf60b72015-12-22 15:15:03 -08003903 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(conversion);
3904
3905 if (Primitive::IsFloatingPointType(input_type)) {
3906 locations->SetInAt(0, Location::RequiresFpuRegister());
3907 } else {
3908 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003909 }
3910
Alexey Frunzebaf60b72015-12-22 15:15:03 -08003911 if (Primitive::IsFloatingPointType(result_type)) {
3912 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003913 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08003914 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003915 }
3916}
3917
// Emits a primitive type conversion. Four shapes are handled:
// integral->integral (masks / sign-extensions), integral->FP (mtc1 + cvt),
// FP->integral (range/NaN handling + truncate), and FP->FP (cvt).
void InstructionCodeGeneratorMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();

    switch (result_type) {
      case Primitive::kPrimChar:
        // char is unsigned 16-bit: zero-extend with a mask.
        __ Andi(dst, src, 0xFFFF);
        break;
      case Primitive::kPrimByte:
        if (input_type == Primitive::kPrimLong) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seb(dst, dst);
        } else {
          __ Seb(dst, src);
        }
        break;
      case Primitive::kPrimShort:
        if (input_type == Primitive::kPrimLong) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seh(dst, dst);
        } else {
          __ Seh(dst, src);
        }
        break;
      case Primitive::kPrimInt:
      case Primitive::kPrimLong:
        // Sign-extend 32-bit int into bits 32 through 63 for
        // int-to-long and long-to-int conversions
        __ Sll(dst, src, 0);
        break;

      default:
        LOG(FATAL) << "Unexpected type conversion from " << input_type
                   << " to " << result_type;
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
    // Move the integer into the scratch FPU register, then convert in place.
    if (input_type == Primitive::kPrimLong) {
      __ Dmtc1(src, FTMP);
      if (result_type == Primitive::kPrimFloat) {
        __ Cvtsl(dst, FTMP);
      } else {
        __ Cvtdl(dst, FTMP);
      }
    } else {
      __ Mtc1(src, FTMP);
      if (result_type == Primitive::kPrimFloat) {
        __ Cvtsw(dst, FTMP);
      } else {
        __ Cvtdw(dst, FTMP);
      }
    }
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
    Mips64Label truncate;
    Mips64Label done;

    // When NAN2008=0 (R2 and before), the truncate instruction produces the maximum positive
    // value when the input is either a NaN or is outside of the range of the output type
    // after the truncation. IOW, the three special cases (NaN, too small, too big) produce
    // the same result.
    //
    // When NAN2008=1 (R6), the truncate instruction caps the output at the minimum/maximum
    // value of the output type if the input is outside of the range after the truncation or
    // produces 0 when the input is a NaN. IOW, the three special cases produce three distinct
    // results. This matches the desired float/double-to-int/long conversion exactly.
    //
    // So, NAN2008 affects handling of negative values and NaNs by the truncate instruction.
    //
    // The following code supports both NAN2008=0 and NAN2008=1 behaviors of the truncate
    // instruction, the reason being that the emulator implements NAN2008=0 on MIPS64R6,
    // even though it must be NAN2008=1 on R6.
    //
    // The code takes care of the different behaviors by first comparing the input to the
    // minimum output value (-2**-63 for truncating to long, -2**-31 for truncating to int).
    // If the input is greater than or equal to the minimum, it procedes to the truncate
    // instruction, which will handle such an input the same way irrespective of NAN2008.
    // Otherwise the input is compared to itself to determine whether it is a NaN or not
    // in order to return either zero or the minimum value.
    //
    // TODO: simplify this when the emulator correctly implements NAN2008=1 behavior of the
    // truncate instruction for MIPS64R6.
    if (input_type == Primitive::kPrimFloat) {
      uint32_t min_val = (result_type == Primitive::kPrimLong)
          ? bit_cast<uint32_t, float>(std::numeric_limits<int64_t>::min())
          : bit_cast<uint32_t, float>(std::numeric_limits<int32_t>::min());
      __ LoadConst32(TMP, min_val);
      __ Mtc1(TMP, FTMP);
      // FTMP := (min <= src), i.e. src is in the truncatable range.
      __ CmpLeS(FTMP, FTMP, src);
    } else {
      uint64_t min_val = (result_type == Primitive::kPrimLong)
          ? bit_cast<uint64_t, double>(std::numeric_limits<int64_t>::min())
          : bit_cast<uint64_t, double>(std::numeric_limits<int32_t>::min());
      __ LoadConst64(TMP, min_val);
      __ Dmtc1(TMP, FTMP);
      __ CmpLeD(FTMP, FTMP, src);
    }

    __ Bc1nez(FTMP, &truncate);

    // Out-of-range or NaN input: produce min for -inf/too-small, 0 for NaN.
    if (input_type == Primitive::kPrimFloat) {
      __ CmpEqS(FTMP, src, src);  // All-ones iff src is not NaN.
    } else {
      __ CmpEqD(FTMP, src, src);
    }
    if (result_type == Primitive::kPrimLong) {
      __ LoadConst64(dst, std::numeric_limits<int64_t>::min());
    } else {
      __ LoadConst32(dst, std::numeric_limits<int32_t>::min());
    }
    // Mask the minimum with the NaN test: NaN yields 0, otherwise the minimum.
    __ Mfc1(TMP, FTMP);
    __ And(dst, dst, TMP);

    __ Bc(&done);

    __ Bind(&truncate);

    if (result_type == Primitive::kPrimLong) {
      if (input_type == Primitive::kPrimFloat) {
        __ TruncLS(FTMP, src);
      } else {
        __ TruncLD(FTMP, src);
      }
      __ Dmfc1(dst, FTMP);
    } else {
      if (input_type == Primitive::kPrimFloat) {
        __ TruncWS(FTMP, src);
      } else {
        __ TruncWD(FTMP, src);
      }
      __ Mfc1(dst, FTMP);
    }

    __ Bind(&done);
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
    if (result_type == Primitive::kPrimFloat) {
      __ Cvtsd(dst, src);
    } else {
      __ Cvtds(dst, src);
    }
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
4082
void LocationsBuilderMIPS64::VisitUShr(HUShr* ushr) {
  // Logical shift-right shares location logic with the other shifts.
  HandleShift(ushr);
}
4086
void InstructionCodeGeneratorMIPS64::VisitUShr(HUShr* ushr) {
  // Logical shift-right shares code generation with the other shifts.
  HandleShift(ushr);
}
4090
void LocationsBuilderMIPS64::VisitXor(HXor* instruction) {
  // XOR shares location logic with the other binary ops.
  HandleBinaryOp(instruction);
}
4094
void InstructionCodeGeneratorMIPS64::VisitXor(HXor* instruction) {
  // XOR shares code generation with the other binary ops.
  HandleBinaryOp(instruction);
}
4098
void LocationsBuilderMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
4103
void InstructionCodeGeneratorMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
4108
void LocationsBuilderMIPS64::VisitEqual(HEqual* comp) {
  // All comparisons share location logic in HandleCondition.
  HandleCondition(comp);
}
4112
void InstructionCodeGeneratorMIPS64::VisitEqual(HEqual* comp) {
  // All comparisons share code generation in HandleCondition.
  HandleCondition(comp);
}
4116
void LocationsBuilderMIPS64::VisitNotEqual(HNotEqual* comp) {
  // All comparisons share location logic in HandleCondition.
  HandleCondition(comp);
}
4120
void InstructionCodeGeneratorMIPS64::VisitNotEqual(HNotEqual* comp) {
  // All comparisons share code generation in HandleCondition.
  HandleCondition(comp);
}
4124
void LocationsBuilderMIPS64::VisitLessThan(HLessThan* comp) {
  // All comparisons share location logic in HandleCondition.
  HandleCondition(comp);
}
4128
void InstructionCodeGeneratorMIPS64::VisitLessThan(HLessThan* comp) {
  // All comparisons share code generation in HandleCondition.
  HandleCondition(comp);
}
4132
void LocationsBuilderMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  // All comparisons share location logic in HandleCondition.
  HandleCondition(comp);
}
4136
void InstructionCodeGeneratorMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  // All comparisons share code generation in HandleCondition.
  HandleCondition(comp);
}
4140
void LocationsBuilderMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  // All comparisons share location logic in HandleCondition.
  HandleCondition(comp);
}
4144
void InstructionCodeGeneratorMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  // All comparisons share code generation in HandleCondition.
  HandleCondition(comp);
}
4148
void LocationsBuilderMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  // All comparisons share location logic in HandleCondition.
  HandleCondition(comp);
}
4152
void InstructionCodeGeneratorMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  // All comparisons share code generation in HandleCondition.
  HandleCondition(comp);
}
4156
void LocationsBuilderMIPS64::VisitBelow(HBelow* comp) {
  // Unsigned comparisons also share location logic in HandleCondition.
  HandleCondition(comp);
}
4160
void InstructionCodeGeneratorMIPS64::VisitBelow(HBelow* comp) {
  // Unsigned comparisons also share code generation in HandleCondition.
  HandleCondition(comp);
}
4164
void LocationsBuilderMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  // Unsigned comparisons also share location logic in HandleCondition.
  HandleCondition(comp);
}
4168
void InstructionCodeGeneratorMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  // Unsigned comparisons also share code generation in HandleCondition.
  HandleCondition(comp);
}
4172
void LocationsBuilderMIPS64::VisitAbove(HAbove* comp) {
  // Unsigned comparisons also share location logic in HandleCondition.
  HandleCondition(comp);
}
4176
void InstructionCodeGeneratorMIPS64::VisitAbove(HAbove* comp) {
  // Unsigned comparisons also share code generation in HandleCondition.
  HandleCondition(comp);
}
4180
void LocationsBuilderMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  // Unsigned comparisons also share location logic in HandleCondition.
  HandleCondition(comp);
}
4184
void InstructionCodeGeneratorMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  // Unsigned comparisons also share code generation in HandleCondition.
  HandleCondition(comp);
}
4188
// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  // Only the switch value needs a register; case targets are block labels.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}
4195
// Lowers a packed switch as a cascade of compare-and-branch instructions:
// the value is first rebased to zero, then decremented case by case, with a
// pair of branches (less-than / equal) covering two cases per iteration.
void InstructionCodeGeneratorMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  int32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  GpuRegister value_reg = locations->InAt(0).AsRegister<GpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Create a set of compare/jumps.
  GpuRegister temp_reg = TMP;
  // temp_reg = value - lower_bound; use an ADDIU immediate when it fits.
  if (IsInt<16>(-lower_bound)) {
    __ Addiu(temp_reg, value_reg, -lower_bound);
  } else {
    __ LoadConst32(AT, -lower_bound);
    __ Addu(temp_reg, value_reg, AT);
  }
  // Jump to default if index is negative
  // Note: We don't check the case that index is positive while value < lower_bound, because in
  // this case, index >= num_entries must be true. So that we can save one branch instruction.
  __ Bltzc(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  // Handle two cases per iteration: subtract 2, then branch on sign and zero.
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // The last missing case_value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
    __ Bc(codegen_->GetLabelOf(default_block));
  }
}
4238
void LocationsBuilderMIPS64::VisitClassTableGet(HClassTableGet*) {
  // Not yet supported by this backend.
  UNIMPLEMENTED(FATAL) << "ClassTableGet is unimplemented on mips64";
}
4242
void InstructionCodeGeneratorMIPS64::VisitClassTableGet(HClassTableGet*) {
  // Not yet supported by this backend.
  UNIMPLEMENTED(FATAL) << "ClassTableGet is unimplemented on mips64";
}
4246
Alexey Frunze4dda3372015-06-01 18:31:49 -07004247} // namespace mips64
4248} // namespace art