blob: 95be3d7fd2a1dd53b5f1eed059d5fc3c78a30799 [file] [log] [blame]
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_mips.h"
18
19#include "arch/mips/entrypoints_direct_mips.h"
20#include "arch/mips/instruction_set_features_mips.h"
21#include "art_method.h"
Chris Larsen701566a2015-10-27 15:29:13 -070022#include "code_generator_utils.h"
Vladimir Marko3a21e382016-09-02 12:38:38 +010023#include "compiled_method.h"
Goran Jakovljevicf652cec2015-08-25 16:11:42 +020024#include "entrypoints/quick/quick_entrypoints.h"
25#include "entrypoints/quick/quick_entrypoints_enum.h"
26#include "gc/accounting/card_table.h"
27#include "intrinsics.h"
Chris Larsen701566a2015-10-27 15:29:13 -070028#include "intrinsics_mips.h"
Goran Jakovljevicf652cec2015-08-25 16:11:42 +020029#include "mirror/array-inl.h"
30#include "mirror/class-inl.h"
31#include "offsets.h"
32#include "thread.h"
33#include "utils/assembler.h"
34#include "utils/mips/assembler_mips.h"
35#include "utils/stack_checks.h"
36
37namespace art {
38namespace mips {
39
// Offset within the managed frame at which the current ArtMethod* is stored.
// NOTE(review): 0 suggests the method pointer lives at the bottom of the frame
// (at SP) — confirm against the frame-layout code.
static constexpr int kCurrentMethodStackOffset = 0;
// Register in which the calling convention passes the current ArtMethod*.
static constexpr Register kMethodRegisterArgument = A0;

// We'll maximize the range of a single load instruction for dex cache array accesses
// by aligning offset -32768 with the offset of the first used element.
static constexpr uint32_t kDexCacheArrayLwOffset = 0x8000;
46
Goran Jakovljevicf652cec2015-08-25 16:11:42 +020047Location MipsReturnLocation(Primitive::Type return_type) {
48 switch (return_type) {
49 case Primitive::kPrimBoolean:
50 case Primitive::kPrimByte:
51 case Primitive::kPrimChar:
52 case Primitive::kPrimShort:
53 case Primitive::kPrimInt:
54 case Primitive::kPrimNot:
55 return Location::RegisterLocation(V0);
56
57 case Primitive::kPrimLong:
58 return Location::RegisterPairLocation(V0, V1);
59
60 case Primitive::kPrimFloat:
61 case Primitive::kPrimDouble:
62 return Location::FpuRegisterLocation(F0);
63
64 case Primitive::kPrimVoid:
65 return Location();
66 }
67 UNREACHABLE();
68}
69
70Location InvokeDexCallingConventionVisitorMIPS::GetReturnLocation(Primitive::Type type) const {
71 return MipsReturnLocation(type);
72}
73
74Location InvokeDexCallingConventionVisitorMIPS::GetMethodLocation() const {
75 return Location::RegisterLocation(kMethodRegisterArgument);
76}
77
// Computes the location (register, register pair, or stack slot) of the next
// method argument of type `type`, advancing this visitor's internal cursors
// (`gp_index_` for core registers, `float_index_` for FPU registers, and
// `stack_index_` for the reserved stack area).
Location InvokeDexCallingConventionVisitorMIPS::GetNextLocation(Primitive::Type type) {
  Location next_location;

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // 32-bit integral and reference arguments take one core register each,
      // spilling to stack slots once the registers are exhausted.
      uint32_t gp_index = gp_index_++;
      if (gp_index < calling_convention.GetNumberOfRegisters()) {
        next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index));
      } else {
        size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
        next_location = Location::StackSlot(stack_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit integral arguments consume two consecutive core registers.
      uint32_t gp_index = gp_index_;
      gp_index_ += 2;
      if (gp_index + 1 < calling_convention.GetNumberOfRegisters()) {
        Register reg = calling_convention.GetRegisterAt(gp_index);
        // The pair must start on an even register (see DCHECK below); if the
        // cursor landed on an odd one, burn it and start at the next even reg.
        if (reg == A1 || reg == A3) {
          gp_index_++;  // Skip A1(A3), and use A2_A3(T0_T1) instead.
          gp_index++;
        }
        Register low_even = calling_convention.GetRegisterAt(gp_index);
        Register high_odd = calling_convention.GetRegisterAt(gp_index + 1);
        DCHECK_EQ(low_even + 1, high_odd);
        next_location = Location::RegisterPairLocation(low_even, high_odd);
      } else {
        size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
        next_location = Location::DoubleStackSlot(stack_offset);
      }
      break;
    }

    // Note: both float and double types are stored in even FPU registers. On 32 bit FPU, double
    // will take up the even/odd pair, while floats are stored in even regs only.
    // On 64 bit FPU, both double and float are stored in even registers only.
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      uint32_t float_index = float_index_++;
      if (float_index < calling_convention.GetNumberOfFpuRegisters()) {
        next_location = Location::FpuRegisterLocation(
            calling_convention.GetFpuRegisterAt(float_index));
      } else {
        size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
        next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                     : Location::StackSlot(stack_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      // Void is not a valid argument type.
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;

  return next_location;
}
145
146Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type type) {
147 return MipsReturnLocation(type);
148}
149
// Shorthand used throughout this file to emit instructions through the code
// generator's MipsAssembler; only valid where `codegen` is a CodeGeneratorMIPS*.
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorMIPS*>(codegen)->GetAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMipsPointerSize, x).Int32Value()
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200153
154class BoundsCheckSlowPathMIPS : public SlowPathCodeMIPS {
155 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000156 explicit BoundsCheckSlowPathMIPS(HBoundsCheck* instruction) : SlowPathCodeMIPS(instruction) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200157
158 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
159 LocationSummary* locations = instruction_->GetLocations();
160 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
161 __ Bind(GetEntryLabel());
162 if (instruction_->CanThrowIntoCatchBlock()) {
163 // Live registers will be restored in the catch block if caught.
164 SaveLiveRegisters(codegen, instruction_->GetLocations());
165 }
166 // We're moving two locations to locations that could overlap, so we need a parallel
167 // move resolver.
168 InvokeRuntimeCallingConvention calling_convention;
169 codegen->EmitParallelMoves(locations->InAt(0),
170 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
171 Primitive::kPrimInt,
172 locations->InAt(1),
173 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
174 Primitive::kPrimInt);
Serban Constantinescufca16662016-07-14 09:21:59 +0100175 QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
176 ? kQuickThrowStringBounds
177 : kQuickThrowArrayBounds;
178 mips_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +0100179 CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200180 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
181 }
182
183 bool IsFatal() const OVERRIDE { return true; }
184
185 const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS"; }
186
187 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200188 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS);
189};
190
191class DivZeroCheckSlowPathMIPS : public SlowPathCodeMIPS {
192 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000193 explicit DivZeroCheckSlowPathMIPS(HDivZeroCheck* instruction) : SlowPathCodeMIPS(instruction) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200194
195 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
196 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
197 __ Bind(GetEntryLabel());
Serban Constantinescufca16662016-07-14 09:21:59 +0100198 mips_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200199 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
200 }
201
202 bool IsFatal() const OVERRIDE { return true; }
203
204 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS"; }
205
206 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200207 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS);
208};
209
// Slow path resolving (and optionally initializing) a class: calls the
// InitializeType or InitializeStaticStorage runtime entrypoint and, for
// HLoadClass/kBssEntry, stores the resolved class back into its BSS entry so
// subsequent loads hit the cache.
class LoadClassSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  LoadClassSlowPathMIPS(HLoadClass* cls,
                        HInstruction* at,
                        uint32_t dex_pc,
                        bool do_clinit)
      : SlowPathCodeMIPS(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    // `at` is either the HLoadClass itself or an HClinitCheck using the class.
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    const bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6();
    // On R2 with Baker (or no) read barriers the BSS entry address survives in
    // a register across the call; otherwise it must be recomputed afterwards.
    const bool r2_baker_or_no_read_barriers = !isR6 && (!kUseReadBarrier || kUseBakerReadBarrier);
    InvokeRuntimeCallingConvention calling_convention;
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    const bool is_load_class_bss_entry =
        (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
    Register entry_address = kNoRegister;
    if (is_load_class_bss_entry && r2_baker_or_no_read_barriers) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
      // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
      // kSaveEverything call.
      entry_address = temp_is_a0 ? out.AsRegister<Register>() : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_a0) {
        __ Move(entry_address, temp);
      }
    }

    // Pass the type index to the runtime in A0.
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    mips_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
    if (is_load_class_bss_entry && r2_baker_or_no_read_barriers) {
      // The class entry address was preserved in `entry_address` thanks to kSaveEverything.
      __ StoreToOffset(kStoreWord, calling_convention.GetRegisterAt(0), entry_address, 0);
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = instruction_->GetType();
      mips_codegen->MoveLocation(out,
                                 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 type);
    }
    RestoreLiveRegisters(codegen, locations);

    // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
    if (is_load_class_bss_entry && !r2_baker_or_no_read_barriers) {
      // For non-Baker read barriers (or on R6), we need to re-calculate the address of
      // the class entry.
      Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
      CodeGeneratorMIPS::PcRelativePatchInfo* info =
          mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
      // Reordering is disabled so the patched hi/lo pair stays adjacent.
      bool reordering = __ SetReorder(false);
      mips_codegen->EmitPcRelativeAddressPlaceholderHigh(info, TMP, base);
      __ StoreToOffset(kStoreWord, out.AsRegister<Register>(), TMP, /* placeholder */ 0x5678);
      __ SetReorder(reordering);
    }
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS);
};
303
// Slow path resolving a string for HLoadString/kBssEntry: calls the
// ResolveString runtime entrypoint and stores the resolved string back into
// the string BSS entry so subsequent loads hit the cache.
class LoadStringSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit LoadStringSlowPathMIPS(HLoadString* instruction) : SlowPathCodeMIPS(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    DCHECK(instruction_->IsLoadString());
    DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    HLoadString* load = instruction_->AsLoadString();
    const dex::StringIndex string_index = load->GetStringIndex();
    Register out = locations->Out().AsRegister<Register>();
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    const bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6();
    // On R2 with Baker (or no) read barriers the BSS entry address survives in
    // a register across the call; otherwise it must be recomputed afterwards.
    const bool r2_baker_or_no_read_barriers = !isR6 && (!kUseReadBarrier || kUseBakerReadBarrier);
    InvokeRuntimeCallingConvention calling_convention;
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // For HLoadString/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
    Register entry_address = kNoRegister;
    if (r2_baker_or_no_read_barriers) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
      // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
      // kSaveEverything call.
      entry_address = temp_is_a0 ? out : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_a0) {
        __ Move(entry_address, temp);
      }
    }

    // Pass the string index to the runtime in A0.
    __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
    mips_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();

    // Store the resolved string to the BSS entry.
    if (r2_baker_or_no_read_barriers) {
      // The string entry address was preserved in `entry_address` thanks to kSaveEverything.
      __ StoreToOffset(kStoreWord, calling_convention.GetRegisterAt(0), entry_address, 0);
    }

    // Move the runtime's result (in A0/V0 per convention) to the out location.
    Primitive::Type type = instruction_->GetType();
    mips_codegen->MoveLocation(locations->Out(),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               type);
    RestoreLiveRegisters(codegen, locations);

    // Store the resolved string to the BSS entry.
    if (!r2_baker_or_no_read_barriers) {
      // For non-Baker read barriers (or on R6), we need to re-calculate the address of
      // the string entry.
      Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
      CodeGeneratorMIPS::PcRelativePatchInfo* info =
          mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
      // Reordering is disabled so the patched hi/lo pair stays adjacent.
      bool reordering = __ SetReorder(false);
      mips_codegen->EmitPcRelativeAddressPlaceholderHigh(info, TMP, base);
      __ StoreToOffset(kStoreWord, out, TMP, /* placeholder */ 0x5678);
      __ SetReorder(reordering);
    }
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS);
};
373
374class NullCheckSlowPathMIPS : public SlowPathCodeMIPS {
375 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000376 explicit NullCheckSlowPathMIPS(HNullCheck* instr) : SlowPathCodeMIPS(instr) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200377
378 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
379 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
380 __ Bind(GetEntryLabel());
381 if (instruction_->CanThrowIntoCatchBlock()) {
382 // Live registers will be restored in the catch block if caught.
383 SaveLiveRegisters(codegen, instruction_->GetLocations());
384 }
Serban Constantinescufca16662016-07-14 09:21:59 +0100385 mips_codegen->InvokeRuntime(kQuickThrowNullPointer,
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200386 instruction_,
387 instruction_->GetDexPc(),
Serban Constantinescufca16662016-07-14 09:21:59 +0100388 this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200389 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
390 }
391
392 bool IsFatal() const OVERRIDE { return true; }
393
394 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS"; }
395
396 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200397 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS);
398};
399
400class SuspendCheckSlowPathMIPS : public SlowPathCodeMIPS {
401 public:
402 SuspendCheckSlowPathMIPS(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000403 : SlowPathCodeMIPS(instruction), successor_(successor) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200404
405 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
406 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
407 __ Bind(GetEntryLabel());
Serban Constantinescufca16662016-07-14 09:21:59 +0100408 mips_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200409 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200410 if (successor_ == nullptr) {
411 __ B(GetReturnLabel());
412 } else {
413 __ B(mips_codegen->GetLabelOf(successor_));
414 }
415 }
416
417 MipsLabel* GetReturnLabel() {
418 DCHECK(successor_ == nullptr);
419 return &return_label_;
420 }
421
422 const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS"; }
423
424 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200425 // If not null, the block to branch to after the suspend check.
426 HBasicBlock* const successor_;
427
428 // If `successor_` is null, the label to branch to after the suspend check.
429 MipsLabel return_label_;
430
431 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS);
432};
433
// Slow path for HInstanceOf and HCheckCast. For instance-of it calls the
// InstanceofNonTrivial entrypoint and moves the boolean result to the output;
// for check-cast it calls CheckInstanceOf, which throws on failure. When
// `is_fatal_` is set (check-cast known to fail), control never returns here,
// so live registers are not saved/restored.
class TypeCheckSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit TypeCheckSlowPathMIPS(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeMIPS(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    uint32_t dex_pc = instruction_->GetDexPc();
    // Instance-of produces an output; that output must not be live-saved.
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);

    __ Bind(GetEntryLabel());
    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      mips_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      // Move the entrypoint's result to the instruction's output location.
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      mips_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      mips_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  // Whether the runtime call is guaranteed to throw (no return to compiled code).
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS);
};
487
488class DeoptimizationSlowPathMIPS : public SlowPathCodeMIPS {
489 public:
Aart Bik42249c32016-01-07 15:33:50 -0800490 explicit DeoptimizationSlowPathMIPS(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000491 : SlowPathCodeMIPS(instruction) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200492
493 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Aart Bik42249c32016-01-07 15:33:50 -0800494 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200495 __ Bind(GetEntryLabel());
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100496 LocationSummary* locations = instruction_->GetLocations();
497 SaveLiveRegisters(codegen, locations);
498 InvokeRuntimeCallingConvention calling_convention;
499 __ LoadConst32(calling_convention.GetRegisterAt(0),
500 static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
Serban Constantinescufca16662016-07-14 09:21:59 +0100501 mips_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100502 CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200503 }
504
505 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS"; }
506
507 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200508 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS);
509};
510
Alexey Frunze15958152017-02-09 19:08:30 -0800511class ArraySetSlowPathMIPS : public SlowPathCodeMIPS {
512 public:
513 explicit ArraySetSlowPathMIPS(HInstruction* instruction) : SlowPathCodeMIPS(instruction) {}
514
515 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
516 LocationSummary* locations = instruction_->GetLocations();
517 __ Bind(GetEntryLabel());
518 SaveLiveRegisters(codegen, locations);
519
520 InvokeRuntimeCallingConvention calling_convention;
521 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
522 parallel_move.AddMove(
523 locations->InAt(0),
524 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
525 Primitive::kPrimNot,
526 nullptr);
527 parallel_move.AddMove(
528 locations->InAt(1),
529 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
530 Primitive::kPrimInt,
531 nullptr);
532 parallel_move.AddMove(
533 locations->InAt(2),
534 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
535 Primitive::kPrimNot,
536 nullptr);
537 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
538
539 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
540 mips_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
541 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
542 RestoreLiveRegisters(codegen, locations);
543 __ B(GetExitLabel());
544 }
545
546 const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathMIPS"; }
547
548 private:
549 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS);
550};
551
552// Slow path marking an object reference `ref` during a read
553// barrier. The field `obj.field` in the object `obj` holding this
554// reference does not get updated by this slow path after marking (see
555// ReadBarrierMarkAndUpdateFieldSlowPathMIPS below for that).
556//
557// This means that after the execution of this slow path, `ref` will
558// always be up-to-date, but `obj.field` may not; i.e., after the
559// flip, `ref` will be a to-space reference, but `obj.field` will
560// probably still be a from-space reference (unless it gets updated by
561// another thread, or if another thread installed another object
562// reference (different from `ref`) in `obj.field`).
563//
564// If `entrypoint` is a valid location it is assumed to already be
565// holding the entrypoint. The case where the entrypoint is passed in
566// is for the GcRoot read barrier.
class ReadBarrierMarkSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  // `ref` is the register holding the reference to mark; `entrypoint`, if
  // valid, already holds the address of the marking entrypoint (GcRoot case).
  ReadBarrierMarkSlowPathMIPS(HInstruction* instruction,
                              Location ref,
                              Location entrypoint = Location::NoLocation())
      : SlowPathCodeMIPS(instruction), ref_(ref), entrypoint_(entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathMIPS"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // Only these instructions are expected to need reference marking here.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    // The per-register entrypoints only exist for these registers.
    DCHECK((V0 <= ref_reg && ref_reg <= T7) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == FP)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      // Entrypoint address already loaded (in T9, per the MIPS call convention).
      mips_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      DCHECK_EQ(entrypoint_.AsRegister<Register>(), T9);
      __ Jalr(entrypoint_.AsRegister<Register>());
      __ NopIfNoReordering();
    } else {
      // Select the per-register entrypoint by register number. NOTE(review):
      // the `- 1` presumably maps V0..FP onto a table that skips ZERO —
      // confirm against the ReadBarrierMarkRegX entrypoint layout.
      int32_t entry_point_offset =
          CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(ref_reg - 1);
      // This runtime call does not require a stack map.
      mips_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                        instruction_,
                                                        this,
                                                        /* direct */ false);
    }
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if already loaded.
  const Location entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS);
};
644
645// Slow path marking an object reference `ref` during a read barrier,
646// and if needed, atomically updating the field `obj.field` in the
647// object `obj` holding this reference after marking (contrary to
648// ReadBarrierMarkSlowPathMIPS above, which never tries to update
649// `obj.field`).
650//
651// This means that after the execution of this slow path, both `ref`
652// and `obj.field` will be up-to-date; i.e., after the flip, both will
653// hold the same to-space reference (unless another thread installed
654// another object reference (different from `ref`) in `obj.field`).
655class ReadBarrierMarkAndUpdateFieldSlowPathMIPS : public SlowPathCodeMIPS {
656 public:
657 ReadBarrierMarkAndUpdateFieldSlowPathMIPS(HInstruction* instruction,
658 Location ref,
659 Register obj,
660 Location field_offset,
661 Register temp1)
662 : SlowPathCodeMIPS(instruction),
663 ref_(ref),
664 obj_(obj),
665 field_offset_(field_offset),
666 temp1_(temp1) {
667 DCHECK(kEmitCompilerReadBarrier);
668 }
669
670 const char* GetDescription() const OVERRIDE {
671 return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS";
672 }
673
674 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
675 LocationSummary* locations = instruction_->GetLocations();
676 Register ref_reg = ref_.AsRegister<Register>();
677 DCHECK(locations->CanCall());
678 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
679 // This slow path is only used by the UnsafeCASObject intrinsic.
680 DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
681 << "Unexpected instruction in read barrier marking and field updating slow path: "
682 << instruction_->DebugName();
683 DCHECK(instruction_->GetLocations()->Intrinsified());
684 DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
685 DCHECK(field_offset_.IsRegisterPair()) << field_offset_;
686
687 __ Bind(GetEntryLabel());
688
689 // Save the old reference.
690 // Note that we cannot use AT or TMP to save the old reference, as those
691 // are used by the code that follows, but we need the old reference after
692 // the call to the ReadBarrierMarkRegX entry point.
693 DCHECK_NE(temp1_, AT);
694 DCHECK_NE(temp1_, TMP);
695 __ Move(temp1_, ref_reg);
696
697 // No need to save live registers; it's taken care of by the
698 // entrypoint. Also, there is no need to update the stack mask,
699 // as this runtime call will not trigger a garbage collection.
700 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
701 DCHECK((V0 <= ref_reg && ref_reg <= T7) ||
702 (S2 <= ref_reg && ref_reg <= S7) ||
703 (ref_reg == FP)) << ref_reg;
704 // "Compact" slow path, saving two moves.
705 //
706 // Instead of using the standard runtime calling convention (input
707 // and output in A0 and V0 respectively):
708 //
709 // A0 <- ref
710 // V0 <- ReadBarrierMark(A0)
711 // ref <- V0
712 //
713 // we just use rX (the register containing `ref`) as input and output
714 // of a dedicated entrypoint:
715 //
716 // rX <- ReadBarrierMarkRegX(rX)
717 //
718 int32_t entry_point_offset =
719 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(ref_reg - 1);
720 // This runtime call does not require a stack map.
721 mips_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
722 instruction_,
723 this,
724 /* direct */ false);
725
726 // If the new reference is different from the old reference,
727 // update the field in the holder (`*(obj_ + field_offset_)`).
728 //
729 // Note that this field could also hold a different object, if
730 // another thread had concurrently changed it. In that case, the
731 // the compare-and-set (CAS) loop below would abort, leaving the
732 // field as-is.
733 MipsLabel done;
734 __ Beq(temp1_, ref_reg, &done);
735
736 // Update the the holder's field atomically. This may fail if
737 // mutator updates before us, but it's OK. This is achieved
738 // using a strong compare-and-set (CAS) operation with relaxed
739 // memory synchronization ordering, where the expected value is
740 // the old reference and the desired value is the new reference.
741
742 // Convenience aliases.
743 Register base = obj_;
744 // The UnsafeCASObject intrinsic uses a register pair as field
745 // offset ("long offset"), of which only the low part contains
746 // data.
747 Register offset = field_offset_.AsRegisterPairLow<Register>();
748 Register expected = temp1_;
749 Register value = ref_reg;
750 Register tmp_ptr = TMP; // Pointer to actual memory.
751 Register tmp = AT; // Value in memory.
752
753 __ Addu(tmp_ptr, base, offset);
754
755 if (kPoisonHeapReferences) {
756 __ PoisonHeapReference(expected);
757 // Do not poison `value` if it is the same register as
758 // `expected`, which has just been poisoned.
759 if (value != expected) {
760 __ PoisonHeapReference(value);
761 }
762 }
763
764 // do {
765 // tmp = [r_ptr] - expected;
766 // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
767
768 bool is_r6 = mips_codegen->GetInstructionSetFeatures().IsR6();
769 MipsLabel loop_head, exit_loop;
770 __ Bind(&loop_head);
771 if (is_r6) {
772 __ LlR6(tmp, tmp_ptr);
773 } else {
774 __ LlR2(tmp, tmp_ptr);
775 }
776 __ Bne(tmp, expected, &exit_loop);
777 __ Move(tmp, value);
778 if (is_r6) {
779 __ ScR6(tmp, tmp_ptr);
780 } else {
781 __ ScR2(tmp, tmp_ptr);
782 }
783 __ Beqz(tmp, &loop_head);
784 __ Bind(&exit_loop);
785
786 if (kPoisonHeapReferences) {
787 __ UnpoisonHeapReference(expected);
788 // Do not unpoison `value` if it is the same register as
789 // `expected`, which has just been unpoisoned.
790 if (value != expected) {
791 __ UnpoisonHeapReference(value);
792 }
793 }
794
795 __ Bind(&done);
796 __ B(GetExitLabel());
797 }
798
799 private:
800 // The location (register) of the marked object reference.
801 const Location ref_;
802 // The register containing the object holding the marked object reference field.
803 const Register obj_;
804 // The location of the offset of the marked reference field within `obj_`.
805 Location field_offset_;
806
807 const Register temp1_;
808
809 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS);
810};
811
812// Slow path generating a read barrier for a heap reference.
813class ReadBarrierForHeapReferenceSlowPathMIPS : public SlowPathCodeMIPS {
814 public:
815 ReadBarrierForHeapReferenceSlowPathMIPS(HInstruction* instruction,
816 Location out,
817 Location ref,
818 Location obj,
819 uint32_t offset,
820 Location index)
821 : SlowPathCodeMIPS(instruction),
822 out_(out),
823 ref_(ref),
824 obj_(obj),
825 offset_(offset),
826 index_(index) {
827 DCHECK(kEmitCompilerReadBarrier);
828 // If `obj` is equal to `out` or `ref`, it means the initial object
829 // has been overwritten by (or after) the heap object reference load
830 // to be instrumented, e.g.:
831 //
832 // __ LoadFromOffset(kLoadWord, out, out, offset);
833 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
834 //
835 // In that case, we have lost the information about the original
836 // object, and the emitted read barrier cannot work properly.
837 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
838 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
839 }
840
841 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
842 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
843 LocationSummary* locations = instruction_->GetLocations();
844 Register reg_out = out_.AsRegister<Register>();
845 DCHECK(locations->CanCall());
846 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
847 DCHECK(instruction_->IsInstanceFieldGet() ||
848 instruction_->IsStaticFieldGet() ||
849 instruction_->IsArrayGet() ||
850 instruction_->IsInstanceOf() ||
851 instruction_->IsCheckCast() ||
852 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
853 << "Unexpected instruction in read barrier for heap reference slow path: "
854 << instruction_->DebugName();
855
856 __ Bind(GetEntryLabel());
857 SaveLiveRegisters(codegen, locations);
858
859 // We may have to change the index's value, but as `index_` is a
860 // constant member (like other "inputs" of this slow path),
861 // introduce a copy of it, `index`.
862 Location index = index_;
863 if (index_.IsValid()) {
864 // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
865 if (instruction_->IsArrayGet()) {
866 // Compute the actual memory offset and store it in `index`.
867 Register index_reg = index_.AsRegister<Register>();
868 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
869 if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
870 // We are about to change the value of `index_reg` (see the
871 // calls to art::mips::MipsAssembler::Sll and
872 // art::mips::MipsAssembler::Addiu32 below), but it has
873 // not been saved by the previous call to
874 // art::SlowPathCode::SaveLiveRegisters, as it is a
875 // callee-save register --
876 // art::SlowPathCode::SaveLiveRegisters does not consider
877 // callee-save registers, as it has been designed with the
878 // assumption that callee-save registers are supposed to be
879 // handled by the called function. So, as a callee-save
880 // register, `index_reg` _would_ eventually be saved onto
881 // the stack, but it would be too late: we would have
882 // changed its value earlier. Therefore, we manually save
883 // it here into another freely available register,
884 // `free_reg`, chosen of course among the caller-save
885 // registers (as a callee-save `free_reg` register would
886 // exhibit the same problem).
887 //
888 // Note we could have requested a temporary register from
889 // the register allocator instead; but we prefer not to, as
890 // this is a slow path, and we know we can find a
891 // caller-save register that is available.
892 Register free_reg = FindAvailableCallerSaveRegister(codegen);
893 __ Move(free_reg, index_reg);
894 index_reg = free_reg;
895 index = Location::RegisterLocation(index_reg);
896 } else {
897 // The initial register stored in `index_` has already been
898 // saved in the call to art::SlowPathCode::SaveLiveRegisters
899 // (as it is not a callee-save register), so we can freely
900 // use it.
901 }
902 // Shifting the index value contained in `index_reg` by the scale
903 // factor (2) cannot overflow in practice, as the runtime is
904 // unable to allocate object arrays with a size larger than
905 // 2^26 - 1 (that is, 2^28 - 4 bytes).
906 __ Sll(index_reg, index_reg, TIMES_4);
907 static_assert(
908 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
909 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
910 __ Addiu32(index_reg, index_reg, offset_);
911 } else {
912 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
913 // intrinsics, `index_` is not shifted by a scale factor of 2
914 // (as in the case of ArrayGet), as it is actually an offset
915 // to an object field within an object.
916 DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
917 DCHECK(instruction_->GetLocations()->Intrinsified());
918 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
919 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
920 << instruction_->AsInvoke()->GetIntrinsic();
921 DCHECK_EQ(offset_, 0U);
922 DCHECK(index_.IsRegisterPair());
923 // UnsafeGet's offset location is a register pair, the low
924 // part contains the correct offset.
925 index = index_.ToLow();
926 }
927 }
928
929 // We're moving two or three locations to locations that could
930 // overlap, so we need a parallel move resolver.
931 InvokeRuntimeCallingConvention calling_convention;
932 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
933 parallel_move.AddMove(ref_,
934 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
935 Primitive::kPrimNot,
936 nullptr);
937 parallel_move.AddMove(obj_,
938 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
939 Primitive::kPrimNot,
940 nullptr);
941 if (index.IsValid()) {
942 parallel_move.AddMove(index,
943 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
944 Primitive::kPrimInt,
945 nullptr);
946 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
947 } else {
948 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
949 __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
950 }
951 mips_codegen->InvokeRuntime(kQuickReadBarrierSlow,
952 instruction_,
953 instruction_->GetDexPc(),
954 this);
955 CheckEntrypointTypes<
956 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
957 mips_codegen->Move32(out_, calling_convention.GetReturnLocation(Primitive::kPrimNot));
958
959 RestoreLiveRegisters(codegen, locations);
960 __ B(GetExitLabel());
961 }
962
963 const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathMIPS"; }
964
965 private:
966 Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
967 size_t ref = static_cast<int>(ref_.AsRegister<Register>());
968 size_t obj = static_cast<int>(obj_.AsRegister<Register>());
969 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
970 if (i != ref &&
971 i != obj &&
972 !codegen->IsCoreCalleeSaveRegister(i) &&
973 !codegen->IsBlockedCoreRegister(i)) {
974 return static_cast<Register>(i);
975 }
976 }
977 // We shall never fail to find a free caller-save register, as
978 // there are more than two core caller-save registers on MIPS
979 // (meaning it is possible to find one which is different from
980 // `ref` and `obj`).
981 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
982 LOG(FATAL) << "Could not find a free caller-save register";
983 UNREACHABLE();
984 }
985
986 const Location out_;
987 const Location ref_;
988 const Location obj_;
989 const uint32_t offset_;
990 // An additional location containing an index to an array.
991 // Only used for HArrayGet and the UnsafeGetObject &
992 // UnsafeGetObjectVolatile intrinsics.
993 const Location index_;
994
995 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS);
996};
997
998// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  // `out` receives the result of the barrier; `root` is the GC root the
  // barrier is applied to. Only emitted for HLoadClass/HLoadString.
  ReadBarrierForRootSlowPathMIPS(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeMIPS(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Unlike the mark slow paths, this uses the standard runtime calling
    // convention: root in the first argument register, result in V0.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    mips_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    mips_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                instruction_,
                                instruction_->GetDexPc(),
                                this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    mips_codegen->Move32(out_, calling_convention.GetReturnLocation(Primitive::kPrimNot));

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathMIPS"; }

 private:
  // The location receiving the read barrier's result.
  const Location out_;
  // The location of the GC root being read.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS);
};
1040
// Constructs the MIPS32 code generator for `graph`.
// `isa_features` distinguishes MIPS32 R2 from R6 instruction selection;
// `stats` may be null. All patch/literal containers are arena-allocated.
CodeGeneratorMIPS::CodeGeneratorMIPS(HGraph* graph,
                                     const MipsInstructionSetFeatures& isa_features,
                                     const CompilerOptions& compiler_options,
                                     OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfFRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena(), &isa_features),
      isa_features_(isa_features),
      // Deduplication map for 32-bit literals.
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      // PC-relative patch info collected during code generation.
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      // Set when PC-relative sequences on R2 clobber RA; see
      // HasAllocatedCalleeSaveRegisters().
      clobbered_ra_(false) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}
1073
1074#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +01001075// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
1076#define __ down_cast<MipsAssembler*>(GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -07001077#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMipsPointerSize, x).Int32Value()
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001078
1079void CodeGeneratorMIPS::Finalize(CodeAllocator* allocator) {
1080 // Ensure that we fix up branches.
1081 __ FinalizeCode();
1082
1083 // Adjust native pc offsets in stack maps.
1084 for (size_t i = 0, num = stack_map_stream_.GetNumberOfStackMaps(); i != num; ++i) {
Mathieu Chartiera2f526f2017-01-19 14:48:48 -08001085 uint32_t old_position =
1086 stack_map_stream_.GetStackMap(i).native_pc_code_offset.Uint32Value(kMips);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001087 uint32_t new_position = __ GetAdjustedPosition(old_position);
1088 DCHECK_GE(new_position, old_position);
1089 stack_map_stream_.SetStackMapNativePcOffset(i, new_position);
1090 }
1091
1092 // Adjust pc offsets for the disassembly information.
1093 if (disasm_info_ != nullptr) {
1094 GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
1095 frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
1096 frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
1097 for (auto& it : *disasm_info_->GetInstructionIntervals()) {
1098 it.second.start = __ GetAdjustedPosition(it.second.start);
1099 it.second.end = __ GetAdjustedPosition(it.second.end);
1100 }
1101 for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
1102 it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
1103 it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
1104 }
1105 }
1106
1107 CodeGenerator::Finalize(allocator);
1108}
1109
// Returns the owning code generator's assembler; all parallel-move code is
// emitted through it.
MipsAssembler* ParallelMoveResolverMIPS::GetAssembler() const {
  return codegen_->GetAssembler();
}
1113
1114void ParallelMoveResolverMIPS::EmitMove(size_t index) {
1115 DCHECK_LT(index, moves_.size());
1116 MoveOperands* move = moves_[index];
1117 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
1118}
1119
// Emits code that swaps the contents of the two locations of the pending
// move at `index`. Dispatches on the operand kinds (GPR, FPR, register
// pair, 32-bit or 64-bit stack slot) and routes data through the scratch
// registers TMP/AT/FTMP; slot<->slot swaps go through Exchange().
void ParallelMoveResolverMIPS::EmitSwap(size_t index) {
  DCHECK_LT(index, moves_.size());
  MoveOperands* move = moves_[index];
  Primitive::Type type = move->GetType();
  Location loc1 = move->GetDestination();
  Location loc2 = move->GetSource();

  // Constants are moves, never swaps.
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  if (loc1.IsRegister() && loc2.IsRegister()) {
    // Swap 2 GPRs.
    Register r1 = loc1.AsRegister<Register>();
    Register r2 = loc2.AsRegister<Register>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (loc1.IsFpuRegister() && loc2.IsFpuRegister()) {
    // Swap 2 FPRs, single- or double-precision depending on `type`.
    FRegister f1 = loc1.AsFpuRegister<FRegister>();
    FRegister f2 = loc2.AsFpuRegister<FRegister>();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, f2);
      __ MovS(f2, f1);
      __ MovS(f1, FTMP);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, f2);
      __ MovD(f2, f1);
      __ MovD(f1, FTMP);
    }
  } else if ((loc1.IsRegister() && loc2.IsFpuRegister()) ||
             (loc1.IsFpuRegister() && loc2.IsRegister())) {
    // Swap FPR and GPR.
    DCHECK_EQ(type, Primitive::kPrimFloat);  // Can only swap a float.
    FRegister f1 = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
                                        : loc2.AsFpuRegister<FRegister>();
    Register r2 = loc1.IsRegister() ? loc1.AsRegister<Register>() : loc2.AsRegister<Register>();
    __ Move(TMP, r2);
    __ Mfc1(r2, f1);
    __ Mtc1(TMP, f1);
  } else if (loc1.IsRegisterPair() && loc2.IsRegisterPair()) {
    // Swap 2 GPR register pairs.
    Register r1 = loc1.AsRegisterPairLow<Register>();
    Register r2 = loc2.AsRegisterPairLow<Register>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
    r1 = loc1.AsRegisterPairHigh<Register>();
    r2 = loc2.AsRegisterPairHigh<Register>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if ((loc1.IsRegisterPair() && loc2.IsFpuRegister()) ||
             (loc1.IsFpuRegister() && loc2.IsRegisterPair())) {
    // Swap FPR and GPR register pair.
    DCHECK_EQ(type, Primitive::kPrimDouble);
    FRegister f1 = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
                                        : loc2.AsFpuRegister<FRegister>();
    Register r2_l = loc1.IsRegisterPair() ? loc1.AsRegisterPairLow<Register>()
                                          : loc2.AsRegisterPairLow<Register>();
    Register r2_h = loc1.IsRegisterPair() ? loc1.AsRegisterPairHigh<Register>()
                                          : loc2.AsRegisterPairHigh<Register>();
    // Use 2 temporary registers because we can't first swap the low 32 bits of an FPR and
    // then swap the high 32 bits of the same FPR. mtc1 makes the high 32 bits of an FPR
    // unpredictable and the following mfch1 will fail.
    __ Mfc1(TMP, f1);
    __ MoveFromFpuHigh(AT, f1);
    __ Mtc1(r2_l, f1);
    __ MoveToFpuHigh(r2_h, f1);
    __ Move(r2_l, TMP);
    __ Move(r2_h, AT);
  } else if (loc1.IsStackSlot() && loc2.IsStackSlot()) {
    Exchange(loc1.GetStackIndex(), loc2.GetStackIndex(), /* double_slot */ false);
  } else if (loc1.IsDoubleStackSlot() && loc2.IsDoubleStackSlot()) {
    Exchange(loc1.GetStackIndex(), loc2.GetStackIndex(), /* double_slot */ true);
  } else if ((loc1.IsRegister() && loc2.IsStackSlot()) ||
             (loc1.IsStackSlot() && loc2.IsRegister())) {
    // Swap a GPR with a 32-bit stack slot.
    Register reg = loc1.IsRegister() ? loc1.AsRegister<Register>() : loc2.AsRegister<Register>();
    intptr_t offset = loc1.IsStackSlot() ? loc1.GetStackIndex() : loc2.GetStackIndex();
    __ Move(TMP, reg);
    __ LoadFromOffset(kLoadWord, reg, SP, offset);
    __ StoreToOffset(kStoreWord, TMP, SP, offset);
  } else if ((loc1.IsRegisterPair() && loc2.IsDoubleStackSlot()) ||
             (loc1.IsDoubleStackSlot() && loc2.IsRegisterPair())) {
    // Swap a GPR register pair with a 64-bit stack slot, one word at a time.
    Register reg_l = loc1.IsRegisterPair() ? loc1.AsRegisterPairLow<Register>()
                                           : loc2.AsRegisterPairLow<Register>();
    Register reg_h = loc1.IsRegisterPair() ? loc1.AsRegisterPairHigh<Register>()
                                           : loc2.AsRegisterPairHigh<Register>();
    intptr_t offset_l = loc1.IsDoubleStackSlot() ? loc1.GetStackIndex() : loc2.GetStackIndex();
    intptr_t offset_h = loc1.IsDoubleStackSlot() ? loc1.GetHighStackIndex(kMipsWordSize)
                                                 : loc2.GetHighStackIndex(kMipsWordSize);
    __ Move(TMP, reg_l);
    __ LoadFromOffset(kLoadWord, reg_l, SP, offset_l);
    __ StoreToOffset(kStoreWord, TMP, SP, offset_l);
    __ Move(TMP, reg_h);
    __ LoadFromOffset(kLoadWord, reg_h, SP, offset_h);
    __ StoreToOffset(kStoreWord, TMP, SP, offset_h);
  } else if (loc1.IsFpuRegister() || loc2.IsFpuRegister()) {
    // Swap an FPR with a stack slot (the remaining FPR combination).
    FRegister reg = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
                                         : loc2.AsFpuRegister<FRegister>();
    intptr_t offset = loc1.IsFpuRegister() ? loc2.GetStackIndex() : loc1.GetStackIndex();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, reg);
      __ LoadSFromOffset(reg, SP, offset);
      __ StoreSToOffset(FTMP, SP, offset);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, reg);
      __ LoadDFromOffset(reg, SP, offset);
      __ StoreDToOffset(FTMP, SP, offset);
    }
  } else {
    LOG(FATAL) << "Swap between " << loc1 << " and " << loc2 << " is unsupported";
  }
}
1239
// Reloads a scratch register previously saved by SpillScratch() (LIFO, via
// the stack).
void ParallelMoveResolverMIPS::RestoreScratch(int reg) {
  __ Pop(static_cast<Register>(reg));
}
1243
// Saves `reg` on the stack so it can temporarily serve as a scratch register.
void ParallelMoveResolverMIPS::SpillScratch(int reg) {
  __ Push(static_cast<Register>(reg));
}
1247
1248void ParallelMoveResolverMIPS::Exchange(int index1, int index2, bool double_slot) {
1249 // Allocate a scratch register other than TMP, if available.
1250 // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
1251 // automatically unspilled when the scratch scope object is destroyed).
1252 ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
1253 // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
1254 int stack_offset = ensure_scratch.IsSpilled() ? kMipsWordSize : 0;
1255 for (int i = 0; i <= (double_slot ? 1 : 0); i++, stack_offset += kMipsWordSize) {
1256 __ LoadFromOffset(kLoadWord,
1257 Register(ensure_scratch.GetRegister()),
1258 SP,
1259 index1 + stack_offset);
1260 __ LoadFromOffset(kLoadWord,
1261 TMP,
1262 SP,
1263 index2 + stack_offset);
1264 __ StoreToOffset(kStoreWord,
1265 Register(ensure_scratch.GetRegister()),
1266 SP,
1267 index2 + stack_offset);
1268 __ StoreToOffset(kStoreWord, TMP, SP, index1 + stack_offset);
1269 }
1270}
1271
Alexey Frunze73296a72016-06-03 22:51:46 -07001272void CodeGeneratorMIPS::ComputeSpillMask() {
1273 core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
1274 fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
1275 DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
1276 // If there're FPU callee-saved registers and there's an odd number of GPR callee-saved
1277 // registers, include the ZERO register to force alignment of FPU callee-saved registers
1278 // within the stack frame.
1279 if ((fpu_spill_mask_ != 0) && (POPCOUNT(core_spill_mask_) % 2 != 0)) {
1280 core_spill_mask_ |= (1 << ZERO);
1281 }
Alexey Frunze58320ce2016-08-30 21:40:46 -07001282}
1283
// Returns whether this method needs to save any callee-save registers,
// which in turn decides whether a stack frame is created at all.
bool CodeGeneratorMIPS::HasAllocatedCalleeSaveRegisters() const {
  // If RA is clobbered by PC-relative operations on R2 and it's the only spilled register
  // (this can happen in leaf methods), force CodeGenerator::InitializeCodeGeneration()
  // into the path that creates a stack frame so that RA can be explicitly saved and restored.
  // RA can't otherwise be saved/restored when it's the only spilled register.
  return CodeGenerator::HasAllocatedCalleeSaveRegisters() || clobbered_ra_;
}
1291
// Maps a MIPS core register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::MipsCore(static_cast<int>(reg));
}
1295
1296// TODO: mapping of floating-point registers to DWARF.
1297
// Emits the method prologue: optional stack-overflow probe, frame
// allocation, callee-save spills (with CFI), and storage of the current
// ArtMethod* and the should-deoptimize flag when required.
void CodeGeneratorMIPS::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kMips) || !IsLeafMethod();

  if (do_overflow_check) {
    // Probe the far end of the reserved stack region with a load into ZERO;
    // the recorded pc lets the runtime attribute a fault here to overflow.
    __ LoadFromOffset(kLoadWord,
                      ZERO,
                      SP,
                      -static_cast<int32_t>(GetStackOverflowReservedBytes(kMips)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    // An empty frame is only legal when nothing but RA would be spilled and
    // RA was not clobbered by PC-relative sequences.
    CHECK_EQ(fpu_spill_mask_, 0u);
    CHECK_EQ(core_spill_mask_, 1u << RA);
    CHECK(!clobbered_ra_);
    return;
  }

  // Make sure the frame size isn't unreasonably large.
  if (GetFrameSize() > GetStackOverflowReservedBytes(kMips)) {
    LOG(FATAL) << "Stack frame larger than " << GetStackOverflowReservedBytes(kMips) << " bytes";
  }

  // Spill callee-saved registers.

  uint32_t ofs = GetFrameSize();
  __ IncreaseFrameSize(ofs);

  // Core registers are stored top-down, highest-numbered register highest.
  for (uint32_t mask = core_spill_mask_; mask != 0; ) {
    Register reg = static_cast<Register>(MostSignificantBit(mask));
    mask ^= 1u << reg;
    ofs -= kMipsWordSize;
    // The ZERO register is only included for alignment.
    if (reg != ZERO) {
      __ StoreToOffset(kStoreWord, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // FPU callee saves follow, stored as doublewords.
  for (uint32_t mask = fpu_spill_mask_; mask != 0; ) {
    FRegister reg = static_cast<FRegister>(MostSignificantBit(mask));
    mask ^= 1u << reg;
    ofs -= kMipsDoublewordSize;
    __ StoreDToOffset(reg, SP, ofs);
    // TODO: __ cfi().RelOffset(DWARFReg(reg), ofs);
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ StoreToOffset(kStoreWord, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}
1359
// Emits the method epilogue: restores callee-saved registers, tears down the
// frame (using the jr delay slot when the frame size fits a 16-bit
// immediate), and returns via RA. CFI state is saved/restored around the
// epilogue so following code keeps the in-frame description.
void CodeGeneratorMIPS::GenerateFrameExit() {
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Restore callee-saved registers.

    // For better instruction scheduling restore RA before other registers.
    uint32_t ofs = GetFrameSize();
    for (uint32_t mask = core_spill_mask_; mask != 0; ) {
      Register reg = static_cast<Register>(MostSignificantBit(mask));
      mask ^= 1u << reg;
      ofs -= kMipsWordSize;
      // The ZERO register is only included for alignment.
      if (reg != ZERO) {
        __ LoadFromOffset(kLoadWord, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (uint32_t mask = fpu_spill_mask_; mask != 0; ) {
      FRegister reg = static_cast<FRegister>(MostSignificantBit(mask));
      mask ^= 1u << reg;
      ofs -= kMipsDoublewordSize;
      __ LoadDFromOffset(reg, SP, ofs);
      // TODO: __ cfi().Restore(DWARFReg(reg));
    }

    size_t frame_size = GetFrameSize();
    // Adjust the stack pointer in the delay slot if doing so doesn't break CFI.
    bool exchange = IsInt<16>(static_cast<int32_t>(frame_size));
    // Disable assembler reordering so the delay slot is filled exactly as
    // written below.
    bool reordering = __ SetReorder(false);
    if (exchange) {
      __ Jr(RA);
      __ DecreaseFrameSize(frame_size);  // Single instruction in delay slot.
    } else {
      __ DecreaseFrameSize(frame_size);
      __ Jr(RA);
      __ Nop();  // In delay slot.
    }
    __ SetReorder(reordering);
  } else {
    __ Jr(RA);
    __ NopIfNoReordering();
  }

  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1408
1409void CodeGeneratorMIPS::Bind(HBasicBlock* block) {
1410 __ Bind(GetLabelOf(block));
1411}
1412
1413void CodeGeneratorMIPS::MoveLocation(Location dst, Location src, Primitive::Type dst_type) {
1414 if (src.Equals(dst)) {
1415 return;
1416 }
1417
1418 if (src.IsConstant()) {
1419 MoveConstant(dst, src.GetConstant());
1420 } else {
1421 if (Primitive::Is64BitType(dst_type)) {
1422 Move64(dst, src);
1423 } else {
1424 Move32(dst, src);
1425 }
1426 }
1427}
1428
// Moves a 32-bit value between any combination of core register, FPU register
// and stack slot. Stack-to-stack moves go through the scratch register TMP.
void CodeGeneratorMIPS::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }

  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Move(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      // FPU -> core transfer.
      __ Mfc1(destination.AsRegister<Register>(), source.AsFpuRegister<FRegister>());
    } else {
      DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      // Core -> FPU transfer.
      __ Mtc1(source.AsRegister<Register>(), destination.AsFpuRegister<FRegister>());
    } else if (source.IsFpuRegister()) {
      __ MovS(destination.AsFpuRegister<FRegister>(), source.AsFpuRegister<FRegister>());
    } else {
      DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
      __ LoadSFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ StoreSToOffset(source.AsFpuRegister<FRegister>(), SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
      // Stack-to-stack: bounce through the TMP scratch register.
      __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
    }
  }
}
1465
// Moves a 64-bit value between a core register pair, a double-precision FPU
// register and a double stack slot. Stack-to-stack moves copy word by word
// through TMP; FPU halves are accessed via Mfc1/Mtc1 plus MoveFrom/ToFpuHigh
// (which abstracts over the FR=0/FR=1 FPU register models).
void CodeGeneratorMIPS::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }

  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      __ Move(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
      __ Move(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
    } else if (source.IsFpuRegister()) {
      Register dst_high = destination.AsRegisterPairHigh<Register>();
      Register dst_low = destination.AsRegisterPairLow<Register>();
      FRegister src = source.AsFpuRegister<FRegister>();
      __ Mfc1(dst_low, src);
      __ MoveFromFpuHigh(dst_high, src);
    } else {
      DCHECK(source.IsDoubleStackSlot()) << "Cannot move from " << source << " to " << destination;
      int32_t off = source.GetStackIndex();
      Register r = destination.AsRegisterPairLow<Register>();
      // Loads both words of the pair (the assembler expands the doubleword load).
      __ LoadFromOffset(kLoadDoubleword, r, SP, off);
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegisterPair()) {
      FRegister dst = destination.AsFpuRegister<FRegister>();
      Register src_high = source.AsRegisterPairHigh<Register>();
      Register src_low = source.AsRegisterPairLow<Register>();
      __ Mtc1(src_low, dst);
      __ MoveToFpuHigh(src_high, dst);
    } else if (source.IsFpuRegister()) {
      __ MovD(destination.AsFpuRegister<FRegister>(), source.AsFpuRegister<FRegister>());
    } else {
      DCHECK(source.IsDoubleStackSlot()) << "Cannot move from " << source << " to " << destination;
      __ LoadDFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot()) << destination;
    int32_t off = destination.GetStackIndex();
    if (source.IsRegisterPair()) {
      __ StoreToOffset(kStoreDoubleword, source.AsRegisterPairLow<Register>(), SP, off);
    } else if (source.IsFpuRegister()) {
      __ StoreDToOffset(source.AsFpuRegister<FRegister>(), SP, off);
    } else {
      DCHECK(source.IsDoubleStackSlot()) << "Cannot move from " << source << " to " << destination;
      // Stack-to-stack: copy low then high word through the TMP scratch register.
      __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, TMP, SP, off);
      __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex() + 4);
      __ StoreToOffset(kStoreWord, TMP, SP, off + 4);
    }
  }
}
1516
// Materializes the HIR constant `c` into `destination` (register, register
// pair, FPU register or stack slot), choosing the move by constant kind:
// int/null, long, float or double. Stack destinations go through TMP.
void CodeGeneratorMIPS::MoveConstant(Location destination, HConstant* c) {
  if (c->IsIntConstant() || c->IsNullConstant()) {
    // Move 32 bit constant.
    int32_t value = GetInt32ValueOf(c);
    if (destination.IsRegister()) {
      Register dst = destination.AsRegister<Register>();
      __ LoadConst32(dst, value);
    } else {
      DCHECK(destination.IsStackSlot())
          << "Cannot move " << c->DebugName() << " to " << destination;
      __ StoreConstToOffset(kStoreWord, value, SP, destination.GetStackIndex(), TMP);
    }
  } else if (c->IsLongConstant()) {
    // Move 64 bit constant.
    int64_t value = GetInt64ValueOf(c);
    if (destination.IsRegisterPair()) {
      Register r_h = destination.AsRegisterPairHigh<Register>();
      Register r_l = destination.AsRegisterPairLow<Register>();
      __ LoadConst64(r_h, r_l, value);
    } else {
      DCHECK(destination.IsDoubleStackSlot())
          << "Cannot move " << c->DebugName() << " to " << destination;
      __ StoreConstToOffset(kStoreDoubleword, value, SP, destination.GetStackIndex(), TMP);
    }
  } else if (c->IsFloatConstant()) {
    // Move 32 bit float constant (handled via its raw bit pattern).
    int32_t value = GetInt32ValueOf(c);
    if (destination.IsFpuRegister()) {
      __ LoadSConst32(destination.AsFpuRegister<FRegister>(), value, TMP);
    } else {
      DCHECK(destination.IsStackSlot())
          << "Cannot move " << c->DebugName() << " to " << destination;
      __ StoreConstToOffset(kStoreWord, value, SP, destination.GetStackIndex(), TMP);
    }
  } else {
    // Move 64 bit double constant (raw bit pattern).
    DCHECK(c->IsDoubleConstant()) << c->DebugName();
    int64_t value = GetInt64ValueOf(c);
    if (destination.IsFpuRegister()) {
      FRegister fd = destination.AsFpuRegister<FRegister>();
      __ LoadDConst64(fd, value, TMP);
    } else {
      DCHECK(destination.IsDoubleStackSlot())
          << "Cannot move " << c->DebugName() << " to " << destination;
      __ StoreConstToOffset(kStoreDoubleword, value, SP, destination.GetStackIndex(), TMP);
    }
  }
}
1565
1566void CodeGeneratorMIPS::MoveConstant(Location destination, int32_t value) {
1567 DCHECK(destination.IsRegister());
1568 Register dst = destination.AsRegister<Register>();
1569 __ LoadConst32(dst, value);
1570}
1571
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001572void CodeGeneratorMIPS::AddLocationAsTemp(Location location, LocationSummary* locations) {
1573 if (location.IsRegister()) {
1574 locations->AddTemp(location);
Alexey Frunzec9e94f32015-10-26 16:11:39 -07001575 } else if (location.IsRegisterPair()) {
1576 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairLow<Register>()));
1577 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairHigh<Register>()));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001578 } else {
1579 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1580 }
1581}
1582
// Converts every recorded PC-relative patch in `infos` into a LinkerPatch via
// the `Factory` function and appends it to `linker_patches`. The anchor for
// the PC-relative offset differs by ISA revision (see comment below).
template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    const DexFile& dex_file = info.target_dex_file;
    size_t offset_or_index = info.offset_or_index;
    DCHECK(info.high_label.IsBound());
    uint32_t high_offset = __ GetLabelLocation(&info.high_label);
    // On R2 we use HMipsComputeBaseMethodAddress and patch relative to
    // the assembler's base label used for PC-relative addressing.
    uint32_t pc_rel_offset = info.pc_rel_label.IsBound()
        ? __ GetLabelLocation(&info.pc_rel_label)
        : __ GetPcRelBaseLabelLocation();
    linker_patches->push_back(Factory(high_offset, &dex_file, pc_rel_offset, offset_or_index));
  }
}
1600
// Collects all linker patches recorded during code generation into
// `linker_patches`. String/type patches become BSS-entry patches for app
// (non-boot-image) compiles and relative patches for boot-image compiles.
void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      pc_relative_dex_cache_patches_.size() +
      pc_relative_string_patches_.size() +
      pc_relative_type_patches_.size() +
      type_bss_entry_patches_.size();
  // Reserve up front; the final DCHECK_EQ verifies every patch was emitted.
  linker_patches->reserve(size);
  EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
                                                               linker_patches);
  if (!GetCompilerOptions().IsBootImage()) {
    // App compile: types never use relative patches here.
    DCHECK(pc_relative_type_patches_.empty());
    EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  } else {
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
                                                                linker_patches);
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  }
  EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
                                                              linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
1625
// Records a new PC-relative patch for the string at `string_index` in
// `dex_file` and returns its bookkeeping record (labels filled in later).
CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeStringPatch(
    const DexFile& dex_file, dex::StringIndex string_index) {
  return NewPcRelativePatch(dex_file, string_index.index_, &pc_relative_string_patches_);
}
1630
// Records a new PC-relative patch for the class at `type_index` in `dex_file`.
CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeTypePatch(
    const DexFile& dex_file, dex::TypeIndex type_index) {
  return NewPcRelativePatch(dex_file, type_index.index_, &pc_relative_type_patches_);
}
1635
// Records a new patch for the .bss entry of the class at `type_index`.
CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewTypeBssEntryPatch(
    const DexFile& dex_file, dex::TypeIndex type_index) {
  return NewPcRelativePatch(dex_file, type_index.index_, &type_bss_entry_patches_);
}
1640
// Records a new PC-relative patch for a dex cache array element at
// `element_offset` within `dex_file`'s dex cache arrays.
CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeDexCacheArrayPatch(
    const DexFile& dex_file, uint32_t element_offset) {
  return NewPcRelativePatch(dex_file, element_offset, &pc_relative_dex_cache_patches_);
}
1645
1646CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativePatch(
1647 const DexFile& dex_file, uint32_t offset_or_index, ArenaDeque<PcRelativePatchInfo>* patches) {
1648 patches->emplace_back(dex_file, offset_or_index);
1649 return &patches->back();
1650}
1651
// Returns the literal pool entry for `value`, creating it on first use so
// identical constants share a single pool slot.
Literal* CodeGeneratorMIPS::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
  return map->GetOrCreate(
      value,
      [this, value]() { return __ NewLiteral<uint32_t>(value); });
}
1657
// Returns a deduplicated literal holding a boot-image object address.
Literal* CodeGeneratorMIPS::DeduplicateBootImageAddressLiteral(uint32_t address) {
  return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
}
1661
// Emits the high half of a PC-relative address into `out`, binding the
// labels in `info` that the linker later uses to patch the placeholder
// immediates. On R6 this is a single AUIPC; on R2 it is LUI plus an ADDU
// with either a NAL-derived PC (base == ZERO) or a precomputed base register.
void CodeGeneratorMIPS::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info,
                                                             Register out,
                                                             Register base) {
  if (GetInstructionSetFeatures().IsR6()) {
    DCHECK_EQ(base, ZERO);
    __ Bind(&info->high_label);
    __ Bind(&info->pc_rel_label);
    // Add the high half of a 32-bit offset to PC.
    __ Auipc(out, /* placeholder */ 0x1234);
  } else {
    // If base is ZERO, emit NAL to obtain the actual base.
    if (base == ZERO) {
      // Generate a dummy PC-relative call to obtain PC.
      __ Nal();
    }
    __ Bind(&info->high_label);
    __ Lui(out, /* placeholder */ 0x1234);
    // If we emitted the NAL, bind the pc_rel_label, otherwise base is a register holding
    // the HMipsComputeBaseMethodAddress which has its own label stored in MipsAssembler.
    if (base == ZERO) {
      __ Bind(&info->pc_rel_label);
    }
    // Add the high half of a 32-bit offset to PC.
    __ Addu(out, out, (base == ZERO) ? RA : base);
  }
  // The immediately following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. lw, jialc, addiu).
}
1690
// Records a JIT root patch for the string `dex_index` in `dex_file` and
// remembers `handle` in jit_string_roots_ so the string survives as a GC
// root until the code is patched. Returns the new patch record.
CodeGeneratorMIPS::JitPatchInfo* CodeGeneratorMIPS::NewJitRootStringPatch(
    const DexFile& dex_file,
    dex::StringIndex dex_index,
    Handle<mirror::String> handle) {
  jit_string_roots_.Overwrite(StringReference(&dex_file, dex_index),
                              reinterpret_cast64<uint64_t>(handle.GetReference()));
  jit_string_patches_.emplace_back(dex_file, dex_index.index_);
  return &jit_string_patches_.back();
}
1700
// Records a JIT root patch for the class `dex_index` in `dex_file`; the
// class analogue of NewJitRootStringPatch.
CodeGeneratorMIPS::JitPatchInfo* CodeGeneratorMIPS::NewJitRootClassPatch(
    const DexFile& dex_file,
    dex::TypeIndex dex_index,
    Handle<mirror::Class> handle) {
  jit_class_roots_.Overwrite(TypeReference(&dex_file, dex_index),
                             reinterpret_cast64<uint64_t>(handle.GetReference()));
  jit_class_patches_.emplace_back(dex_file, dex_index.index_);
  return &jit_class_patches_.back();
}
1710
// Rewrites the two-instruction placeholder (lui 0x1234 + instr 0x5678) at
// the location recorded in `info` with the real address of the root-table
// entry `index_in_table`. Bytes are little-endian, so the 16-bit immediate
// of each instruction occupies bytes 0-1 of its 4-byte encoding.
void CodeGeneratorMIPS::PatchJitRootUse(uint8_t* code,
                                        const uint8_t* roots_data,
                                        const CodeGeneratorMIPS::JitPatchInfo& info,
                                        uint64_t index_in_table) const {
  uint32_t literal_offset = GetAssembler().GetLabelLocation(&info.high_label);
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  uint32_t addr32 = dchecked_integral_cast<uint32_t>(address);
  // lui reg, addr32_high
  // Verify the placeholder immediate 0x1234 and the LUI opcode bytes (0x3C in
  // the high byte) before patching.
  DCHECK_EQ(code[literal_offset + 0], 0x34);
  DCHECK_EQ(code[literal_offset + 1], 0x12);
  DCHECK_EQ((code[literal_offset + 2] & 0xE0), 0x00);
  DCHECK_EQ(code[literal_offset + 3], 0x3C);
  // instr reg, reg, addr32_low
  DCHECK_EQ(code[literal_offset + 4], 0x78);
  DCHECK_EQ(code[literal_offset + 5], 0x56);
  addr32 += (addr32 & 0x8000) << 1;  // Account for sign extension in "instr reg, reg, addr32_low".
  // lui reg, addr32_high
  code[literal_offset + 0] = static_cast<uint8_t>(addr32 >> 16);
  code[literal_offset + 1] = static_cast<uint8_t>(addr32 >> 24);
  // instr reg, reg, addr32_low
  code[literal_offset + 4] = static_cast<uint8_t>(addr32 >> 0);
  code[literal_offset + 5] = static_cast<uint8_t>(addr32 >> 8);
}
1735
// Patches every recorded JIT string and class root use in `code` with the
// address of the corresponding entry in the JIT root table at `roots_data`.
void CodeGeneratorMIPS::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
  for (const JitPatchInfo& info : jit_string_patches_) {
    // The table index was registered in NewJitRootStringPatch.
    const auto it = jit_string_roots_.find(StringReference(&info.target_dex_file,
                                                           dex::StringIndex(info.index)));
    DCHECK(it != jit_string_roots_.end());
    uint64_t index_in_table = it->second;
    PatchJitRootUse(code, roots_data, info, index_in_table);
  }
  for (const JitPatchInfo& info : jit_class_patches_) {
    // Same scheme for classes, registered in NewJitRootClassPatch.
    const auto it = jit_class_roots_.find(TypeReference(&info.target_dex_file,
                                                        dex::TypeIndex(info.index)));
    DCHECK(it != jit_class_roots_.end());
    uint64_t index_in_table = it->second;
    PatchJitRootUse(code, roots_data, info, index_in_table);
  }
}
1752
// Emits the write-barrier card marking for a reference store into `object`.
// Skips the marking when `value` is null (if the caller says null is
// possible). Uses AT and TMP as scratch registers.
void CodeGeneratorMIPS::MarkGCCard(Register object,
                                   Register value,
                                   bool value_can_be_null) {
  MipsLabel done;
  Register card = AT;
  Register temp = TMP;
  if (value_can_be_null) {
    __ Beqz(value, &done);
  }
  // Load the card table base from the thread.
  __ LoadFromOffset(kLoadWord,
                    card,
                    TR,
                    Thread::CardTableOffset<kMipsPointerSize>().Int32Value());
  // Address of the card for `object` = base + (object >> kCardShift); the
  // value stored is the card table base itself (its low byte marks "dirty").
  __ Srl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Addu(temp, card, temp);
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}
1773
// Marks the registers the register allocator must never hand out: fixed-role
// MIPS registers, assembler scratch registers, runtime-reserved registers,
// odd FPU registers (only even doubles are used) and, for debuggable graphs,
// the FPU callee-saves.
void CodeGeneratorMIPS::SetupBlockedRegisters() const {
  // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
  blocked_core_registers_[ZERO] = true;
  blocked_core_registers_[K0] = true;
  blocked_core_registers_[K1] = true;
  blocked_core_registers_[GP] = true;
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[RA] = true;

  // AT and TMP(T8) are used as temporary/scratch registers
  // (similar to how AT is used by MIPS assemblers).
  blocked_core_registers_[AT] = true;
  blocked_core_registers_[TMP] = true;
  blocked_fpu_registers_[FTMP] = true;

  // Reserve suspend and thread registers.
  blocked_core_registers_[S0] = true;
  blocked_core_registers_[TR] = true;

  // Reserve T9 for function calls
  blocked_core_registers_[T9] = true;

  // Reserve odd-numbered FPU registers.
  for (size_t i = 1; i < kNumberOfFRegisters; i += 2) {
    blocked_fpu_registers_[i] = true;
  }

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
    }
  }
}
1810
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001811size_t CodeGeneratorMIPS::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1812 __ StoreToOffset(kStoreWord, Register(reg_id), SP, stack_index);
1813 return kMipsWordSize;
1814}
1815
1816size_t CodeGeneratorMIPS::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1817 __ LoadFromOffset(kLoadWord, Register(reg_id), SP, stack_index);
1818 return kMipsWordSize;
1819}
1820
1821size_t CodeGeneratorMIPS::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1822 __ StoreDToOffset(FRegister(reg_id), SP, stack_index);
1823 return kMipsDoublewordSize;
1824}
1825
1826size_t CodeGeneratorMIPS::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1827 __ LoadDFromOffset(FRegister(reg_id), SP, stack_index);
1828 return kMipsDoublewordSize;
1829}
1830
1831void CodeGeneratorMIPS::DumpCoreRegister(std::ostream& stream, int reg) const {
Vladimir Marko623a7a22016-02-02 18:14:52 +00001832 stream << Register(reg);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001833}
1834
1835void CodeGeneratorMIPS::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
Vladimir Marko623a7a22016-02-02 18:14:52 +00001836 stream << FRegister(reg);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001837}
1838
Serban Constantinescufca16662016-07-14 09:21:59 +01001839constexpr size_t kMipsDirectEntrypointRuntimeOffset = 16;
1840
// Emits a call to the quick runtime `entrypoint` and, when the entrypoint
// requires one, records a stack map at `dex_pc` for `instruction`.
void CodeGeneratorMIPS::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                      HInstruction* instruction,
                                      uint32_t dex_pc,
                                      SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kMipsPointerSize>(entrypoint).Int32Value(),
                        IsDirectEntrypoint(entrypoint));
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1852
// Emits a runtime call by raw thread-offset without recording a stack map;
// only valid for entrypoints that never need one (validated below).
void CodeGeneratorMIPS::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                            HInstruction* instruction,
                                                            SlowPathCode* slow_path,
                                                            bool direct) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset, direct);
}
1860
// Loads the entrypoint from the thread register and calls it through T9.
// Reordering is disabled so the delay slot can be filled manually: either
// the argument-area reservation (direct entrypoints) or a NOP.
void CodeGeneratorMIPS::GenerateInvokeRuntime(int32_t entry_point_offset, bool direct) {
  bool reordering = __ SetReorder(false);
  __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
  __ Jalr(T9);
  if (direct) {
    // Reserve argument space on stack (for $a0-$a3) for
    // entrypoints that directly reference native implementations.
    // Called function may use this space to store $a0-$a3 regs.
    __ IncreaseFrameSize(kMipsDirectEntrypointRuntimeOffset);  // Single instruction in delay slot.
    __ DecreaseFrameSize(kMipsDirectEntrypointRuntimeOffset);
  } else {
    __ Nop();  // In delay slot.
  }
  __ SetReorder(reordering);
}
1876
// Emits a class-initialization check on `class_reg`: branches to `slow_path`
// if the class status is below kStatusInitialized, then issues a SYNC so
// initialized-state reads are properly ordered. Clobbers TMP and AT.
void InstructionCodeGeneratorMIPS::GenerateClassInitializationCheck(SlowPathCodeMIPS* slow_path,
                                                                    Register class_reg) {
  __ LoadFromOffset(kLoadWord, TMP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ LoadConst32(AT, mirror::Class::kStatusInitialized);
  __ Blt(TMP, AT, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  __ Sync(0);
  __ Bind(slow_path->GetExitLabel());
}
1886
// Emits a full memory barrier; the barrier kind is ignored because MIPS
// SYNC with stype 0 already provides the strongest ordering.
void InstructionCodeGeneratorMIPS::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // Only stype 0 is supported.
}
1890
// Emits a thread-suspension check: reads the thread flags and enters the
// suspend slow path when any flag is set. With a `successor` block the
// fast path branches straight to it; otherwise execution falls through to
// the slow path's return label.
void InstructionCodeGeneratorMIPS::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                        HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathMIPS(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMipsPointerSize>().Int32Value());
  if (successor == nullptr) {
    __ Bnez(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Beqz(TMP, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
1910
// Constructs the instruction visitor, caching the code generator and its
// assembler for use by the Visit* methods.
InstructionCodeGeneratorMIPS::InstructionCodeGeneratorMIPS(HGraph* graph,
                                                           CodeGeneratorMIPS* codegen)
      : InstructionCodeGenerator(graph, codegen),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
1916
// Builds the LocationSummary for add/sub/and/or/xor. For kPrimInt the right
// operand may stay a constant when it fits the matching immediate form
// (unsigned 16-bit for logicals, signed 16-bit for add/sub); kPrimLong
// accepts register-or-constant; float/double require FPU registers.
void LocationsBuilderMIPS::HandleBinaryOp(HBinaryOperation* instruction) {
  DCHECK_EQ(instruction->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type type = instruction->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      HInstruction* right = instruction->InputAt(1);
      bool can_use_imm = false;
      if (right->IsConstant()) {
        int32_t imm = CodeGenerator::GetInt32ValueOf(right->AsConstant());
        if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
          // ANDI/ORI/XORI zero-extend their immediate.
          can_use_imm = IsUint<16>(imm);
        } else if (instruction->IsAdd()) {
          can_use_imm = IsInt<16>(imm);
        } else {
          DCHECK(instruction->IsSub());
          // Subtraction is emitted as ADDIU with the negated immediate.
          can_use_imm = IsInt<16>(-imm);
        }
      }
      if (can_use_imm)
        locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
      else
        locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // Only add/sub exist for FP among the ops routed through here.
      DCHECK(instruction->IsAdd() || instruction->IsSub());
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
  }
}
1964
1965void InstructionCodeGeneratorMIPS::HandleBinaryOp(HBinaryOperation* instruction) {
1966 Primitive::Type type = instruction->GetType();
1967 LocationSummary* locations = instruction->GetLocations();
1968
1969 switch (type) {
1970 case Primitive::kPrimInt: {
1971 Register dst = locations->Out().AsRegister<Register>();
1972 Register lhs = locations->InAt(0).AsRegister<Register>();
1973 Location rhs_location = locations->InAt(1);
1974
1975 Register rhs_reg = ZERO;
1976 int32_t rhs_imm = 0;
1977 bool use_imm = rhs_location.IsConstant();
1978 if (use_imm) {
1979 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
1980 } else {
1981 rhs_reg = rhs_location.AsRegister<Register>();
1982 }
1983
1984 if (instruction->IsAnd()) {
1985 if (use_imm)
1986 __ Andi(dst, lhs, rhs_imm);
1987 else
1988 __ And(dst, lhs, rhs_reg);
1989 } else if (instruction->IsOr()) {
1990 if (use_imm)
1991 __ Ori(dst, lhs, rhs_imm);
1992 else
1993 __ Or(dst, lhs, rhs_reg);
1994 } else if (instruction->IsXor()) {
1995 if (use_imm)
1996 __ Xori(dst, lhs, rhs_imm);
1997 else
1998 __ Xor(dst, lhs, rhs_reg);
1999 } else if (instruction->IsAdd()) {
2000 if (use_imm)
2001 __ Addiu(dst, lhs, rhs_imm);
2002 else
2003 __ Addu(dst, lhs, rhs_reg);
2004 } else {
2005 DCHECK(instruction->IsSub());
2006 if (use_imm)
2007 __ Addiu(dst, lhs, -rhs_imm);
2008 else
2009 __ Subu(dst, lhs, rhs_reg);
2010 }
2011 break;
2012 }
2013
2014 case Primitive::kPrimLong: {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002015 Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
2016 Register dst_low = locations->Out().AsRegisterPairLow<Register>();
2017 Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
2018 Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002019 Location rhs_location = locations->InAt(1);
2020 bool use_imm = rhs_location.IsConstant();
2021 if (!use_imm) {
2022 Register rhs_high = rhs_location.AsRegisterPairHigh<Register>();
2023 Register rhs_low = rhs_location.AsRegisterPairLow<Register>();
2024 if (instruction->IsAnd()) {
2025 __ And(dst_low, lhs_low, rhs_low);
2026 __ And(dst_high, lhs_high, rhs_high);
2027 } else if (instruction->IsOr()) {
2028 __ Or(dst_low, lhs_low, rhs_low);
2029 __ Or(dst_high, lhs_high, rhs_high);
2030 } else if (instruction->IsXor()) {
2031 __ Xor(dst_low, lhs_low, rhs_low);
2032 __ Xor(dst_high, lhs_high, rhs_high);
2033 } else if (instruction->IsAdd()) {
2034 if (lhs_low == rhs_low) {
2035 // Special case for lhs = rhs and the sum potentially overwriting both lhs and rhs.
2036 __ Slt(TMP, lhs_low, ZERO);
2037 __ Addu(dst_low, lhs_low, rhs_low);
2038 } else {
2039 __ Addu(dst_low, lhs_low, rhs_low);
2040 // If the sum overwrites rhs, lhs remains unchanged, otherwise rhs remains unchanged.
2041 __ Sltu(TMP, dst_low, (dst_low == rhs_low) ? lhs_low : rhs_low);
2042 }
2043 __ Addu(dst_high, lhs_high, rhs_high);
2044 __ Addu(dst_high, dst_high, TMP);
2045 } else {
2046 DCHECK(instruction->IsSub());
2047 __ Sltu(TMP, lhs_low, rhs_low);
2048 __ Subu(dst_low, lhs_low, rhs_low);
2049 __ Subu(dst_high, lhs_high, rhs_high);
2050 __ Subu(dst_high, dst_high, TMP);
2051 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002052 } else {
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002053 int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
2054 if (instruction->IsOr()) {
2055 uint32_t low = Low32Bits(value);
2056 uint32_t high = High32Bits(value);
2057 if (IsUint<16>(low)) {
2058 if (dst_low != lhs_low || low != 0) {
2059 __ Ori(dst_low, lhs_low, low);
2060 }
2061 } else {
2062 __ LoadConst32(TMP, low);
2063 __ Or(dst_low, lhs_low, TMP);
2064 }
2065 if (IsUint<16>(high)) {
2066 if (dst_high != lhs_high || high != 0) {
2067 __ Ori(dst_high, lhs_high, high);
2068 }
2069 } else {
2070 if (high != low) {
2071 __ LoadConst32(TMP, high);
2072 }
2073 __ Or(dst_high, lhs_high, TMP);
2074 }
2075 } else if (instruction->IsXor()) {
2076 uint32_t low = Low32Bits(value);
2077 uint32_t high = High32Bits(value);
2078 if (IsUint<16>(low)) {
2079 if (dst_low != lhs_low || low != 0) {
2080 __ Xori(dst_low, lhs_low, low);
2081 }
2082 } else {
2083 __ LoadConst32(TMP, low);
2084 __ Xor(dst_low, lhs_low, TMP);
2085 }
2086 if (IsUint<16>(high)) {
2087 if (dst_high != lhs_high || high != 0) {
2088 __ Xori(dst_high, lhs_high, high);
2089 }
2090 } else {
2091 if (high != low) {
2092 __ LoadConst32(TMP, high);
2093 }
2094 __ Xor(dst_high, lhs_high, TMP);
2095 }
2096 } else if (instruction->IsAnd()) {
2097 uint32_t low = Low32Bits(value);
2098 uint32_t high = High32Bits(value);
2099 if (IsUint<16>(low)) {
2100 __ Andi(dst_low, lhs_low, low);
2101 } else if (low != 0xFFFFFFFF) {
2102 __ LoadConst32(TMP, low);
2103 __ And(dst_low, lhs_low, TMP);
2104 } else if (dst_low != lhs_low) {
2105 __ Move(dst_low, lhs_low);
2106 }
2107 if (IsUint<16>(high)) {
2108 __ Andi(dst_high, lhs_high, high);
2109 } else if (high != 0xFFFFFFFF) {
2110 if (high != low) {
2111 __ LoadConst32(TMP, high);
2112 }
2113 __ And(dst_high, lhs_high, TMP);
2114 } else if (dst_high != lhs_high) {
2115 __ Move(dst_high, lhs_high);
2116 }
2117 } else {
2118 if (instruction->IsSub()) {
2119 value = -value;
2120 } else {
2121 DCHECK(instruction->IsAdd());
2122 }
2123 int32_t low = Low32Bits(value);
2124 int32_t high = High32Bits(value);
2125 if (IsInt<16>(low)) {
2126 if (dst_low != lhs_low || low != 0) {
2127 __ Addiu(dst_low, lhs_low, low);
2128 }
2129 if (low != 0) {
2130 __ Sltiu(AT, dst_low, low);
2131 }
2132 } else {
2133 __ LoadConst32(TMP, low);
2134 __ Addu(dst_low, lhs_low, TMP);
2135 __ Sltu(AT, dst_low, TMP);
2136 }
2137 if (IsInt<16>(high)) {
2138 if (dst_high != lhs_high || high != 0) {
2139 __ Addiu(dst_high, lhs_high, high);
2140 }
2141 } else {
2142 if (high != low) {
2143 __ LoadConst32(TMP, high);
2144 }
2145 __ Addu(dst_high, lhs_high, TMP);
2146 }
2147 if (low != 0) {
2148 __ Addu(dst_high, dst_high, AT);
2149 }
2150 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002151 }
2152 break;
2153 }
2154
2155 case Primitive::kPrimFloat:
2156 case Primitive::kPrimDouble: {
2157 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
2158 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
2159 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
2160 if (instruction->IsAdd()) {
2161 if (type == Primitive::kPrimFloat) {
2162 __ AddS(dst, lhs, rhs);
2163 } else {
2164 __ AddD(dst, lhs, rhs);
2165 }
2166 } else {
2167 DCHECK(instruction->IsSub());
2168 if (type == Primitive::kPrimFloat) {
2169 __ SubS(dst, lhs, rhs);
2170 } else {
2171 __ SubD(dst, lhs, rhs);
2172 }
2173 }
2174 break;
2175 }
2176
2177 default:
2178 LOG(FATAL) << "Unexpected binary operation type " << type;
2179 }
2180}
2181
2182void LocationsBuilderMIPS::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002183 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002184
2185 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
2186 Primitive::Type type = instr->GetResultType();
2187 switch (type) {
2188 case Primitive::kPrimInt:
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002189 locations->SetInAt(0, Location::RequiresRegister());
2190 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
2191 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2192 break;
2193 case Primitive::kPrimLong:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002194 locations->SetInAt(0, Location::RequiresRegister());
2195 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
2196 locations->SetOut(Location::RequiresRegister());
2197 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002198 default:
2199 LOG(FATAL) << "Unexpected shift type " << type;
2200 }
2201}
2202
// Number of bits in one MIPS machine word; used below to split 64-bit shift
// distances into the "< 32" and ">= 32" cases handled with 32-bit operations.
static constexpr size_t kMipsBitsPerWord = kMipsWordSize * kBitsPerByte;
2204
// Emits MIPS32 code for shift/rotate operations (SHL, SHR, USHR, ROR) on
// 32-bit and 64-bit values. 64-bit values live in register pairs
// (high/low words), so long shifts are synthesized from 32-bit shifts,
// using TMP and AT as scratch registers.
void InstructionCodeGeneratorMIPS::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  Primitive::Type type = instr->GetType();

  // The shift distance is either an immediate or a register.
  Location rhs_location = locations->InAt(1);
  bool use_imm = rhs_location.IsConstant();
  Register rhs_reg = use_imm ? ZERO : rhs_location.AsRegister<Register>();
  int64_t rhs_imm = use_imm ? CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()) : 0;
  // Shift distances are taken modulo the operand width (31 for int, 63 for long).
  const uint32_t shift_mask =
      (type == Primitive::kPrimInt) ? kMaxIntShiftDistance : kMaxLongShiftDistance;
  const uint32_t shift_value = rhs_imm & shift_mask;
  // Are the INS (Insert Bit Field) and ROTR instructions supported?
  bool has_ins_rotr = codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register lhs = locations->InAt(0).AsRegister<Register>();
      if (use_imm) {
        if (shift_value == 0) {
          // Shift by zero is a plain copy (elided when src == dst).
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (instr->IsShl()) {
          __ Sll(dst, lhs, shift_value);
        } else if (instr->IsShr()) {
          __ Sra(dst, lhs, shift_value);
        } else if (instr->IsUShr()) {
          __ Srl(dst, lhs, shift_value);
        } else {
          // Rotate right. Without ROTR (pre-R2), synthesize it as
          // (lhs << (32 - d)) | (lhs >> d); shift_value != 0 here, and the
          // mask keeps the left-shift distance in [0, 31].
          if (has_ins_rotr) {
            __ Rotr(dst, lhs, shift_value);
          } else {
            __ Sll(TMP, lhs, (kMipsBitsPerWord - shift_value) & shift_mask);
            __ Srl(dst, lhs, shift_value);
            __ Or(dst, dst, TMP);
          }
        }
      } else {
        // Variable shift distance in rhs_reg; hardware masks it to 5 bits.
        if (instr->IsShl()) {
          __ Sllv(dst, lhs, rhs_reg);
        } else if (instr->IsShr()) {
          __ Srav(dst, lhs, rhs_reg);
        } else if (instr->IsUShr()) {
          __ Srlv(dst, lhs, rhs_reg);
        } else {
          if (has_ins_rotr) {
            __ Rotrv(dst, lhs, rhs_reg);
          } else {
            __ Subu(TMP, ZERO, rhs_reg);
            // 32-bit shift instructions use the 5 least significant bits of the shift count, so
            // shifting by `-rhs_reg` is equivalent to shifting by `(32 - rhs_reg) & 31`. The case
            // when `rhs_reg & 31 == 0` is OK even though we don't shift `lhs` left all the way out
            // by 32, because the result in this case is computed as `(lhs >> 0) | (lhs << 0)`,
            // IOW, the OR'd values are equal.
            __ Sllv(TMP, lhs, TMP);
            __ Srlv(dst, lhs, rhs_reg);
            __ Or(dst, dst, TMP);
          }
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      if (use_imm) {
        if (shift_value == 0) {
          // Shift by zero: just move the 64-bit value.
          codegen_->Move64(locations->Out(), locations->InAt(0));
        } else if (shift_value < kMipsBitsPerWord) {
          // Distance in [1, 31]: each result word combines bits from both
          // input words. With INS (R2+) the carried-over bits are inserted
          // directly; otherwise they are OR'd in via TMP.
          if (has_ins_rotr) {
            if (instr->IsShl()) {
              __ Srl(dst_high, lhs_low, kMipsBitsPerWord - shift_value);
              __ Ins(dst_high, lhs_high, shift_value, kMipsBitsPerWord - shift_value);
              __ Sll(dst_low, lhs_low, shift_value);
            } else if (instr->IsShr()) {
              __ Srl(dst_low, lhs_low, shift_value);
              __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
              __ Sra(dst_high, lhs_high, shift_value);
            } else if (instr->IsUShr()) {
              __ Srl(dst_low, lhs_low, shift_value);
              __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
              __ Srl(dst_high, lhs_high, shift_value);
            } else {
              // 64-bit rotate right: like USHR but the bits shifted out of
              // the low word re-enter the high word.
              __ Srl(dst_low, lhs_low, shift_value);
              __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
              __ Srl(dst_high, lhs_high, shift_value);
              __ Ins(dst_high, lhs_low, kMipsBitsPerWord - shift_value, shift_value);
            }
          } else {
            if (instr->IsShl()) {
              __ Sll(dst_low, lhs_low, shift_value);
              __ Srl(TMP, lhs_low, kMipsBitsPerWord - shift_value);
              __ Sll(dst_high, lhs_high, shift_value);
              __ Or(dst_high, dst_high, TMP);
            } else if (instr->IsShr()) {
              __ Sra(dst_high, lhs_high, shift_value);
              __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value);
              __ Srl(dst_low, lhs_low, shift_value);
              __ Or(dst_low, dst_low, TMP);
            } else if (instr->IsUShr()) {
              __ Srl(dst_high, lhs_high, shift_value);
              __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value);
              __ Srl(dst_low, lhs_low, shift_value);
              __ Or(dst_low, dst_low, TMP);
            } else {
              __ Srl(TMP, lhs_low, shift_value);
              __ Sll(dst_low, lhs_high, kMipsBitsPerWord - shift_value);
              __ Or(dst_low, dst_low, TMP);
              __ Srl(TMP, lhs_high, shift_value);
              __ Sll(dst_high, lhs_low, kMipsBitsPerWord - shift_value);
              __ Or(dst_high, dst_high, TMP);
            }
          }
        } else {
          // Distance in [32, 63]: each result word comes from a single input
          // word shifted by (distance - 32); the other word is zero/sign fill.
          const uint32_t shift_value_high = shift_value - kMipsBitsPerWord;
          if (instr->IsShl()) {
            __ Sll(dst_high, lhs_low, shift_value_high);
            __ Move(dst_low, ZERO);
          } else if (instr->IsShr()) {
            __ Sra(dst_low, lhs_high, shift_value_high);
            // Fill the high word with the sign of the (already shifted) result.
            __ Sra(dst_high, dst_low, kMipsBitsPerWord - 1);
          } else if (instr->IsUShr()) {
            __ Srl(dst_low, lhs_high, shift_value_high);
            __ Move(dst_high, ZERO);
          } else {
            if (shift_value == kMipsBitsPerWord) {
              // 64-bit rotation by 32 is just a swap.
              __ Move(dst_low, lhs_high);
              __ Move(dst_high, lhs_low);
            } else {
              if (has_ins_rotr) {
                __ Srl(dst_low, lhs_high, shift_value_high);
                __ Ins(dst_low, lhs_low, kMipsBitsPerWord - shift_value_high, shift_value_high);
                __ Srl(dst_high, lhs_low, shift_value_high);
                __ Ins(dst_high, lhs_high, kMipsBitsPerWord - shift_value_high, shift_value_high);
              } else {
                __ Sll(TMP, lhs_low, kMipsBitsPerWord - shift_value_high);
                __ Srl(dst_low, lhs_high, shift_value_high);
                __ Or(dst_low, dst_low, TMP);
                __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value_high);
                __ Srl(dst_high, lhs_low, shift_value_high);
                __ Or(dst_high, dst_high, TMP);
              }
            }
          }
        }
      } else {
        // Variable 64-bit shift distance. First compute the result as if the
        // distance were (rhs_reg & 31), using AT = ~rhs_reg (i.e. 31 - d mod 32
        // for the cross-word bits, combined with an extra shift by 1). Then, if
        // bit 5 of the distance is set (Andi with kMipsBitsPerWord == 32), fix
        // up by moving/zero- or sign-filling words; otherwise branch to `done`.
        MipsLabel done;
        if (instr->IsShl()) {
          __ Sllv(dst_low, lhs_low, rhs_reg);
          __ Nor(AT, ZERO, rhs_reg);
          __ Srl(TMP, lhs_low, 1);
          __ Srlv(TMP, TMP, AT);
          __ Sllv(dst_high, lhs_high, rhs_reg);
          __ Or(dst_high, dst_high, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(dst_high, dst_low);
          __ Move(dst_low, ZERO);
        } else if (instr->IsShr()) {
          __ Srav(dst_high, lhs_high, rhs_reg);
          __ Nor(AT, ZERO, rhs_reg);
          __ Sll(TMP, lhs_high, 1);
          __ Sllv(TMP, TMP, AT);
          __ Srlv(dst_low, lhs_low, rhs_reg);
          __ Or(dst_low, dst_low, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(dst_low, dst_high);
          __ Sra(dst_high, dst_high, 31);
        } else if (instr->IsUShr()) {
          __ Srlv(dst_high, lhs_high, rhs_reg);
          __ Nor(AT, ZERO, rhs_reg);
          __ Sll(TMP, lhs_high, 1);
          __ Sllv(TMP, TMP, AT);
          __ Srlv(dst_low, lhs_low, rhs_reg);
          __ Or(dst_low, dst_low, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(dst_low, dst_high);
          __ Move(dst_high, ZERO);
        } else {
          // Rotate right: the >= 32 fix-up is a word swap (via TMP).
          __ Nor(AT, ZERO, rhs_reg);
          __ Srlv(TMP, lhs_low, rhs_reg);
          __ Sll(dst_low, lhs_high, 1);
          __ Sllv(dst_low, dst_low, AT);
          __ Or(dst_low, dst_low, TMP);
          __ Srlv(TMP, lhs_high, rhs_reg);
          __ Sll(dst_high, lhs_low, 1);
          __ Sllv(dst_high, dst_high, AT);
          __ Or(dst_high, dst_high, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(TMP, dst_high);
          __ Move(dst_high, dst_low);
          __ Move(dst_low, TMP);
        }
        __ Bind(&done);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
2416
// ADD locations are handled by the shared binary-op helper.
void LocationsBuilderMIPS::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2420
// ADD code generation is handled by the shared binary-op helper.
void InstructionCodeGeneratorMIPS::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2424
// AND locations are handled by the shared binary-op helper.
void LocationsBuilderMIPS::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2428
// AND code generation is handled by the shared binary-op helper.
void InstructionCodeGeneratorMIPS::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2432
2433void LocationsBuilderMIPS::VisitArrayGet(HArrayGet* instruction) {
Alexey Frunze15958152017-02-09 19:08:30 -08002434 Primitive::Type type = instruction->GetType();
2435 bool object_array_get_with_read_barrier =
2436 kEmitCompilerReadBarrier && (type == Primitive::kPrimNot);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002437 LocationSummary* locations =
Alexey Frunze15958152017-02-09 19:08:30 -08002438 new (GetGraph()->GetArena()) LocationSummary(instruction,
2439 object_array_get_with_read_barrier
2440 ? LocationSummary::kCallOnSlowPath
2441 : LocationSummary::kNoCall);
Alexey Frunzec61c0762017-04-10 13:54:23 -07002442 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
2443 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
2444 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002445 locations->SetInAt(0, Location::RequiresRegister());
2446 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexey Frunze15958152017-02-09 19:08:30 -08002447 if (Primitive::IsFloatingPointType(type)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002448 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2449 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08002450 // The output overlaps in the case of an object array get with
2451 // read barriers enabled: we do not want the move to overwrite the
2452 // array's location, as we need it to emit the read barrier.
2453 locations->SetOut(Location::RequiresRegister(),
2454 object_array_get_with_read_barrier
2455 ? Location::kOutputOverlap
2456 : Location::kNoOutputOverlap);
2457 }
2458 // We need a temporary register for the read barrier marking slow
2459 // path in CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier.
2460 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
2461 locations->AddTemp(Location::RequiresRegister());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002462 }
2463}
2464
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002465static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS* codegen) {
2466 auto null_checker = [codegen, instruction]() {
2467 codegen->MaybeRecordImplicitNullCheck(instruction);
Alexey Frunze2923db72016-08-20 01:55:47 -07002468 };
2469 return null_checker;
2470}
2471
// Emits code for an array element load. The element address is either
// obj + data_offset + constant_index * element_size, or computed with
// Addu/ShiftAndAdd into TMP for a register index. Loads through the
// null_checker record an implicit null check at the load instruction.
void InstructionCodeGeneratorMIPS::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = obj_loc.AsRegister<Register>();
  Location out_loc = locations->Out();
  Location index = locations->InAt(1);
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  Primitive::Type type = instruction->GetType();
  // String.charAt() may need to handle the compressed (8-bit) representation.
  const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
                                        instruction->IsStringCharAt();
  switch (type) {
    case Primitive::kPrimBoolean: {
      // 8-bit zero-extending load.
      Register out = out_loc.AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
      } else {
        __ Addu(TMP, obj, index.AsRegister<Register>());
        __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimByte: {
      // 8-bit sign-extending load.
      Register out = out_loc.AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
      } else {
        __ Addu(TMP, obj, index.AsRegister<Register>());
        __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimShort: {
      // 16-bit sign-extending load.
      Register out = out_loc.AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
      } else {
        __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_2, TMP);
        __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimChar: {
      // 16-bit zero-extending load; for String.charAt() a run-time check on
      // the compression flag selects between a byte and a halfword load.
      Register out = out_loc.AsRegister<Register>();
      if (maybe_compressed_char_at) {
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        // The count word's null check covers the subsequent element loads.
        __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
        __ Sll(TMP, TMP, 31);  // Extract compression flag into the most significant bit of TMP.
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
      }
      if (index.IsConstant()) {
        int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
        if (maybe_compressed_char_at) {
          // TMP != 0 (MSB set) means uncompressed, per the static_assert above.
          MipsLabel uncompressed_load, done;
          __ Bnez(TMP, &uncompressed_load);
          __ LoadFromOffset(kLoadUnsignedByte,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_1));
          __ B(&done);
          __ Bind(&uncompressed_load);
          __ LoadFromOffset(kLoadUnsignedHalfword,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_2));
          __ Bind(&done);
        } else {
          __ LoadFromOffset(kLoadUnsignedHalfword,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_2),
                            null_checker);
        }
      } else {
        Register index_reg = index.AsRegister<Register>();
        if (maybe_compressed_char_at) {
          MipsLabel uncompressed_load, done;
          __ Bnez(TMP, &uncompressed_load);
          __ Addu(TMP, obj, index_reg);
          __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
          __ B(&done);
          __ Bind(&uncompressed_load);
          __ ShiftAndAdd(TMP, index_reg, obj, TIMES_2, TMP);
          __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
          __ Bind(&done);
        } else {
          __ ShiftAndAdd(TMP, index_reg, obj, TIMES_2, TMP);
          __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
        }
      }
      break;
    }

    case Primitive::kPrimInt: {
      // 32-bit load.
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      Register out = out_loc.AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset, null_checker);
      } else {
        __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
        __ LoadFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimNot: {
      // Object reference load, possibly with a read barrier.
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        Location temp = locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier call.
        codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
                                                        out_loc,
                                                        obj,
                                                        data_offset,
                                                        index,
                                                        temp,
                                                        /* needs_null_check */ true);
      } else {
        Register out = out_loc.AsRegister<Register>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ LoadFromOffset(kLoadWord, out, obj, offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
          __ LoadFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction,
                                                 out_loc,
                                                 out_loc,
                                                 obj_loc,
                                                 data_offset,
                                                 index);
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit load into a register pair (the pair's low register is passed;
      // the assembler helper loads both words).
      Register out = out_loc.AsRegisterPairLow<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
      } else {
        __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_8, TMP);
        __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // 32-bit FPU load.
      FRegister out = out_loc.AsFpuRegister<FRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadSFromOffset(out, obj, offset, null_checker);
      } else {
        __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
        __ LoadSFromOffset(out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      // 64-bit FPU load.
      FRegister out = out_loc.AsFpuRegister<FRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadDFromOffset(out, obj, offset, null_checker);
      } else {
        __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_8, TMP);
        __ LoadDFromOffset(out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2678
// Array length needs only the array reference in a register; the output may
// share a register with the input.
void LocationsBuilderMIPS::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
2684
// Loads the array length field. The implicit null check is recorded
// immediately after the load, which is the instruction that can fault.
void InstructionCodeGeneratorMIPS::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  __ LoadFromOffset(kLoadWord, out, obj, offset);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  // Mask out compression flag from String's array length.
  if (mirror::kUseStringCompression && instruction->IsStringLength()) {
    __ Srl(out, out, 1u);
  }
}
2697
Alexey Frunzef58b2482016-09-02 22:14:06 -07002698Location LocationsBuilderMIPS::RegisterOrZeroConstant(HInstruction* instruction) {
2699 return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
2700 ? Location::ConstantLocation(instruction->AsConstant())
2701 : Location::RequiresRegister();
2702}
2703
2704Location LocationsBuilderMIPS::FpuRegisterOrConstantForStore(HInstruction* instruction) {
2705 // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
2706 // We can store a non-zero float or double constant without first loading it into the FPU,
2707 // but we should only prefer this if the constant has a single use.
2708 if (instruction->IsConstant() &&
2709 (instruction->AsConstant()->IsZeroBitPattern() ||
2710 instruction->GetUses().HasExactlyOneElement())) {
2711 return Location::ConstantLocation(instruction->AsConstant());
2712 // Otherwise fall through and require an FPU register for the constant.
2713 }
2714 return Location::RequiresFpuRegister();
2715}
2716
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002717void LocationsBuilderMIPS::VisitArraySet(HArraySet* instruction) {
Alexey Frunze15958152017-02-09 19:08:30 -08002718 Primitive::Type value_type = instruction->GetComponentType();
2719
2720 bool needs_write_barrier =
2721 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
2722 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
2723
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002724 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2725 instruction,
Alexey Frunze15958152017-02-09 19:08:30 -08002726 may_need_runtime_call_for_type_check ?
2727 LocationSummary::kCallOnSlowPath :
2728 LocationSummary::kNoCall);
2729
2730 locations->SetInAt(0, Location::RequiresRegister());
2731 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2732 if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
2733 locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002734 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08002735 locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
2736 }
2737 if (needs_write_barrier) {
2738 // Temporary register for the write barrier.
2739 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002740 }
2741}
2742
// Emits the code for an array element store. The element address is formed
// either as `obj` + immediate offset (constant index) or through a base
// register (TMP) holding `obj` + scaled index. Reference stores may need a
// type check with a slow path and a GC card mark (write barrier).
void InstructionCodeGeneratorMIPS::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);
  Location value_location = locations->InAt(2);
  Primitive::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  // Callback passed to the stores below so they can record an implicit null
  // check on `obj`; see GetImplicitNullChecker for the exact conditions.
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);
  // With a constant index the store addresses off `obj` directly; otherwise
  // TMP is used to hold `obj` + scaled index.
  Register base_reg = index.IsConstant() ? obj : TMP;

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
      } else {
        // Byte elements: no scaling needed, plain add.
        __ Addu(base_reg, obj, index.AsRegister<Register>());
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
      } else {
        Register value = value_location.AsRegister<Register>();
        __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
      } else {
        // base_reg = obj + (index << 1).
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_2, base_reg);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        Register value = value_location.AsRegister<Register>();
        __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimInt: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        Register value = value_location.AsRegister<Register>();
        __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimNot: {
      if (value_location.IsConstant()) {
        // Just setting null. No type check or write barrier is needed
        // (asserted below).
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        if (index.IsConstant()) {
          data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
        } else {
          __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
        }
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        DCHECK_EQ(value, 0);
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      Register value = value_location.AsRegister<Register>();
      Register temp1 = locations->GetTemp(0).AsRegister<Register>();
      Register temp2 = TMP;  // Doesn't need to survive slow path.
      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
      MipsLabel done;
      SlowPathCodeMIPS* slow_path = nullptr;

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathMIPS(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // Storing null always passes the type check; store it and skip
          // the checks below.
          MipsLabel non_zero;
          __ Bnez(value, &non_zero);
          uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
          if (index.IsConstant()) {
            data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
          } else {
            __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
          }
          __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
          __ B(&done);
          __ Bind(&non_zero);
        }

        // Note that when read barriers are enabled, the type checks
        // are performed without read barriers.  This is fine, even in
        // the case where a class object is in the from-space after
        // the flip, as a comparison involving such a type would not
        // produce a false positive; it may of course produce a false
        // negative, in which case we would take the ArraySet slow
        // path.

        // /* HeapReference<Class> */ temp1 = obj->klass_
        __ LoadFromOffset(kLoadWord, temp1, obj, class_offset, null_checker);
        __ MaybeUnpoisonHeapReference(temp1);

        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
        // /* HeapReference<Class> */ temp2 = value->klass_
        __ LoadFromOffset(kLoadWord, temp2, value, class_offset);
        // If heap poisoning is enabled, no need to unpoison `temp1`
        // nor `temp2`, as we are comparing two poisoned references.

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // The value's class only needs to derive from Object for the
          // store to be valid; check the superclass chain one level.
          MipsLabel do_put;
          __ Beq(temp1, temp2, &do_put);
          // If heap poisoning is enabled, the `temp1` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp1);

          // /* HeapReference<Class> */ temp1 = temp1->super_class_
          __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
          // If heap poisoning is enabled, no need to unpoison
          // `temp1`, as we are comparing against null below.
          __ Bnez(temp1, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ Bne(temp1, temp2, slow_path->GetEntryLabel());
        }
      }

      Register source = value;
      if (kPoisonHeapReferences) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        __ Move(temp1, value);
        __ PoisonHeapReference(temp1);
        source = temp1;
      }

      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
      }
      // The actual reference store.
      __ StoreToOffset(kStoreWord, source, base_reg, data_offset);

      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      // Dirty the card for `obj` so the GC sees the cross-reference.
      codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());

      if (done.IsLinked()) {
        __ Bind(&done);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_8, base_reg);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        // The assembler consumes the register pair starting at the low half.
        Register value = value_location.AsRegisterPairLow<Register>();
        __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
      }
      if (value_location.IsConstant()) {
        // Constant bit pattern is stored via a core register.
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FRegister value = value_location.AsFpuRegister<FRegister>();
        __ StoreSToOffset(value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_8, base_reg);
      }
      if (value_location.IsConstant()) {
        // Constant bit pattern is stored via core registers.
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FRegister value = value_location.AsFpuRegister<FRegister>();
        __ StoreDToOffset(value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2980
2981void LocationsBuilderMIPS::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002982 RegisterSet caller_saves = RegisterSet::Empty();
2983 InvokeRuntimeCallingConvention calling_convention;
2984 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2985 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2986 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002987 locations->SetInAt(0, Location::RequiresRegister());
2988 locations->SetInAt(1, Location::RequiresRegister());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002989}
2990
2991void InstructionCodeGeneratorMIPS::VisitBoundsCheck(HBoundsCheck* instruction) {
2992 LocationSummary* locations = instruction->GetLocations();
2993 BoundsCheckSlowPathMIPS* slow_path =
2994 new (GetGraph()->GetArena()) BoundsCheckSlowPathMIPS(instruction);
2995 codegen_->AddSlowPath(slow_path);
2996
2997 Register index = locations->InAt(0).AsRegister<Register>();
2998 Register length = locations->InAt(1).AsRegister<Register>();
2999
3000 // length is limited by the maximum positive signed 32-bit integer.
3001 // Unsigned comparison of length and index checks for index < 0
3002 // and for length <= index simultaneously.
3003 __ Bgeu(index, length, slow_path->GetEntryLabel());
3004}
3005
Alexey Frunze15958152017-02-09 19:08:30 -08003006// Temp is used for read barrier.
3007static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
3008 if (kEmitCompilerReadBarrier &&
3009 (kUseBakerReadBarrier ||
3010 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3011 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3012 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
3013 return 1;
3014 }
3015 return 0;
3016}
3017
3018// Extra temp is used for read barrier.
3019static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
3020 return 1 + NumberOfInstanceOfTemps(type_check_kind);
3021}
3022
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003023void LocationsBuilderMIPS::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003024 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3025 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3026
3027 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3028 switch (type_check_kind) {
3029 case TypeCheckKind::kExactCheck:
3030 case TypeCheckKind::kAbstractClassCheck:
3031 case TypeCheckKind::kClassHierarchyCheck:
3032 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08003033 call_kind = (throws_into_catch || kEmitCompilerReadBarrier)
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003034 ? LocationSummary::kCallOnSlowPath
3035 : LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
3036 break;
3037 case TypeCheckKind::kArrayCheck:
3038 case TypeCheckKind::kUnresolvedCheck:
3039 case TypeCheckKind::kInterfaceCheck:
3040 call_kind = LocationSummary::kCallOnSlowPath;
3041 break;
3042 }
3043
3044 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003045 locations->SetInAt(0, Location::RequiresRegister());
3046 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08003047 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003048}
3049
// Emits the code for HCheckCast: if `obj` is non-null and does not satisfy
// the check against the class in `cls`, control transfers to a type-check
// slow path (fatal or returning, chosen below).
void InstructionCodeGeneratorMIPS::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = obj_loc.AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Location temp_loc = locations->GetTemp(0);
  Register temp = temp_loc.AsRegister<Register>();
  // A second temp exists only when the read barrier configuration requires
  // it (see NumberOfCheckCastTemps).
  const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
  DCHECK_LE(num_temps, 2u);
  Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
  MipsLabel done;

  // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
  // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
  // read barriers is done for performance and code size reasons.
  bool is_type_check_slow_path_fatal = false;
  if (!kEmitCompilerReadBarrier) {
    is_type_check_slow_path_fatal =
        (type_check_kind == TypeCheckKind::kExactCheck ||
         type_check_kind == TypeCheckKind::kAbstractClassCheck ||
         type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
         type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
        !instruction->CanThrowIntoCatchBlock();
  }
  SlowPathCodeMIPS* slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
                                                         is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(slow_path);

  // Avoid this check if we know `obj` is not null: casting null always
  // succeeds.
  if (instruction->MustDoNullCheck()) {
    __ Beqz(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ Bne(temp, cls, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      MipsLabel loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ Beqz(temp, slow_path->GetEntryLabel());
      // Otherwise, compare the classes.
      __ Bne(temp, cls, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Walk over the class hierarchy to find a match.
      MipsLabel loop;
      __ Bind(&loop);
      __ Beq(temp, cls, &done);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception. Otherwise, jump to the beginning of the loop.
      __ Bnez(temp, &loop);
      __ B(slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Do an exact check.
      __ Beq(temp, cls, &done);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the component type is null, jump to the slow path to throw the exception.
      __ Beqz(temp, slow_path->GetEntryLabel());
      // Otherwise, the object is indeed an array, further check that this component
      // type is not a primitive type.
      __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Bnez(temp, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
      // We always go into the type check slow path for the unresolved check case.
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HInstanceOf
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ B(slow_path->GetEntryLabel());
      break;

    case TypeCheckKind::kInterfaceCheck: {
      // Avoid read barriers to improve performance of the fast path. We can not get false
      // positives by doing this.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // /* HeapReference<Class> */ temp = temp->iftable_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        temp_loc,
                                        iftable_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Iftable is never null.
      __ Lw(TMP, temp, array_length_offset);
      // Loop through the iftable and check if any class matches.
      // Each iftable entry occupies two references (interface class and its
      // method array), hence the stride of 2 * kHeapReferenceSize.
      MipsLabel loop;
      __ Bind(&loop);
      __ Addiu(temp, temp, 2 * kHeapReferenceSize);  // Possibly in delay slot on R2.
      __ Beqz(TMP, slow_path->GetEntryLabel());
      __ Lw(AT, temp, object_array_data_offset - 2 * kHeapReferenceSize);
      __ MaybeUnpoisonHeapReference(AT);
      // Go to next interface.
      __ Addiu(TMP, TMP, -2);
      // Compare the classes and continue the loop if they do not match.
      __ Bne(AT, cls, &loop);
      break;
    }
  }

  __ Bind(&done);
  __ Bind(slow_path->GetExitLabel());
}
3236
3237void LocationsBuilderMIPS::VisitClinitCheck(HClinitCheck* check) {
3238 LocationSummary* locations =
3239 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3240 locations->SetInAt(0, Location::RequiresRegister());
3241 if (check->HasUses()) {
3242 locations->SetOut(Location::SameAsFirstInput());
3243 }
3244}
3245
3246void InstructionCodeGeneratorMIPS::VisitClinitCheck(HClinitCheck* check) {
3247 // We assume the class is not null.
3248 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS(
3249 check->GetLoadClass(),
3250 check,
3251 check->GetDexPc(),
3252 true);
3253 codegen_->AddSlowPath(slow_path);
3254 GenerateClassInitializationCheck(slow_path,
3255 check->GetLocations()->InAt(0).AsRegister<Register>());
3256}
3257
3258void LocationsBuilderMIPS::VisitCompare(HCompare* compare) {
3259 Primitive::Type in_type = compare->InputAt(0)->GetType();
3260
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003261 LocationSummary* locations =
3262 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003263
3264 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00003265 case Primitive::kPrimBoolean:
3266 case Primitive::kPrimByte:
3267 case Primitive::kPrimShort:
3268 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08003269 case Primitive::kPrimInt:
Alexey Frunzee7697712016-09-15 21:37:49 -07003270 locations->SetInAt(0, Location::RequiresRegister());
3271 locations->SetInAt(1, Location::RequiresRegister());
3272 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3273 break;
3274
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003275 case Primitive::kPrimLong:
3276 locations->SetInAt(0, Location::RequiresRegister());
3277 locations->SetInAt(1, Location::RequiresRegister());
3278 // Output overlaps because it is written before doing the low comparison.
3279 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3280 break;
3281
3282 case Primitive::kPrimFloat:
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003283 case Primitive::kPrimDouble:
3284 locations->SetInAt(0, Location::RequiresFpuRegister());
3285 locations->SetInAt(1, Location::RequiresFpuRegister());
3286 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003287 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003288
3289 default:
3290 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
3291 }
3292}
3293
3294void InstructionCodeGeneratorMIPS::VisitCompare(HCompare* instruction) {
3295 LocationSummary* locations = instruction->GetLocations();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003296 Register res = locations->Out().AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003297 Primitive::Type in_type = instruction->InputAt(0)->GetType();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003298 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003299
3300 // 0 if: left == right
3301 // 1 if: left > right
3302 // -1 if: left < right
3303 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00003304 case Primitive::kPrimBoolean:
3305 case Primitive::kPrimByte:
3306 case Primitive::kPrimShort:
3307 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08003308 case Primitive::kPrimInt: {
3309 Register lhs = locations->InAt(0).AsRegister<Register>();
3310 Register rhs = locations->InAt(1).AsRegister<Register>();
3311 __ Slt(TMP, lhs, rhs);
3312 __ Slt(res, rhs, lhs);
3313 __ Subu(res, res, TMP);
3314 break;
3315 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003316 case Primitive::kPrimLong: {
3317 MipsLabel done;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003318 Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
3319 Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
3320 Register rhs_high = locations->InAt(1).AsRegisterPairHigh<Register>();
3321 Register rhs_low = locations->InAt(1).AsRegisterPairLow<Register>();
3322 // TODO: more efficient (direct) comparison with a constant.
3323 __ Slt(TMP, lhs_high, rhs_high);
3324 __ Slt(AT, rhs_high, lhs_high); // Inverted: is actually gt.
3325 __ Subu(res, AT, TMP); // Result -1:1:0 for [ <, >, == ].
3326 __ Bnez(res, &done); // If we compared ==, check if lower bits are also equal.
3327 __ Sltu(TMP, lhs_low, rhs_low);
3328 __ Sltu(AT, rhs_low, lhs_low); // Inverted: is actually gt.
3329 __ Subu(res, AT, TMP); // Result -1:1:0 for [ <, >, == ].
3330 __ Bind(&done);
3331 break;
3332 }
3333
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003334 case Primitive::kPrimFloat: {
Roland Levillain32ca3752016-02-17 16:49:37 +00003335 bool gt_bias = instruction->IsGtBias();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003336 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
3337 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
3338 MipsLabel done;
3339 if (isR6) {
3340 __ CmpEqS(FTMP, lhs, rhs);
3341 __ LoadConst32(res, 0);
3342 __ Bc1nez(FTMP, &done);
3343 if (gt_bias) {
3344 __ CmpLtS(FTMP, lhs, rhs);
3345 __ LoadConst32(res, -1);
3346 __ Bc1nez(FTMP, &done);
3347 __ LoadConst32(res, 1);
3348 } else {
3349 __ CmpLtS(FTMP, rhs, lhs);
3350 __ LoadConst32(res, 1);
3351 __ Bc1nez(FTMP, &done);
3352 __ LoadConst32(res, -1);
3353 }
3354 } else {
3355 if (gt_bias) {
3356 __ ColtS(0, lhs, rhs);
3357 __ LoadConst32(res, -1);
3358 __ Bc1t(0, &done);
3359 __ CeqS(0, lhs, rhs);
3360 __ LoadConst32(res, 1);
3361 __ Movt(res, ZERO, 0);
3362 } else {
3363 __ ColtS(0, rhs, lhs);
3364 __ LoadConst32(res, 1);
3365 __ Bc1t(0, &done);
3366 __ CeqS(0, lhs, rhs);
3367 __ LoadConst32(res, -1);
3368 __ Movt(res, ZERO, 0);
3369 }
3370 }
3371 __ Bind(&done);
3372 break;
3373 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003374 case Primitive::kPrimDouble: {
Roland Levillain32ca3752016-02-17 16:49:37 +00003375 bool gt_bias = instruction->IsGtBias();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003376 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
3377 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
3378 MipsLabel done;
3379 if (isR6) {
3380 __ CmpEqD(FTMP, lhs, rhs);
3381 __ LoadConst32(res, 0);
3382 __ Bc1nez(FTMP, &done);
3383 if (gt_bias) {
3384 __ CmpLtD(FTMP, lhs, rhs);
3385 __ LoadConst32(res, -1);
3386 __ Bc1nez(FTMP, &done);
3387 __ LoadConst32(res, 1);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003388 } else {
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003389 __ CmpLtD(FTMP, rhs, lhs);
3390 __ LoadConst32(res, 1);
3391 __ Bc1nez(FTMP, &done);
3392 __ LoadConst32(res, -1);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003393 }
3394 } else {
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003395 if (gt_bias) {
3396 __ ColtD(0, lhs, rhs);
3397 __ LoadConst32(res, -1);
3398 __ Bc1t(0, &done);
3399 __ CeqD(0, lhs, rhs);
3400 __ LoadConst32(res, 1);
3401 __ Movt(res, ZERO, 0);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003402 } else {
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003403 __ ColtD(0, rhs, lhs);
3404 __ LoadConst32(res, 1);
3405 __ Bc1t(0, &done);
3406 __ CeqD(0, lhs, rhs);
3407 __ LoadConst32(res, -1);
3408 __ Movt(res, ZERO, 0);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003409 }
3410 }
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003411 __ Bind(&done);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003412 break;
3413 }
3414
3415 default:
3416 LOG(FATAL) << "Unimplemented compare type " << in_type;
3417 }
3418}
3419
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003420void LocationsBuilderMIPS::HandleCondition(HCondition* instruction) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003421 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003422 switch (instruction->InputAt(0)->GetType()) {
3423 default:
3424 case Primitive::kPrimLong:
3425 locations->SetInAt(0, Location::RequiresRegister());
3426 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3427 break;
3428
3429 case Primitive::kPrimFloat:
3430 case Primitive::kPrimDouble:
3431 locations->SetInAt(0, Location::RequiresFpuRegister());
3432 locations->SetInAt(1, Location::RequiresFpuRegister());
3433 break;
3434 }
David Brazdilb3e773e2016-01-26 11:28:37 +00003435 if (!instruction->IsEmittedAtUseSite()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003436 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3437 }
3438}
3439
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003440void InstructionCodeGeneratorMIPS::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003441 if (instruction->IsEmittedAtUseSite()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003442 return;
3443 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003444
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003445 Primitive::Type type = instruction->InputAt(0)->GetType();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003446 LocationSummary* locations = instruction->GetLocations();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003447
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003448 switch (type) {
3449 default:
3450 // Integer case.
3451 GenerateIntCompare(instruction->GetCondition(), locations);
3452 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003453
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003454 case Primitive::kPrimLong:
Tijana Jakovljevic6d482aa2017-02-03 13:24:08 +01003455 GenerateLongCompare(instruction->GetCondition(), locations);
3456 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003457
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003458 case Primitive::kPrimFloat:
3459 case Primitive::kPrimDouble:
Alexey Frunze2ddb7172016-09-06 17:04:55 -07003460 GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
3461 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003462 }
3463}
3464
Alexey Frunze7e99e052015-11-24 19:28:01 -08003465void InstructionCodeGeneratorMIPS::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3466 DCHECK(instruction->IsDiv() || instruction->IsRem());
3467 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);
3468
3469 LocationSummary* locations = instruction->GetLocations();
3470 Location second = locations->InAt(1);
3471 DCHECK(second.IsConstant());
3472
3473 Register out = locations->Out().AsRegister<Register>();
3474 Register dividend = locations->InAt(0).AsRegister<Register>();
3475 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
3476 DCHECK(imm == 1 || imm == -1);
3477
3478 if (instruction->IsRem()) {
3479 __ Move(out, ZERO);
3480 } else {
3481 if (imm == -1) {
3482 __ Subu(out, ZERO, dividend);
3483 } else if (out != dividend) {
3484 __ Move(out, dividend);
3485 }
3486 }
3487}
3488
// Emits code for integer div/rem by a constant power of two (|imm| == 2^ctz_imm,
// imm != +/-1). Signed division must round toward zero, so a bias of
// (2^ctz_imm - 1) is added to negative dividends before shifting (for div)
// or masking (for rem).
void InstructionCodeGeneratorMIPS::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = locations->Out().AsRegister<Register>();
  Register dividend = locations->InAt(0).AsRegister<Register>();
  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
  // The unsigned cast keeps 0x80000000 (AbsOrMin(INT32_MIN)) a valid power of two.
  uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  if (instruction->IsDiv()) {
    if (ctz_imm == 1) {
      // Fast path for division by +/-2, which is very common.
      // The rounding bias is just the sign bit itself.
      __ Srl(TMP, dividend, 31);
    } else {
      // TMP = (dividend < 0) ? (2^ctz_imm - 1) : 0.
      __ Sra(TMP, dividend, 31);
      __ Srl(TMP, TMP, 32 - ctz_imm);
    }
    __ Addu(out, dividend, TMP);
    __ Sra(out, out, ctz_imm);
    if (imm < 0) {
      // Negative divisor: negate the quotient.
      __ Subu(out, ZERO, out);
    }
  } else {
    if (ctz_imm == 1) {
      // Fast path for modulo +/-2, which is very common.
      __ Sra(TMP, dividend, 31);
      __ Subu(out, dividend, TMP);
      __ Andi(out, out, 1);
      __ Addu(out, out, TMP);
    } else {
      // out = ((dividend + bias) & (2^ctz_imm - 1)) - bias, with bias in TMP.
      __ Sra(TMP, dividend, 31);
      __ Srl(TMP, TMP, 32 - ctz_imm);
      __ Addu(out, dividend, TMP);
      if (IsUint<16>(abs_imm - 1)) {
        __ Andi(out, out, abs_imm - 1);
      } else {
        // Mask too wide for a 16-bit andi immediate: clear high bits with a shift pair.
        __ Sll(out, out, 32 - ctz_imm);
        __ Srl(out, out, 32 - ctz_imm);
      }
      __ Subu(out, out, TMP);
    }
  }
}
3537
// Emits code for integer div/rem by an arbitrary constant (|imm| >= 2, not a
// power of two) using the multiply-by-magic-number technique (Hacker's
// Delight, ch. 10): quotient = hi32(dividend * magic) >> shift, with sign
// corrections; the remainder is then dividend - quotient * imm.
void InstructionCodeGeneratorMIPS::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = locations->Out().AsRegister<Register>();
  Register dividend = locations->InAt(0).AsRegister<Register>();
  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  // TMP = high 32 bits of (dividend * magic).
  __ LoadConst32(TMP, magic);
  if (isR6) {
    __ MuhR6(TMP, dividend, TMP);
  } else {
    __ MultR2(dividend, TMP);
    __ Mfhi(TMP);
  }
  // Correction when the magic number's sign differs from the divisor's.
  if (imm > 0 && magic < 0) {
    __ Addu(TMP, TMP, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Subu(TMP, TMP, dividend);
  }

  if (shift != 0) {
    __ Sra(TMP, TMP, shift);
  }

  if (instruction->IsDiv()) {
    // Subtracting the sign (-1 or 0) adds 1 to negative intermediates,
    // implementing round-toward-zero.
    __ Sra(out, TMP, 31);
    __ Subu(out, TMP, out);
  } else {
    // Same quotient fixup into AT, then out = dividend - quotient * imm.
    __ Sra(AT, TMP, 31);
    __ Subu(AT, TMP, AT);
    __ LoadConst32(TMP, imm);
    if (isR6) {
      __ MulR6(TMP, AT, TMP);
    } else {
      __ MulR2(TMP, AT, TMP);
    }
    __ Subu(out, dividend, TMP);
  }
}
3588
3589void InstructionCodeGeneratorMIPS::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3590 DCHECK(instruction->IsDiv() || instruction->IsRem());
3591 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);
3592
3593 LocationSummary* locations = instruction->GetLocations();
3594 Register out = locations->Out().AsRegister<Register>();
3595 Location second = locations->InAt(1);
3596
3597 if (second.IsConstant()) {
3598 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
3599 if (imm == 0) {
3600 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3601 } else if (imm == 1 || imm == -1) {
3602 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003603 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Alexey Frunze7e99e052015-11-24 19:28:01 -08003604 DivRemByPowerOfTwo(instruction);
3605 } else {
3606 DCHECK(imm <= -2 || imm >= 2);
3607 GenerateDivRemWithAnyConstant(instruction);
3608 }
3609 } else {
3610 Register dividend = locations->InAt(0).AsRegister<Register>();
3611 Register divisor = second.AsRegister<Register>();
3612 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
3613 if (instruction->IsDiv()) {
3614 if (isR6) {
3615 __ DivR6(out, dividend, divisor);
3616 } else {
3617 __ DivR2(out, dividend, divisor);
3618 }
3619 } else {
3620 if (isR6) {
3621 __ ModR6(out, dividend, divisor);
3622 } else {
3623 __ ModR2(out, dividend, divisor);
3624 }
3625 }
3626 }
3627}
3628
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003629void LocationsBuilderMIPS::VisitDiv(HDiv* div) {
3630 Primitive::Type type = div->GetResultType();
3631 LocationSummary::CallKind call_kind = (type == Primitive::kPrimLong)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003632 ? LocationSummary::kCallOnMainOnly
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003633 : LocationSummary::kNoCall;
3634
3635 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
3636
3637 switch (type) {
3638 case Primitive::kPrimInt:
3639 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze7e99e052015-11-24 19:28:01 -08003640 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003641 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3642 break;
3643
3644 case Primitive::kPrimLong: {
3645 InvokeRuntimeCallingConvention calling_convention;
3646 locations->SetInAt(0, Location::RegisterPairLocation(
3647 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3648 locations->SetInAt(1, Location::RegisterPairLocation(
3649 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
3650 locations->SetOut(calling_convention.GetReturnLocation(type));
3651 break;
3652 }
3653
3654 case Primitive::kPrimFloat:
3655 case Primitive::kPrimDouble:
3656 locations->SetInAt(0, Location::RequiresFpuRegister());
3657 locations->SetInAt(1, Location::RequiresFpuRegister());
3658 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3659 break;
3660
3661 default:
3662 LOG(FATAL) << "Unexpected div type " << type;
3663 }
3664}
3665
3666void InstructionCodeGeneratorMIPS::VisitDiv(HDiv* instruction) {
3667 Primitive::Type type = instruction->GetType();
3668 LocationSummary* locations = instruction->GetLocations();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003669
3670 switch (type) {
Alexey Frunze7e99e052015-11-24 19:28:01 -08003671 case Primitive::kPrimInt:
3672 GenerateDivRemIntegral(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003673 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003674 case Primitive::kPrimLong: {
Serban Constantinescufca16662016-07-14 09:21:59 +01003675 codegen_->InvokeRuntime(kQuickLdiv, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003676 CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
3677 break;
3678 }
3679 case Primitive::kPrimFloat:
3680 case Primitive::kPrimDouble: {
3681 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
3682 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
3683 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
3684 if (type == Primitive::kPrimFloat) {
3685 __ DivS(dst, lhs, rhs);
3686 } else {
3687 __ DivD(dst, lhs, rhs);
3688 }
3689 break;
3690 }
3691 default:
3692 LOG(FATAL) << "Unexpected div type " << type;
3693 }
3694}
3695
// Locations for the divide-by-zero check: uses the shared throwing-slow-path
// summary; the checked divisor may be a register or a compile-time constant.
void LocationsBuilderMIPS::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
}
3700
// Emits the divide-by-zero check: branches to the throwing slow path when the
// divisor is zero. Constant divisors are resolved at compile time; a long
// divisor is tested by OR-ing its two 32-bit halves.
void InstructionCodeGeneratorMIPS::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathMIPS(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);
  Primitive::Type type = instruction->GetType();

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt: {
      if (value.IsConstant()) {
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          // Constant zero divisor: this always throws.
          __ B(slow_path->GetEntryLabel());
        } else {
          // A division by a non-zero constant is valid. We don't need to perform
          // any check, so simply fall through.
        }
      } else {
        DCHECK(value.IsRegister()) << value;
        __ Beqz(value.AsRegister<Register>(), slow_path->GetEntryLabel());
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (value.IsConstant()) {
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          // Constant zero divisor: this always throws.
          __ B(slow_path->GetEntryLabel());
        } else {
          // A division by a non-zero constant is valid. We don't need to perform
          // any check, so simply fall through.
        }
      } else {
        DCHECK(value.IsRegisterPair()) << value;
        // A 64-bit value is zero iff the OR of its halves is zero.
        __ Or(TMP, value.AsRegisterPairHigh<Register>(), value.AsRegisterPairLow<Register>());
        __ Beqz(TMP, slow_path->GetEntryLabel());
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
  }
}
3745
3746void LocationsBuilderMIPS::VisitDoubleConstant(HDoubleConstant* constant) {
3747 LocationSummary* locations =
3748 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3749 locations->SetOut(Location::ConstantLocation(constant));
3750}
3751
void InstructionCodeGeneratorMIPS::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
  // No code emitted here: the constant is materialized at each use site.
}
3755
void LocationsBuilderMIPS::VisitExit(HExit* exit) {
  // HExit has no operands and produces no value.
  exit->SetLocations(nullptr);
}
3759
void InstructionCodeGeneratorMIPS::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
  // No code is generated for the exit block.
}
3762
3763void LocationsBuilderMIPS::VisitFloatConstant(HFloatConstant* constant) {
3764 LocationSummary* locations =
3765 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3766 locations->SetOut(Location::ConstantLocation(constant));
3767}
3768
void InstructionCodeGeneratorMIPS::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here: the constant is materialized at each use site.
}
3772
void LocationsBuilderMIPS::VisitGoto(HGoto* got) {
  // An unconditional branch has no operands and produces no value.
  got->SetLocations(nullptr);
}
3776
3777void InstructionCodeGeneratorMIPS::HandleGoto(HInstruction* got, HBasicBlock* successor) {
3778 DCHECK(!successor->IsExitBlock());
3779 HBasicBlock* block = got->GetBlock();
3780 HInstruction* previous = got->GetPrevious();
3781 HLoopInformation* info = block->GetLoopInformation();
3782
3783 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
3784 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
3785 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3786 return;
3787 }
3788 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3789 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
3790 }
3791 if (!codegen_->GoesToNextBlock(block, successor)) {
3792 __ B(codegen_->GetLabelOf(successor));
3793 }
3794}
3795
void InstructionCodeGeneratorMIPS::VisitGoto(HGoto* got) {
  // Shares its lowering with HTryBoundary; see HandleGoto.
  HandleGoto(got, got->GetSuccessor());
}
3799
void LocationsBuilderMIPS::VisitTryBoundary(HTryBoundary* try_boundary) {
  // A try boundary is pure control flow; no operands or result.
  try_boundary->SetLocations(nullptr);
}
3803
3804void InstructionCodeGeneratorMIPS::VisitTryBoundary(HTryBoundary* try_boundary) {
3805 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3806 if (!successor->IsExitBlock()) {
3807 HandleGoto(try_boundary, successor);
3808 }
3809}
3810
// Materializes the 32-bit integer condition `lhs <cond> rhs` into `dst` as
// 0 or 1. When the right-hand side is a constant, small immediates are folded
// into slti/sltiu/xori/addiu forms; otherwise the constant is loaded into TMP
// and the register-register form is used.
void InstructionCodeGeneratorMIPS::GenerateIntCompare(IfCondition cond,
                                                      LocationSummary* locations) {
  Register dst = locations->Out().AsRegister<Register>();
  Register lhs = locations->InAt(0).AsRegister<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
  } else {
    rhs_reg = rhs_location.AsRegister<Register>();
  }

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      // Reduce to a single value (lhs - rhs or lhs ^ rhs) that is zero iff
      // the operands are equal, then test that value against zero.
      if (use_imm && IsInt<16>(-rhs_imm)) {
        if (rhs_imm == 0) {
          if (cond == kCondEQ) {
            __ Sltiu(dst, lhs, 1);
          } else {
            __ Sltu(dst, ZERO, lhs);
          }
        } else {
          __ Addiu(dst, lhs, -rhs_imm);
          if (cond == kCondEQ) {
            __ Sltiu(dst, dst, 1);
          } else {
            __ Sltu(dst, ZERO, dst);
          }
        }
      } else {
        if (use_imm && IsUint<16>(rhs_imm)) {
          __ Xori(dst, lhs, rhs_imm);
        } else {
          if (use_imm) {
            rhs_reg = TMP;
            __ LoadConst32(rhs_reg, rhs_imm);
          }
          __ Xor(dst, lhs, rhs_reg);
        }
        if (cond == kCondEQ) {
          __ Sltiu(dst, dst, 1);
        } else {
          __ Sltu(dst, ZERO, dst);
        }
      }
      break;

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      if (cond == kCondGE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the slt instruction but no sge.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm + 1)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm + 1);
        if (cond == kCondGT) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the slti instruction but no sgti.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        if (cond == kCondLE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the slt instruction but no sle.
          __ Xori(dst, dst, 1);
        }
      }
      break;

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0xffff8000, 0xffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      if (cond == kCondAE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the sltu instruction but no sgeu.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0xffff8000, 0xffffffff].
        __ Sltiu(dst, lhs, rhs_imm + 1);
        if (cond == kCondA) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the sltiu instruction but no sgtiu.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        if (cond == kCondBE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the sltu instruction but no sleu.
          __ Xori(dst, dst, 1);
        }
      }
      break;
  }
}
3956
// Materializes the 32-bit integer condition `lhs <cond> rhs` into `dst`,
// possibly as the *negated* condition when that takes fewer instructions
// (e.g. EQ is materialized as a non-zero value meaning NE). Returns true when
// `dst` holds the negation, i.e. the caller must branch on `dst` being zero
// rather than non-zero. Unlike GenerateIntCompare, `dst` is not normalized
// to 0/1 in the EQ/NE cases.
bool InstructionCodeGeneratorMIPS::MaterializeIntCompare(IfCondition cond,
                                                         LocationSummary* input_locations,
                                                         Register dst) {
  Register lhs = input_locations->InAt(0).AsRegister<Register>();
  Location rhs_location = input_locations->InAt(1);
  Register rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
  } else {
    rhs_reg = rhs_location.AsRegister<Register>();
  }

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      // dst = lhs - rhs or lhs ^ rhs: zero iff equal.
      if (use_imm && IsInt<16>(-rhs_imm)) {
        __ Addiu(dst, lhs, -rhs_imm);
      } else if (use_imm && IsUint<16>(rhs_imm)) {
        __ Xori(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Xor(dst, lhs, rhs_reg);
      }
      return (cond == kCondEQ);

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      return (cond == kCondGE);

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm + 1)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm + 1);
        return (cond == kCondGT);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        return (cond == kCondLE);
      }

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0xffff8000, 0xffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      return (cond == kCondAE);

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0xffff8000, 0xffffffff].
        __ Sltiu(dst, lhs, rhs_imm + 1);
        return (cond == kCondA);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        return (cond == kCondBE);
      }
  }
}
4054
// Emits a branch to `label` taken when `lhs <cond> rhs` holds, for 32-bit
// integer operands. Comparison against constant zero uses the single-operand
// branch forms; on R6 (or with a register rhs) the two-register compare-and-
// branch pseudo-instructions are used; on R2 with a non-zero constant the
// comparison is materialized into TMP via slti/sltiu where the immediate fits.
void InstructionCodeGeneratorMIPS::GenerateIntCompareAndBranch(IfCondition cond,
                                                               LocationSummary* locations,
                                                               MipsLabel* label) {
  Register lhs = locations->InAt(0).AsRegister<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
  } else {
    rhs_reg = rhs_location.AsRegister<Register>();
  }

  if (use_imm && rhs_imm == 0) {
    // Compare against zero: dedicated branch instructions.
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Beqz(lhs, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Bnez(lhs, label);
        break;
      case kCondLT:
        __ Bltz(lhs, label);
        break;
      case kCondGE:
        __ Bgez(lhs, label);
        break;
      case kCondLE:
        __ Blez(lhs, label);
        break;
      case kCondGT:
        __ Bgtz(lhs, label);
        break;
      case kCondB:  // always false
        break;
      case kCondAE:  // always true
        __ B(label);
        break;
    }
  } else {
    bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
    if (isR6 || !use_imm) {
      if (use_imm) {
        rhs_reg = TMP;
        __ LoadConst32(rhs_reg, rhs_imm);
      }
      switch (cond) {
        case kCondEQ:
          __ Beq(lhs, rhs_reg, label);
          break;
        case kCondNE:
          __ Bne(lhs, rhs_reg, label);
          break;
        case kCondLT:
          __ Blt(lhs, rhs_reg, label);
          break;
        case kCondGE:
          __ Bge(lhs, rhs_reg, label);
          break;
        case kCondLE:
          // lhs <= rhs is emitted as rhs >= lhs.
          __ Bge(rhs_reg, lhs, label);
          break;
        case kCondGT:
          // lhs > rhs is emitted as rhs < lhs.
          __ Blt(rhs_reg, lhs, label);
          break;
        case kCondB:
          __ Bltu(lhs, rhs_reg, label);
          break;
        case kCondAE:
          __ Bgeu(lhs, rhs_reg, label);
          break;
        case kCondBE:
          __ Bgeu(rhs_reg, lhs, label);
          break;
        case kCondA:
          __ Bltu(rhs_reg, lhs, label);
          break;
      }
    } else {
      // Special cases for more efficient comparison with constants on R2.
      switch (cond) {
        case kCondEQ:
          __ LoadConst32(TMP, rhs_imm);
          __ Beq(lhs, TMP, label);
          break;
        case kCondNE:
          __ LoadConst32(TMP, rhs_imm);
          __ Bne(lhs, TMP, label);
          break;
        case kCondLT:
          if (IsInt<16>(rhs_imm)) {
            __ Slti(TMP, lhs, rhs_imm);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Blt(lhs, TMP, label);
          }
          break;
        case kCondGE:
          if (IsInt<16>(rhs_imm)) {
            __ Slti(TMP, lhs, rhs_imm);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bge(lhs, TMP, label);
          }
          break;
        case kCondLE:
          if (IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs <= rhs via lhs < rhs + 1.
            __ Slti(TMP, lhs, rhs_imm + 1);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bge(TMP, lhs, label);
          }
          break;
        case kCondGT:
          if (IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs > rhs via !(lhs < rhs + 1).
            __ Slti(TMP, lhs, rhs_imm + 1);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Blt(TMP, lhs, label);
          }
          break;
        case kCondB:
          if (IsInt<16>(rhs_imm)) {
            __ Sltiu(TMP, lhs, rhs_imm);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bltu(lhs, TMP, label);
          }
          break;
        case kCondAE:
          if (IsInt<16>(rhs_imm)) {
            __ Sltiu(TMP, lhs, rhs_imm);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bgeu(lhs, TMP, label);
          }
          break;
        case kCondBE:
          if ((rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs <= rhs via lhs < rhs + 1.
            // Note that this only works if rhs + 1 does not overflow
            // to 0, hence the check above.
            __ Sltiu(TMP, lhs, rhs_imm + 1);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bgeu(TMP, lhs, label);
          }
          break;
        case kCondA:
          if ((rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs > rhs via !(lhs < rhs + 1).
            // Note that this only works if rhs + 1 does not overflow
            // to 0, hence the check above.
            __ Sltiu(TMP, lhs, rhs_imm + 1);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bltu(TMP, lhs, label);
          }
          break;
      }
    }
  }
}
4231
Tijana Jakovljevic6d482aa2017-02-03 13:24:08 +01004232void InstructionCodeGeneratorMIPS::GenerateLongCompare(IfCondition cond,
4233 LocationSummary* locations) {
4234 Register dst = locations->Out().AsRegister<Register>();
4235 Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
4236 Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
4237 Location rhs_location = locations->InAt(1);
4238 Register rhs_high = ZERO;
4239 Register rhs_low = ZERO;
4240 int64_t imm = 0;
4241 uint32_t imm_high = 0;
4242 uint32_t imm_low = 0;
4243 bool use_imm = rhs_location.IsConstant();
4244 if (use_imm) {
4245 imm = rhs_location.GetConstant()->AsLongConstant()->GetValue();
4246 imm_high = High32Bits(imm);
4247 imm_low = Low32Bits(imm);
4248 } else {
4249 rhs_high = rhs_location.AsRegisterPairHigh<Register>();
4250 rhs_low = rhs_location.AsRegisterPairLow<Register>();
4251 }
4252 if (use_imm && imm == 0) {
4253 switch (cond) {
4254 case kCondEQ:
4255 case kCondBE: // <= 0 if zero
4256 __ Or(dst, lhs_high, lhs_low);
4257 __ Sltiu(dst, dst, 1);
4258 break;
4259 case kCondNE:
4260 case kCondA: // > 0 if non-zero
4261 __ Or(dst, lhs_high, lhs_low);
4262 __ Sltu(dst, ZERO, dst);
4263 break;
4264 case kCondLT:
4265 __ Slt(dst, lhs_high, ZERO);
4266 break;
4267 case kCondGE:
4268 __ Slt(dst, lhs_high, ZERO);
4269 __ Xori(dst, dst, 1);
4270 break;
4271 case kCondLE:
4272 __ Or(TMP, lhs_high, lhs_low);
4273 __ Sra(AT, lhs_high, 31);
4274 __ Sltu(dst, AT, TMP);
4275 __ Xori(dst, dst, 1);
4276 break;
4277 case kCondGT:
4278 __ Or(TMP, lhs_high, lhs_low);
4279 __ Sra(AT, lhs_high, 31);
4280 __ Sltu(dst, AT, TMP);
4281 break;
4282 case kCondB: // always false
4283 __ Andi(dst, dst, 0);
4284 break;
4285 case kCondAE: // always true
4286 __ Ori(dst, ZERO, 1);
4287 break;
4288 }
4289 } else if (use_imm) {
4290 // TODO: more efficient comparison with constants without loading them into TMP/AT.
4291 switch (cond) {
4292 case kCondEQ:
4293 __ LoadConst32(TMP, imm_high);
4294 __ Xor(TMP, TMP, lhs_high);
4295 __ LoadConst32(AT, imm_low);
4296 __ Xor(AT, AT, lhs_low);
4297 __ Or(dst, TMP, AT);
4298 __ Sltiu(dst, dst, 1);
4299 break;
4300 case kCondNE:
4301 __ LoadConst32(TMP, imm_high);
4302 __ Xor(TMP, TMP, lhs_high);
4303 __ LoadConst32(AT, imm_low);
4304 __ Xor(AT, AT, lhs_low);
4305 __ Or(dst, TMP, AT);
4306 __ Sltu(dst, ZERO, dst);
4307 break;
4308 case kCondLT:
4309 case kCondGE:
4310 if (dst == lhs_low) {
4311 __ LoadConst32(TMP, imm_low);
4312 __ Sltu(dst, lhs_low, TMP);
4313 }
4314 __ LoadConst32(TMP, imm_high);
4315 __ Slt(AT, lhs_high, TMP);
4316 __ Slt(TMP, TMP, lhs_high);
4317 if (dst != lhs_low) {
4318 __ LoadConst32(dst, imm_low);
4319 __ Sltu(dst, lhs_low, dst);
4320 }
4321 __ Slt(dst, TMP, dst);
4322 __ Or(dst, dst, AT);
4323 if (cond == kCondGE) {
4324 __ Xori(dst, dst, 1);
4325 }
4326 break;
4327 case kCondGT:
4328 case kCondLE:
4329 if (dst == lhs_low) {
4330 __ LoadConst32(TMP, imm_low);
4331 __ Sltu(dst, TMP, lhs_low);
4332 }
4333 __ LoadConst32(TMP, imm_high);
4334 __ Slt(AT, TMP, lhs_high);
4335 __ Slt(TMP, lhs_high, TMP);
4336 if (dst != lhs_low) {
4337 __ LoadConst32(dst, imm_low);
4338 __ Sltu(dst, dst, lhs_low);
4339 }
4340 __ Slt(dst, TMP, dst);
4341 __ Or(dst, dst, AT);
4342 if (cond == kCondLE) {
4343 __ Xori(dst, dst, 1);
4344 }
4345 break;
4346 case kCondB:
4347 case kCondAE:
4348 if (dst == lhs_low) {
4349 __ LoadConst32(TMP, imm_low);
4350 __ Sltu(dst, lhs_low, TMP);
4351 }
4352 __ LoadConst32(TMP, imm_high);
4353 __ Sltu(AT, lhs_high, TMP);
4354 __ Sltu(TMP, TMP, lhs_high);
4355 if (dst != lhs_low) {
4356 __ LoadConst32(dst, imm_low);
4357 __ Sltu(dst, lhs_low, dst);
4358 }
4359 __ Slt(dst, TMP, dst);
4360 __ Or(dst, dst, AT);
4361 if (cond == kCondAE) {
4362 __ Xori(dst, dst, 1);
4363 }
4364 break;
4365 case kCondA:
4366 case kCondBE:
4367 if (dst == lhs_low) {
4368 __ LoadConst32(TMP, imm_low);
4369 __ Sltu(dst, TMP, lhs_low);
4370 }
4371 __ LoadConst32(TMP, imm_high);
4372 __ Sltu(AT, TMP, lhs_high);
4373 __ Sltu(TMP, lhs_high, TMP);
4374 if (dst != lhs_low) {
4375 __ LoadConst32(dst, imm_low);
4376 __ Sltu(dst, dst, lhs_low);
4377 }
4378 __ Slt(dst, TMP, dst);
4379 __ Or(dst, dst, AT);
4380 if (cond == kCondBE) {
4381 __ Xori(dst, dst, 1);
4382 }
4383 break;
4384 }
4385 } else {
4386 switch (cond) {
4387 case kCondEQ:
4388 __ Xor(TMP, lhs_high, rhs_high);
4389 __ Xor(AT, lhs_low, rhs_low);
4390 __ Or(dst, TMP, AT);
4391 __ Sltiu(dst, dst, 1);
4392 break;
4393 case kCondNE:
4394 __ Xor(TMP, lhs_high, rhs_high);
4395 __ Xor(AT, lhs_low, rhs_low);
4396 __ Or(dst, TMP, AT);
4397 __ Sltu(dst, ZERO, dst);
4398 break;
4399 case kCondLT:
4400 case kCondGE:
4401 __ Slt(TMP, rhs_high, lhs_high);
4402 __ Sltu(AT, lhs_low, rhs_low);
4403 __ Slt(TMP, TMP, AT);
4404 __ Slt(AT, lhs_high, rhs_high);
4405 __ Or(dst, AT, TMP);
4406 if (cond == kCondGE) {
4407 __ Xori(dst, dst, 1);
4408 }
4409 break;
4410 case kCondGT:
4411 case kCondLE:
4412 __ Slt(TMP, lhs_high, rhs_high);
4413 __ Sltu(AT, rhs_low, lhs_low);
4414 __ Slt(TMP, TMP, AT);
4415 __ Slt(AT, rhs_high, lhs_high);
4416 __ Or(dst, AT, TMP);
4417 if (cond == kCondLE) {
4418 __ Xori(dst, dst, 1);
4419 }
4420 break;
4421 case kCondB:
4422 case kCondAE:
4423 __ Sltu(TMP, rhs_high, lhs_high);
4424 __ Sltu(AT, lhs_low, rhs_low);
4425 __ Slt(TMP, TMP, AT);
4426 __ Sltu(AT, lhs_high, rhs_high);
4427 __ Or(dst, AT, TMP);
4428 if (cond == kCondAE) {
4429 __ Xori(dst, dst, 1);
4430 }
4431 break;
4432 case kCondA:
4433 case kCondBE:
4434 __ Sltu(TMP, lhs_high, rhs_high);
4435 __ Sltu(AT, rhs_low, lhs_low);
4436 __ Slt(TMP, TMP, AT);
4437 __ Sltu(AT, rhs_high, lhs_high);
4438 __ Or(dst, AT, TMP);
4439 if (cond == kCondBE) {
4440 __ Xori(dst, dst, 1);
4441 }
4442 break;
4443 }
4444 }
4445}
4446
// Branches to `label` when the 64-bit integer condition `cond` holds between
// the register pair in InAt(0) and InAt(1) (a register pair or a long
// constant); falls through otherwise. TMP and AT are used as scratch
// registers, so `label` must not rely on their contents.
void InstructionCodeGeneratorMIPS::GenerateLongCompareAndBranch(IfCondition cond,
                                                                LocationSummary* locations,
                                                                MipsLabel* label) {
  Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_high = ZERO;
  Register rhs_low = ZERO;
  int64_t imm = 0;
  uint32_t imm_high = 0;
  uint32_t imm_low = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    imm = rhs_location.GetConstant()->AsLongConstant()->GetValue();
    imm_high = High32Bits(imm);
    imm_low = Low32Bits(imm);
  } else {
    rhs_high = rhs_location.AsRegisterPairHigh<Register>();
    rhs_low = rhs_location.AsRegisterPairLow<Register>();
  }

  if (use_imm && imm == 0) {
    // Comparisons against the constant zero have dedicated short forms.
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Or(TMP, lhs_high, lhs_low);
        __ Beqz(TMP, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Or(TMP, lhs_high, lhs_low);
        __ Bnez(TMP, label);
        break;
      case kCondLT:
        // Negative iff the sign bit of the high word is set.
        __ Bltz(lhs_high, label);
        break;
      case kCondGE:
        __ Bgez(lhs_high, label);
        break;
      case kCondLE:
        // AT = sign mask of the value; branch iff value is negative or
        // (high|low) == 0, i.e. the value is <= 0.
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Bgeu(AT, TMP, label);
        break;
      case kCondGT:
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Bltu(AT, TMP, label);
        break;
      case kCondB:  // always false
        break;
      case kCondAE:  // always true
        __ B(label);
        break;
    }
  } else if (use_imm) {
    // TODO: more efficient comparison with constants without loading them into TMP/AT.
    // General pattern below: branch early when the high words already decide
    // the condition; otherwise combine the high-word comparison with an
    // unsigned comparison of the low words.
    switch (cond) {
      case kCondEQ:
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondNE:
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Bnez(TMP, label);
        break;
      case kCondLT:
        __ LoadConst32(TMP, imm_high);
        __ Blt(lhs_high, TMP, label);    // High words decide: lhs_high < imm_high.
        __ Slt(TMP, TMP, lhs_high);      // TMP = (lhs_high > imm_high).
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);        // AT = (lhs_low < imm_low) unsigned.
        __ Blt(TMP, AT, label);          // Taken iff high words equal and AT holds.
        break;
      case kCondGE:
        __ LoadConst32(TMP, imm_high);
        __ Blt(TMP, lhs_high, label);
        __ Slt(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);             // Taken iff neither "<" condition holds.
        break;
      case kCondLE:
        __ LoadConst32(TMP, imm_high);
        __ Blt(lhs_high, TMP, label);
        __ Slt(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondGT:
        __ LoadConst32(TMP, imm_high);
        __ Blt(TMP, lhs_high, label);
        __ Slt(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondB:
        // Unsigned variants use Bltu/Sltu on the high words as well.
        __ LoadConst32(TMP, imm_high);
        __ Bltu(lhs_high, TMP, label);
        __ Sltu(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Blt(TMP, AT, label);
        break;
      case kCondAE:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(TMP, lhs_high, label);
        __ Sltu(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondBE:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(lhs_high, TMP, label);
        __ Sltu(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondA:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(TMP, lhs_high, label);
        __ Sltu(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Blt(TMP, AT, label);
        break;
    }
  } else {
    // Register-register comparison; same high-then-low strategy without the
    // constant loads.
    switch (cond) {
      case kCondEQ:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondNE:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Bnez(TMP, label);
        break;
      case kCondLT:
        __ Blt(lhs_high, rhs_high, label);
        __ Slt(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondGE:
        __ Blt(rhs_high, lhs_high, label);
        __ Slt(TMP, lhs_high, rhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondLE:
        __ Blt(lhs_high, rhs_high, label);
        __ Slt(TMP, rhs_high, lhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondGT:
        __ Blt(rhs_high, lhs_high, label);
        __ Slt(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondB:
        __ Bltu(lhs_high, rhs_high, label);
        __ Sltu(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondAE:
        __ Bltu(rhs_high, lhs_high, label);
        __ Sltu(TMP, lhs_high, rhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondBE:
        __ Bltu(lhs_high, rhs_high, label);
        __ Sltu(TMP, rhs_high, lhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondA:
        __ Bltu(rhs_high, lhs_high, label);
        __ Sltu(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Blt(TMP, AT, label);
        break;
    }
  }
}
4659
// Materializes the floating-point comparison `cond` between InAt(0) and
// InAt(1) as 0 or 1 in the core register Out().
// `gt_bias` selects ordered vs. unordered compare variants, which determines
// the result when an operand is NaN. On R6 the cmp.cond.fmt instructions
// write an all-ones/all-zeros mask into FTMP, which is then moved to `dst`
// and reduced to bit 0; on pre-R6 (R2) the c.cond.fmt instructions set
// condition flag 0 and the result is formed with Movf/Movt on a preloaded 1.
void InstructionCodeGeneratorMIPS::GenerateFpCompare(IfCondition cond,
                                                     bool gt_bias,
                                                     Primitive::Type type,
                                                     LocationSummary* locations) {
  Register dst = locations->Out().AsRegister<Register>();
  FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  if (type == Primitive::kPrimFloat) {
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqS(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondNE:
          __ CmpEqS(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          // All-ones (equal) + 1 wraps to 0; zero (not equal) + 1 yields 1,
          // inverting the EQ result without an extra instruction.
          __ Addiu(dst, dst, 1);
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtS(FTMP, lhs, rhs);
          } else {
            __ CmpUltS(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeS(FTMP, lhs, rhs);
          } else {
            __ CmpUleS(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGT:
          // GT/GE are emitted as LT/LE with the operands swapped; the
          // ordered/unordered choice flips accordingly.
          if (gt_bias) {
            __ CmpUltS(FTMP, rhs, lhs);
          } else {
            __ CmpLtS(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CmpUleS(FTMP, rhs, lhs);
          } else {
            __ CmpLeS(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    } else {
      switch (cond) {
        case kCondEQ:
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);  // Clear dst if flag 0 is false.
          break;
        case kCondNE:
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movt(dst, ZERO, 0);  // Clear dst if flag 0 is true (inverts EQ).
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtS(0, lhs, rhs);
          } else {
            __ CultS(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeS(0, lhs, rhs);
          } else {
            __ CuleS(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultS(0, rhs, lhs);
          } else {
            __ ColtS(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleS(0, rhs, lhs);
          } else {
            __ ColeS(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    }
  } else {
    // Same structure as the float path, using the double-precision compares.
    DCHECK_EQ(type, Primitive::kPrimDouble);
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqD(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondNE:
          __ CmpEqD(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          __ Addiu(dst, dst, 1);
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtD(FTMP, lhs, rhs);
          } else {
            __ CmpUltD(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeD(FTMP, lhs, rhs);
          } else {
            __ CmpUleD(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CmpUltD(FTMP, rhs, lhs);
          } else {
            __ CmpLtD(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CmpUleD(FTMP, rhs, lhs);
          } else {
            __ CmpLeD(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    } else {
      switch (cond) {
        case kCondEQ:
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondNE:
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movt(dst, ZERO, 0);
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtD(0, lhs, rhs);
          } else {
            __ CultD(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeD(0, lhs, rhs);
          } else {
            __ CuleD(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultD(0, rhs, lhs);
          } else {
            __ ColtD(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleD(0, rhs, lhs);
          } else {
            __ ColeD(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    }
  }
}
4883
// Emits the pre-R6 c.cond.fmt floating-point compare for `cond` into
// condition-code flag `cc` (operands come from `input_locations`).
// Returns true when the emitted compare tests the *opposite* of `cond` --
// only kCondNE, which is emitted as an equality test -- in which case the
// caller must branch/move on the inverted flag. `gt_bias` selects ordered
// vs. unordered compare variants (NaN handling).
bool InstructionCodeGeneratorMIPS::MaterializeFpCompareR2(IfCondition cond,
                                                          bool gt_bias,
                                                          Primitive::Type type,
                                                          LocationSummary* input_locations,
                                                          int cc) {
  FRegister lhs = input_locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister rhs = input_locations->InAt(1).AsFpuRegister<FRegister>();
  CHECK(!codegen_->GetInstructionSetFeatures().IsR6());
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CeqS(cc, lhs, rhs);
        return false;
      case kCondNE:
        __ CeqS(cc, lhs, rhs);
        return true;  // Flag holds EQ; caller must invert.
      case kCondLT:
        if (gt_bias) {
          __ ColtS(cc, lhs, rhs);
        } else {
          __ CultS(cc, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ ColeS(cc, lhs, rhs);
        } else {
          __ CuleS(cc, lhs, rhs);
        }
        return false;
      case kCondGT:
        // GT/GE use LT/LE with swapped operands; the ordered/unordered
        // choice flips with gt_bias accordingly.
        if (gt_bias) {
          __ CultS(cc, rhs, lhs);
        } else {
          __ ColtS(cc, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CuleS(cc, rhs, lhs);
        } else {
          __ ColeS(cc, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CeqD(cc, lhs, rhs);
        return false;
      case kCondNE:
        __ CeqD(cc, lhs, rhs);
        return true;  // Flag holds EQ; caller must invert.
      case kCondLT:
        if (gt_bias) {
          __ ColtD(cc, lhs, rhs);
        } else {
          __ CultD(cc, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ ColeD(cc, lhs, rhs);
        } else {
          __ CuleD(cc, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CultD(cc, rhs, lhs);
        } else {
          __ ColtD(cc, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CuleD(cc, rhs, lhs);
        } else {
          __ ColeD(cc, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  }
}
4975
// Emits the R6 cmp.cond.fmt floating-point compare for `cond`, leaving an
// all-ones/all-zeros mask in FPU register `dst` (operands come from
// `input_locations`). Returns true when the emitted compare tests the
// *opposite* of `cond` -- only kCondNE, emitted as an equality test -- in
// which case the caller must use the inverted branch/select on `dst`.
// `gt_bias` selects ordered vs. unordered compare variants (NaN handling).
bool InstructionCodeGeneratorMIPS::MaterializeFpCompareR6(IfCondition cond,
                                                          bool gt_bias,
                                                          Primitive::Type type,
                                                          LocationSummary* input_locations,
                                                          FRegister dst) {
  FRegister lhs = input_locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister rhs = input_locations->InAt(1).AsFpuRegister<FRegister>();
  CHECK(codegen_->GetInstructionSetFeatures().IsR6());
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(dst, lhs, rhs);
        return false;
      case kCondNE:
        __ CmpEqS(dst, lhs, rhs);
        return true;  // dst holds EQ; caller must invert.
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(dst, lhs, rhs);
        } else {
          __ CmpUltS(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(dst, lhs, rhs);
        } else {
          __ CmpUleS(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        // GT/GE use LT/LE with swapped operands; the ordered/unordered
        // choice flips with gt_bias accordingly.
        if (gt_bias) {
          __ CmpUltS(dst, rhs, lhs);
        } else {
          __ CmpLtS(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(dst, rhs, lhs);
        } else {
          __ CmpLeS(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(dst, lhs, rhs);
        return false;
      case kCondNE:
        __ CmpEqD(dst, lhs, rhs);
        return true;  // dst holds EQ; caller must invert.
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(dst, lhs, rhs);
        } else {
          __ CmpUltD(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(dst, lhs, rhs);
        } else {
          __ CmpUleD(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(dst, rhs, lhs);
        } else {
          __ CmpLtD(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(dst, rhs, lhs);
        } else {
          __ CmpLeD(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  }
}
5067
// Branches to `label` when the floating-point condition `cond` holds between
// InAt(0) and InAt(1); falls through otherwise. `gt_bias` selects ordered
// vs. unordered compare variants (NaN handling). On R6 the compare result is
// materialized in FTMP and tested with Bc1nez/Bc1eqz; on pre-R6 (R2)
// condition flag 0 is set and tested with Bc1t/Bc1f. kCondNE is emitted as
// an equality compare followed by the inverted branch.
void InstructionCodeGeneratorMIPS::GenerateFpCompareAndBranch(IfCondition cond,
                                                              bool gt_bias,
                                                              Primitive::Type type,
                                                              LocationSummary* locations,
                                                              MipsLabel* label) {
  FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  if (type == Primitive::kPrimFloat) {
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqS(FTMP, lhs, rhs);
          __ Bc1nez(FTMP, label);
          break;
        case kCondNE:
          __ CmpEqS(FTMP, lhs, rhs);
          __ Bc1eqz(FTMP, label);  // Branch when NOT equal.
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtS(FTMP, lhs, rhs);
          } else {
            __ CmpUltS(FTMP, lhs, rhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeS(FTMP, lhs, rhs);
          } else {
            __ CmpUleS(FTMP, lhs, rhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondGT:
          // GT/GE use LT/LE with swapped operands; the ordered/unordered
          // choice flips with gt_bias accordingly.
          if (gt_bias) {
            __ CmpUltS(FTMP, rhs, lhs);
          } else {
            __ CmpLtS(FTMP, rhs, lhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CmpUleS(FTMP, rhs, lhs);
          } else {
            __ CmpLeS(FTMP, rhs, lhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition";
          UNREACHABLE();
      }
    } else {
      switch (cond) {
        case kCondEQ:
          __ CeqS(0, lhs, rhs);
          __ Bc1t(0, label);
          break;
        case kCondNE:
          __ CeqS(0, lhs, rhs);
          __ Bc1f(0, label);  // Branch when NOT equal.
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtS(0, lhs, rhs);
          } else {
            __ CultS(0, lhs, rhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeS(0, lhs, rhs);
          } else {
            __ CuleS(0, lhs, rhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultS(0, rhs, lhs);
          } else {
            __ ColtS(0, rhs, lhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleS(0, rhs, lhs);
          } else {
            __ ColeS(0, rhs, lhs);
          }
          __ Bc1t(0, label);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition";
          UNREACHABLE();
      }
    }
  } else {
    // Same structure as the float path, using the double-precision compares.
    DCHECK_EQ(type, Primitive::kPrimDouble);
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqD(FTMP, lhs, rhs);
          __ Bc1nez(FTMP, label);
          break;
        case kCondNE:
          __ CmpEqD(FTMP, lhs, rhs);
          __ Bc1eqz(FTMP, label);
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtD(FTMP, lhs, rhs);
          } else {
            __ CmpUltD(FTMP, lhs, rhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeD(FTMP, lhs, rhs);
          } else {
            __ CmpUleD(FTMP, lhs, rhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CmpUltD(FTMP, rhs, lhs);
          } else {
            __ CmpLtD(FTMP, rhs, lhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CmpUleD(FTMP, rhs, lhs);
          } else {
            __ CmpLeD(FTMP, rhs, lhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition";
          UNREACHABLE();
      }
    } else {
      switch (cond) {
        case kCondEQ:
          __ CeqD(0, lhs, rhs);
          __ Bc1t(0, label);
          break;
        case kCondNE:
          __ CeqD(0, lhs, rhs);
          __ Bc1f(0, label);
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtD(0, lhs, rhs);
          } else {
            __ CultD(0, lhs, rhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeD(0, lhs, rhs);
          } else {
            __ CuleD(0, lhs, rhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultD(0, rhs, lhs);
          } else {
            __ ColtD(0, rhs, lhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleD(0, rhs, lhs);
          } else {
            __ ColeD(0, rhs, lhs);
          }
          __ Bc1t(0, label);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition";
          UNREACHABLE();
      }
    }
  }
}
5267
// Emits the branch(es) implementing a two-way test on the condition input of
// `instruction` (input at `condition_input_index`). A null `true_target`
// (resp. `false_target`) means that successor falls through and needs no
// explicit branch. Dispatches per condition type: constant conditions become
// unconditional jumps; materialized conditions are tested against zero;
// unmaterialized HConditions are fused with the compare-and-branch emitters.
void InstructionCodeGeneratorMIPS::GenerateTestAndBranch(HInstruction* instruction,
                                                         size_t condition_input_index,
                                                         MipsLabel* true_target,
                                                         MipsLabel* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ B(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ B(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Beqz(cond_val.AsRegister<Register>(), false_target);
    } else {
      __ Bnez(cond_val.AsRegister<Register>(), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();
    Primitive::Type type = condition->InputAt(0)->GetType();
    LocationSummary* locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    MipsLabel* branch_target = true_target;

    // Case (1): branch to false_target on the opposite condition.
    if (true_target == nullptr) {
      if_cond = condition->GetOppositeCondition();
      branch_target = false_target;
    }

    switch (type) {
      default:
        GenerateIntCompareAndBranch(if_cond, locations, branch_target);
        break;
      case Primitive::kPrimLong:
        GenerateLongCompareAndBranch(if_cond, locations, branch_target);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
        break;
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted above and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ B(false_target);
  }
}
5343
5344void LocationsBuilderMIPS::VisitIf(HIf* if_instr) {
5345 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00005346 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005347 locations->SetInAt(0, Location::RequiresRegister());
5348 }
5349}
5350
5351void InstructionCodeGeneratorMIPS::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00005352 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
5353 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
5354 MipsLabel* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
5355 nullptr : codegen_->GetLabelOf(true_successor);
5356 MipsLabel* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
5357 nullptr : codegen_->GetLabelOf(false_successor);
5358 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005359}
5360
5361void LocationsBuilderMIPS::VisitDeoptimize(HDeoptimize* deoptimize) {
5362 LocationSummary* locations = new (GetGraph()->GetArena())
5363 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01005364 InvokeRuntimeCallingConvention calling_convention;
5365 RegisterSet caller_saves = RegisterSet::Empty();
5366 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5367 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00005368 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005369 locations->SetInAt(0, Location::RequiresRegister());
5370 }
5371}
5372
5373void InstructionCodeGeneratorMIPS::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08005374 SlowPathCodeMIPS* slow_path =
5375 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00005376 GenerateTestAndBranch(deoptimize,
5377 /* condition_input_index */ 0,
5378 slow_path->GetEntryLabel(),
5379 /* false_target */ nullptr);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005380}
5381
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005382// This function returns true if a conditional move can be generated for HSelect.
5383// Otherwise it returns false and HSelect must be implemented in terms of conditonal
5384// branches and regular moves.
5385//
5386// If `locations_to_set` isn't nullptr, its inputs and outputs are set for HSelect.
5387//
5388// While determining feasibility of a conditional move and setting inputs/outputs
5389// are two distinct tasks, this function does both because they share quite a bit
5390// of common logic.
static bool CanMoveConditionally(HSelect* select, bool is_r6, LocationSummary* locations_to_set) {
  bool materialized = IsBooleanValueOrMaterializedCondition(select->GetCondition());
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  HCondition* condition = cond->AsCondition();

  // A materialized condition is consumed as an int (0/1) register value.
  Primitive::Type cond_type = materialized ? Primitive::kPrimInt : condition->InputAt(0)->GetType();
  Primitive::Type dst_type = select->GetType();

  HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
  HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
  bool is_true_value_zero_constant =
      (cst_true_value != nullptr && cst_true_value->IsZeroBitPattern());
  bool is_false_value_zero_constant =
      (cst_false_value != nullptr && cst_false_value->IsZeroBitPattern());

  bool can_move_conditionally = false;
  bool use_const_for_false_in = false;
  bool use_const_for_true_in = false;

  // A constant condition is left to the branch-based fallback;
  // `can_move_conditionally` remains false in that case.
  if (!cond->IsConstant()) {
    // The `default` labels below cover int-sized (and reference) types;
    // long, float and double have explicit cases.
    switch (cond_type) {
      default:
        switch (dst_type) {
          default:
            // Moving int on int condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // seleqz out_reg, false_reg, cond_reg
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // selnez out_reg, true_reg, cond_reg
                can_move_conditionally = true;
                use_const_for_false_in = true;
              } else if (materialized) {
                // Not materializing unmaterialized int conditions
                // to keep the instruction count low.
                // selnez AT, true_reg, cond_reg
                // seleqz TMP, false_reg, cond_reg
                // or out_reg, AT, TMP
                can_move_conditionally = true;
              }
            } else {
              // movn out_reg, true_reg/ZERO, cond_reg
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimLong:
            // Moving long on int condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // seleqz out_reg_lo, false_reg_lo, cond_reg
                // seleqz out_reg_hi, false_reg_hi, cond_reg
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // selnez out_reg_lo, true_reg_lo, cond_reg
                // selnez out_reg_hi, true_reg_hi, cond_reg
                can_move_conditionally = true;
                use_const_for_false_in = true;
              }
              // Other long conditional moves would generate 6+ instructions,
              // which is too many.
            } else {
              // movn out_reg_lo, true_reg_lo/ZERO, cond_reg
              // movn out_reg_hi, true_reg_hi/ZERO, cond_reg
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimFloat:
          case Primitive::kPrimDouble:
            // Moving float/double on int condition.
            if (is_r6) {
              if (materialized) {
                // Not materializing unmaterialized int conditions
                // to keep the instruction count low.
                can_move_conditionally = true;
                if (is_true_value_zero_constant) {
                  // sltu TMP, ZERO, cond_reg
                  // mtc1 TMP, temp_cond_reg
                  // seleqz.fmt out_reg, false_reg, temp_cond_reg
                  use_const_for_true_in = true;
                } else if (is_false_value_zero_constant) {
                  // sltu TMP, ZERO, cond_reg
                  // mtc1 TMP, temp_cond_reg
                  // selnez.fmt out_reg, true_reg, temp_cond_reg
                  use_const_for_false_in = true;
                } else {
                  // sltu TMP, ZERO, cond_reg
                  // mtc1 TMP, temp_cond_reg
                  // sel.fmt temp_cond_reg, false_reg, true_reg
                  // mov.fmt out_reg, temp_cond_reg
                }
              }
            } else {
              // movn.fmt out_reg, true_reg, cond_reg
              can_move_conditionally = true;
            }
            break;
        }
        break;
      case Primitive::kPrimLong:
        // We don't materialize long comparison now
        // and use conditional branches instead.
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        switch (dst_type) {
          default:
            // Moving int on float/double condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // seleqz out_reg, false_reg, TMP
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // selnez out_reg, true_reg, TMP
                can_move_conditionally = true;
                use_const_for_false_in = true;
              } else {
                // mfc1 TMP, temp_cond_reg
                // selnez AT, true_reg, TMP
                // seleqz TMP, false_reg, TMP
                // or out_reg, AT, TMP
                can_move_conditionally = true;
              }
            } else {
              // movt out_reg, true_reg/ZERO, cc
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimLong:
            // Moving long on float/double condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // seleqz out_reg_lo, false_reg_lo, TMP
                // seleqz out_reg_hi, false_reg_hi, TMP
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // selnez out_reg_lo, true_reg_lo, TMP
                // selnez out_reg_hi, true_reg_hi, TMP
                can_move_conditionally = true;
                use_const_for_false_in = true;
              }
              // Other long conditional moves would generate 6+ instructions,
              // which is too many.
            } else {
              // movt out_reg_lo, true_reg_lo/ZERO, cc
              // movt out_reg_hi, true_reg_hi/ZERO, cc
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimFloat:
          case Primitive::kPrimDouble:
            // Moving float/double on float/double condition.
            if (is_r6) {
              can_move_conditionally = true;
              if (is_true_value_zero_constant) {
                // seleqz.fmt out_reg, false_reg, temp_cond_reg
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // selnez.fmt out_reg, true_reg, temp_cond_reg
                use_const_for_false_in = true;
              } else {
                // sel.fmt temp_cond_reg, false_reg, true_reg
                // mov.fmt out_reg, temp_cond_reg
              }
            } else {
              // movt.fmt out_reg, true_reg, cc
              can_move_conditionally = true;
            }
            break;
        }
        break;
    }
  }

  // At most one of the two inputs may be replaced by a zero constant.
  if (can_move_conditionally) {
    DCHECK(!use_const_for_false_in || !use_const_for_true_in);
  } else {
    DCHECK(!use_const_for_false_in);
    DCHECK(!use_const_for_true_in);
  }

  if (locations_to_set != nullptr) {
    // Input 0 is the false value, input 1 the true value; either may be the
    // zero constant chosen above instead of a register.
    if (use_const_for_false_in) {
      locations_to_set->SetInAt(0, Location::ConstantLocation(cst_false_value));
    } else {
      locations_to_set->SetInAt(0,
                                Primitive::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (use_const_for_true_in) {
      locations_to_set->SetInAt(1, Location::ConstantLocation(cst_true_value));
    } else {
      locations_to_set->SetInAt(1,
                                Primitive::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (materialized) {
      locations_to_set->SetInAt(2, Location::RequiresRegister());
    }
    // On R6 we don't require the output to be the same as the
    // first input for conditional moves unlike on R2.
    bool is_out_same_as_first_in = !can_move_conditionally || !is_r6;
    if (is_out_same_as_first_in) {
      locations_to_set->SetOut(Location::SameAsFirstInput());
    } else {
      locations_to_set->SetOut(Primitive::IsFloatingPointType(dst_type)
                                   ? Location::RequiresFpuRegister()
                                   : Location::RequiresRegister());
    }
  }

  return can_move_conditionally;
}
5618
// Emits a pre-R6 (R2) conditional move for `select`:
//   - MOVZ/MOVN when the condition is an int value in a GPR,
//   - MOVF/MOVT when the condition is an FP compare result in condition
//     code `cond_cc`.
// The output is allocated as SameAsFirstInput (the false value), so only the
// true value (input 1) needs to be conditionally copied in.
void InstructionCodeGeneratorMIPS::GenConditionalMoveR2(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location src = locations->InAt(1);
  // ZERO doubles as the source when input 1 is the zero-bit-pattern constant.
  Register src_reg = ZERO;
  Register src_reg_high = ZERO;
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  Register cond_reg = TMP;
  int cond_cc = 0;
  Primitive::Type cond_type = Primitive::kPrimInt;
  bool cond_inverted = false;
  Primitive::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<Register>();
  } else {
    // Materialize the unmaterialized compare here; `cond_inverted` tells us
    // whether the produced value/condition-code has the opposite sense.
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        DCHECK_NE(cond_type, Primitive::kPrimLong);
        cond_inverted = MaterializeIntCompare(if_cond, cond_locations, cond_reg);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        cond_inverted = MaterializeFpCompareR2(if_cond,
                                               condition->IsGtBias(),
                                               cond_type,
                                               cond_locations,
                                               cond_cc);
        break;
    }
  }

  DCHECK(dst.Equals(locations->InAt(0)));
  if (src.IsRegister()) {
    src_reg = src.AsRegister<Register>();
  } else if (src.IsRegisterPair()) {
    src_reg = src.AsRegisterPairLow<Register>();
    src_reg_high = src.AsRegisterPairHigh<Register>();
  } else if (src.IsConstant()) {
    DCHECK(src.GetConstant()->IsZeroBitPattern());
  }

  // Int conditions use MOVZ/MOVN on cond_reg; FP conditions use MOVF/MOVT
  // on cond_cc. Inverted conditions swap to the opposite-sense instruction.
  switch (cond_type) {
    default:
      switch (dst_type) {
        default:
          if (cond_inverted) {
            __ Movz(dst.AsRegister<Register>(), src_reg, cond_reg);
          } else {
            __ Movn(dst.AsRegister<Register>(), src_reg, cond_reg);
          }
          break;
        case Primitive::kPrimLong:
          if (cond_inverted) {
            __ Movz(dst.AsRegisterPairLow<Register>(), src_reg, cond_reg);
            __ Movz(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_reg);
          } else {
            __ Movn(dst.AsRegisterPairLow<Register>(), src_reg, cond_reg);
            __ Movn(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_reg);
          }
          break;
        case Primitive::kPrimFloat:
          if (cond_inverted) {
            __ MovzS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          } else {
            __ MovnS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          }
          break;
        case Primitive::kPrimDouble:
          if (cond_inverted) {
            __ MovzD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          } else {
            __ MovnD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          }
          break;
      }
      break;
    case Primitive::kPrimLong:
      // Long compares are never materialized (see CanMoveConditionally).
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      switch (dst_type) {
        default:
          if (cond_inverted) {
            __ Movf(dst.AsRegister<Register>(), src_reg, cond_cc);
          } else {
            __ Movt(dst.AsRegister<Register>(), src_reg, cond_cc);
          }
          break;
        case Primitive::kPrimLong:
          if (cond_inverted) {
            __ Movf(dst.AsRegisterPairLow<Register>(), src_reg, cond_cc);
            __ Movf(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_cc);
          } else {
            __ Movt(dst.AsRegisterPairLow<Register>(), src_reg, cond_cc);
            __ Movt(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_cc);
          }
          break;
        case Primitive::kPrimFloat:
          if (cond_inverted) {
            __ MovfS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          } else {
            __ MovtS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          }
          break;
        case Primitive::kPrimDouble:
          if (cond_inverted) {
            __ MovfD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          } else {
            __ MovtD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          }
          break;
      }
      break;
  }
}
5740
// Emits an R6 branchless select for `select` using SELEQZ/SELNEZ (GPR) and
// SELEQZ.fmt/SELNEZ.fmt/SEL.fmt (FPU). Unlike the R2 path, the output need
// not alias an input; when neither input is the zero constant, both inputs
// are combined (selnez + seleqz + or, or sel.fmt).
void InstructionCodeGeneratorMIPS::GenConditionalMoveR6(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location false_src = locations->InAt(0);
  Location true_src = locations->InAt(1);
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  Register cond_reg = TMP;
  FRegister fcond_reg = FTMP;
  Primitive::Type cond_type = Primitive::kPrimInt;
  bool cond_inverted = false;
  Primitive::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<Register>();
  } else {
    // Materialize the unmaterialized compare: int compares into cond_reg,
    // FP compares into fcond_reg. `cond_inverted` flags an opposite sense.
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        DCHECK_NE(cond_type, Primitive::kPrimLong);
        cond_inverted = MaterializeIntCompare(if_cond, cond_locations, cond_reg);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        cond_inverted = MaterializeFpCompareR6(if_cond,
                                               condition->IsGtBias(),
                                               cond_type,
                                               cond_locations,
                                               fcond_reg);
        break;
    }
  }

  // Constant inputs are only ever the zero bit pattern (selected by
  // CanMoveConditionally), represented by omitting the corresponding select.
  if (true_src.IsConstant()) {
    DCHECK(true_src.GetConstant()->IsZeroBitPattern());
  }
  if (false_src.IsConstant()) {
    DCHECK(false_src.GetConstant()->IsZeroBitPattern());
  }

  switch (dst_type) {
    default:
      if (Primitive::IsFloatingPointType(cond_type)) {
        // Move the FP compare result into a GPR for the integer selects.
        __ Mfc1(cond_reg, fcond_reg);
      }
      if (true_src.IsConstant()) {
        if (cond_inverted) {
          __ Selnez(dst.AsRegister<Register>(), false_src.AsRegister<Register>(), cond_reg);
        } else {
          __ Seleqz(dst.AsRegister<Register>(), false_src.AsRegister<Register>(), cond_reg);
        }
      } else if (false_src.IsConstant()) {
        if (cond_inverted) {
          __ Seleqz(dst.AsRegister<Register>(), true_src.AsRegister<Register>(), cond_reg);
        } else {
          __ Selnez(dst.AsRegister<Register>(), true_src.AsRegister<Register>(), cond_reg);
        }
      } else {
        // AT and TMP are used as scratch here, so the condition must not
        // live in AT.
        DCHECK_NE(cond_reg, AT);
        if (cond_inverted) {
          __ Seleqz(AT, true_src.AsRegister<Register>(), cond_reg);
          __ Selnez(TMP, false_src.AsRegister<Register>(), cond_reg);
        } else {
          __ Selnez(AT, true_src.AsRegister<Register>(), cond_reg);
          __ Seleqz(TMP, false_src.AsRegister<Register>(), cond_reg);
        }
        __ Or(dst.AsRegister<Register>(), AT, TMP);
      }
      break;
    case Primitive::kPrimLong: {
      if (Primitive::IsFloatingPointType(cond_type)) {
        __ Mfc1(cond_reg, fcond_reg);
      }
      Register dst_lo = dst.AsRegisterPairLow<Register>();
      Register dst_hi = dst.AsRegisterPairHigh<Register>();
      if (true_src.IsConstant()) {
        Register src_lo = false_src.AsRegisterPairLow<Register>();
        Register src_hi = false_src.AsRegisterPairHigh<Register>();
        if (cond_inverted) {
          __ Selnez(dst_lo, src_lo, cond_reg);
          __ Selnez(dst_hi, src_hi, cond_reg);
        } else {
          __ Seleqz(dst_lo, src_lo, cond_reg);
          __ Seleqz(dst_hi, src_hi, cond_reg);
        }
      } else {
        // For long, one of the inputs must be the zero constant
        // (see CanMoveConditionally).
        DCHECK(false_src.IsConstant());
        Register src_lo = true_src.AsRegisterPairLow<Register>();
        Register src_hi = true_src.AsRegisterPairHigh<Register>();
        if (cond_inverted) {
          __ Seleqz(dst_lo, src_lo, cond_reg);
          __ Seleqz(dst_hi, src_hi, cond_reg);
        } else {
          __ Selnez(dst_lo, src_lo, cond_reg);
          __ Selnez(dst_hi, src_hi, cond_reg);
        }
      }
      break;
    }
    case Primitive::kPrimFloat: {
      if (!Primitive::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FRegister dst_reg = dst.AsFpuRegister<FRegister>();
      if (true_src.IsConstant()) {
        FRegister src_reg = false_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FRegister src_reg = true_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        }
      } else {
        // sel.fmt overwrites its condition operand, so select into
        // fcond_reg and move to the destination afterwards.
        if (cond_inverted) {
          __ SelS(fcond_reg,
                  true_src.AsFpuRegister<FRegister>(),
                  false_src.AsFpuRegister<FRegister>());
        } else {
          __ SelS(fcond_reg,
                  false_src.AsFpuRegister<FRegister>(),
                  true_src.AsFpuRegister<FRegister>());
        }
        __ MovS(dst_reg, fcond_reg);
      }
      break;
    }
    case Primitive::kPrimDouble: {
      if (!Primitive::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FRegister dst_reg = dst.AsFpuRegister<FRegister>();
      if (true_src.IsConstant()) {
        FRegister src_reg = false_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FRegister src_reg = true_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        }
      } else {
        if (cond_inverted) {
          __ SelD(fcond_reg,
                  true_src.AsFpuRegister<FRegister>(),
                  false_src.AsFpuRegister<FRegister>());
        } else {
          __ SelD(fcond_reg,
                  false_src.AsFpuRegister<FRegister>(),
                  true_src.AsFpuRegister<FRegister>());
        }
        __ MovD(dst_reg, fcond_reg);
      }
      break;
    }
  }
}
5914
Goran Jakovljevicc6418422016-12-05 16:31:55 +01005915void LocationsBuilderMIPS::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
5916 LocationSummary* locations = new (GetGraph()->GetArena())
5917 LocationSummary(flag, LocationSummary::kNoCall);
5918 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07005919}
5920
Goran Jakovljevicc6418422016-12-05 16:31:55 +01005921void InstructionCodeGeneratorMIPS::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
5922 __ LoadFromOffset(kLoadWord,
5923 flag->GetLocations()->Out().AsRegister<Register>(),
5924 SP,
5925 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
Mingyao Yang063fc772016-08-02 11:02:54 -07005926}
5927
David Brazdil74eb1b22015-12-14 11:44:01 +00005928void LocationsBuilderMIPS::VisitSelect(HSelect* select) {
5929 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005930 CanMoveConditionally(select, codegen_->GetInstructionSetFeatures().IsR6(), locations);
David Brazdil74eb1b22015-12-14 11:44:01 +00005931}
5932
5933void InstructionCodeGeneratorMIPS::VisitSelect(HSelect* select) {
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005934 bool is_r6 = codegen_->GetInstructionSetFeatures().IsR6();
5935 if (CanMoveConditionally(select, is_r6, /* locations_to_set */ nullptr)) {
5936 if (is_r6) {
5937 GenConditionalMoveR6(select);
5938 } else {
5939 GenConditionalMoveR2(select);
5940 }
5941 } else {
5942 LocationSummary* locations = select->GetLocations();
5943 MipsLabel false_target;
5944 GenerateTestAndBranch(select,
5945 /* condition_input_index */ 2,
5946 /* true_target */ nullptr,
5947 &false_target);
5948 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
5949 __ Bind(&false_target);
5950 }
David Brazdil74eb1b22015-12-14 11:44:01 +00005951}
5952
// HNativeDebugInfo has no inputs or outputs; an empty LocationSummary
// is still allocated for the instruction.
void LocationsBuilderMIPS::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetArena()) LocationSummary(info);
}
5956
// Intentionally empty: no code is emitted for HNativeDebugInfo here.
void InstructionCodeGeneratorMIPS::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
5960
// Emits a single MIPS nop instruction.
void CodeGeneratorMIPS::GenerateNop() {
  __ Nop();
}
5964
// Builds the LocationSummary for an instance/static field get.
// Volatile 64-bit (long/double) gets are implemented via a runtime call
// and thus use the runtime calling convention; reference gets may need a
// read-barrier slow path and extra temps.
void LocationsBuilderMIPS::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
  Primitive::Type field_type = field_info.GetFieldType();
  bool is_wide = (field_type == Primitive::kPrimLong) || (field_type == Primitive::kPrimDouble);
  bool generate_volatile = field_info.IsVolatile() && is_wide;
  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (field_type == Primitive::kPrimNot);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      generate_volatile
          ? LocationSummary::kCallOnMainOnly
          : (object_field_get_with_read_barrier
              ? LocationSummary::kCallOnSlowPath
              : LocationSummary::kNoCall));

  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  // Input 0: the object (or class) holding the field.
  locations->SetInAt(0, Location::RequiresRegister());
  if (generate_volatile) {
    InvokeRuntimeCallingConvention calling_convention;
    // need A0 to hold base + offset
    locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    if (field_type == Primitive::kPrimLong) {
      locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimLong));
    } else {
      // Use Location::Any() to prevent situations when running out of available fp registers.
      locations->SetOut(Location::Any());
      // Need some temp core regs since FP results are returned in core registers
      Location reg = calling_convention.GetReturnLocation(Primitive::kPrimLong);
      locations->AddTemp(Location::RegisterLocation(reg.AsRegisterPairLow<Register>()));
      locations->AddTemp(Location::RegisterLocation(reg.AsRegisterPairHigh<Register>()));
    }
  } else {
    if (Primitive::IsFloatingPointType(instruction->GetType())) {
      locations->SetOut(Location::RequiresFpuRegister());
    } else {
      // The output overlaps in the case of an object field get with
      // read barriers enabled: we do not want the move to overwrite the
      // object's location, as we need it to emit the read barrier.
      locations->SetOut(Location::RequiresRegister(),
                        object_field_get_with_read_barrier
                            ? Location::kOutputOverlap
                            : Location::kNoOutputOverlap);
    }
    if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
      // We need a temporary register for the read barrier marking slow
      // path in CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier.
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
6016
// Emits code for an instance/static field get. Plain fields use a single
// load; volatile 64-bit fields call the kQuickA64Load entrypoint for an
// atomic load; reference fields may emit a read barrier. Implicit null
// checks are folded into the load where possible via `null_checker`.
void InstructionCodeGeneratorMIPS::HandleFieldGet(HInstruction* instruction,
                                                  const FieldInfo& field_info,
                                                  uint32_t dex_pc) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = obj_loc.AsRegister<Register>();
  Location dst_loc = locations->Out();
  LoadOperandType load_type = kLoadUnsignedByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Pick the load width/signedness matching the field type.
  switch (type) {
    case Primitive::kPrimBoolean:
      load_type = kLoadUnsignedByte;
      break;
    case Primitive::kPrimByte:
      load_type = kLoadSignedByte;
      break;
    case Primitive::kPrimShort:
      load_type = kLoadSignedHalfword;
      break;
    case Primitive::kPrimChar:
      load_type = kLoadUnsignedHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot:
      load_type = kLoadWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      load_type = kLoadDoubleword;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (is_volatile && load_type == kLoadDoubleword) {
    // Volatile 64-bit load: delegate to the runtime for atomicity.
    InvokeRuntimeCallingConvention calling_convention;
    __ Addiu32(locations->GetTemp(0).AsRegister<Register>(), obj, offset);
    // Do implicit Null check
    __ Lw(ZERO, locations->GetTemp(0).AsRegister<Register>(), 0);
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
    codegen_->InvokeRuntime(kQuickA64Load, instruction, dex_pc);
    CheckEntrypointTypes<kQuickA64Load, int64_t, volatile const int64_t*>();
    if (type == Primitive::kPrimDouble) {
      // FP results are returned in core registers. Need to move them.
      if (dst_loc.IsFpuRegister()) {
        __ Mtc1(locations->GetTemp(1).AsRegister<Register>(), dst_loc.AsFpuRegister<FRegister>());
        __ MoveToFpuHigh(locations->GetTemp(2).AsRegister<Register>(),
                         dst_loc.AsFpuRegister<FRegister>());
      } else {
        DCHECK(dst_loc.IsDoubleStackSlot());
        __ StoreToOffset(kStoreWord,
                         locations->GetTemp(1).AsRegister<Register>(),
                         SP,
                         dst_loc.GetStackIndex());
        __ StoreToOffset(kStoreWord,
                         locations->GetTemp(2).AsRegister<Register>(),
                         SP,
                         dst_loc.GetStackIndex() + 4);
      }
    }
  } else {
    if (type == Primitive::kPrimNot) {
      // /* HeapReference<Object> */ dst = *(obj + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        Location temp_loc = locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                        dst_loc,
                                                        obj,
                                                        offset,
                                                        temp_loc,
                                                        /* needs_null_check */ true);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ LoadFromOffset(kLoadWord, dst_loc.AsRegister<Register>(), obj, offset, null_checker);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
      }
    } else if (!Primitive::IsFloatingPointType(type)) {
      Register dst;
      if (type == Primitive::kPrimLong) {
        DCHECK(dst_loc.IsRegisterPair());
        dst = dst_loc.AsRegisterPairLow<Register>();
      } else {
        DCHECK(dst_loc.IsRegister());
        dst = dst_loc.AsRegister<Register>();
      }
      __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
    } else {
      DCHECK(dst_loc.IsFpuRegister());
      FRegister dst = dst_loc.AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ LoadSFromOffset(dst, obj, offset, null_checker);
      } else {
        __ LoadDFromOffset(dst, obj, offset, null_checker);
      }
    }
  }

  // Memory barriers, in the case of references, are handled in the
  // previous switch statement.
  if (is_volatile && (type != Primitive::kPrimNot)) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
6136
// Builds the LocationSummary for an instance/static field set.
// Volatile 64-bit (long/double) sets are implemented via a runtime call
// and thus pass the value according to the runtime calling convention.
void LocationsBuilderMIPS::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
  Primitive::Type field_type = field_info.GetFieldType();
  bool is_wide = (field_type == Primitive::kPrimLong) || (field_type == Primitive::kPrimDouble);
  bool generate_volatile = field_info.IsVolatile() && is_wide;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, generate_volatile ? LocationSummary::kCallOnMainOnly : LocationSummary::kNoCall);

  // Input 0: the object (or class) holding the field.
  locations->SetInAt(0, Location::RequiresRegister());
  if (generate_volatile) {
    InvokeRuntimeCallingConvention calling_convention;
    // need A0 to hold base + offset
    locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    if (field_type == Primitive::kPrimLong) {
      locations->SetInAt(1, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
    } else {
      // Use Location::Any() to prevent situations when running out of available fp registers.
      locations->SetInAt(1, Location::Any());
      // Pass FP parameters in core registers.
      locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
      locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
    }
  } else {
    if (Primitive::IsFloatingPointType(field_type)) {
      locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
    } else {
      locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
    }
  }
}
6167
// Generates code for an instance/static field store on MIPS32.
// - Non-volatile (and volatile 32-bit) stores are emitted inline, with an
//   implicit null check folded into the store via `null_checker`.
// - Volatile 64-bit stores (long/double) cannot be done atomically inline on
//   MIPS32, so they call the kQuickA64Store runtime entrypoint instead.
// - Reference stores mark the GC card, and poison the reference first when
//   heap poisoning is enabled.
// Volatile stores are bracketed by AnyStore/AnyAny memory barriers.
void InstructionCodeGeneratorMIPS::HandleFieldSet(HInstruction* instruction,
                                                  const FieldInfo& field_info,
                                                  uint32_t dex_pc,
                                                  bool value_can_be_null) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location value_location = locations->InAt(1);
  StoreOperandType store_type = kStoreByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Map the field type to the width of the store instruction.
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      store_type = kStoreByte;
      break;
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
      store_type = kStoreHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot:
      store_type = kStoreWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      store_type = kStoreDoubleword;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  if (is_volatile && store_type == kStoreDoubleword) {
    // Atomic 64-bit store: delegate to the runtime (A64 entrypoint).
    // NOTE(review): `calling_convention` appears unused in this path; the
    // temps were pinned to the convention's registers when locations were
    // built — confirm before removing.
    InvokeRuntimeCallingConvention calling_convention;
    // temp0 = address of the field (first runtime argument).
    __ Addiu32(locations->GetTemp(0).AsRegister<Register>(), obj, offset);
    // Do implicit Null check.
    __ Lw(ZERO, locations->GetTemp(0).AsRegister<Register>(), 0);
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
    if (type == Primitive::kPrimDouble) {
      // Pass FP parameters in core registers.
      if (value_location.IsFpuRegister()) {
        // Split the double between the low (Mfc1) and high (MoveFromFpuHigh)
        // 32-bit halves.
        __ Mfc1(locations->GetTemp(1).AsRegister<Register>(),
                value_location.AsFpuRegister<FRegister>());
        __ MoveFromFpuHigh(locations->GetTemp(2).AsRegister<Register>(),
                           value_location.AsFpuRegister<FRegister>());
      } else if (value_location.IsDoubleStackSlot()) {
        // Load the two 32-bit halves straight from the stack slot.
        __ LoadFromOffset(kLoadWord,
                          locations->GetTemp(1).AsRegister<Register>(),
                          SP,
                          value_location.GetStackIndex());
        __ LoadFromOffset(kLoadWord,
                          locations->GetTemp(2).AsRegister<Register>(),
                          SP,
                          value_location.GetStackIndex() + 4);
      } else {
        // Materialize a double constant into the core register pair.
        DCHECK(value_location.IsConstant());
        DCHECK(value_location.GetConstant()->IsDoubleConstant());
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ LoadConst64(locations->GetTemp(2).AsRegister<Register>(),
                       locations->GetTemp(1).AsRegister<Register>(),
                       value);
      }
    }
    codegen_->InvokeRuntime(kQuickA64Store, instruction, dex_pc);
    CheckEntrypointTypes<kQuickA64Store, void, volatile int64_t *, int64_t>();
  } else {
    // Inline store path.
    if (value_location.IsConstant()) {
      int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
      __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
    } else if (!Primitive::IsFloatingPointType(type)) {
      Register src;
      if (type == Primitive::kPrimLong) {
        // StoreToOffset with kStoreDoubleword uses the pair starting at the
        // low register.
        src = value_location.AsRegisterPairLow<Register>();
      } else {
        src = value_location.AsRegister<Register>();
      }
      if (kPoisonHeapReferences && needs_write_barrier) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        DCHECK_EQ(type, Primitive::kPrimNot);
        __ PoisonHeapReference(TMP, src);
        __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
      } else {
        __ StoreToOffset(store_type, src, obj, offset, null_checker);
      }
    } else {
      // Floating-point store straight from the FPU register.
      FRegister src = value_location.AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ StoreSToOffset(src, obj, offset, null_checker);
      } else {
        __ StoreDToOffset(src, obj, offset, null_checker);
      }
    }
  }

  if (needs_write_barrier) {
    // Mark the GC card for the stored reference.
    Register src = value_location.AsRegister<Register>();
    codegen_->MarkGCCard(obj, src, value_can_be_null);
  }

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
6282
// Builds register constraints for an instance field read; shared logic lives
// in HandleFieldGet.
void LocationsBuilderMIPS::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
6286
// Emits code for an instance field read; shared logic lives in HandleFieldGet.
void InstructionCodeGeneratorMIPS::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo(), instruction->GetDexPc());
}
6290
// Builds register constraints for an instance field write; shared logic lives
// in HandleFieldSet.
void LocationsBuilderMIPS::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
6294
// Emits code for an instance field write; shared logic lives in
// HandleFieldSet, which also handles the GC card marking.
void InstructionCodeGeneratorMIPS::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction,
                 instruction->GetFieldInfo(),
                 instruction->GetDexPc(),
                 instruction->GetValueCanBeNull());
}
6301
Alexey Frunze15958152017-02-09 19:08:30 -08006302void InstructionCodeGeneratorMIPS::GenerateReferenceLoadOneRegister(
6303 HInstruction* instruction,
6304 Location out,
6305 uint32_t offset,
6306 Location maybe_temp,
6307 ReadBarrierOption read_barrier_option) {
6308 Register out_reg = out.AsRegister<Register>();
6309 if (read_barrier_option == kWithReadBarrier) {
6310 CHECK(kEmitCompilerReadBarrier);
6311 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
6312 if (kUseBakerReadBarrier) {
6313 // Load with fast path based Baker's read barrier.
6314 // /* HeapReference<Object> */ out = *(out + offset)
6315 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
6316 out,
6317 out_reg,
6318 offset,
6319 maybe_temp,
6320 /* needs_null_check */ false);
6321 } else {
6322 // Load with slow path based read barrier.
6323 // Save the value of `out` into `maybe_temp` before overwriting it
6324 // in the following move operation, as we will need it for the
6325 // read barrier below.
6326 __ Move(maybe_temp.AsRegister<Register>(), out_reg);
6327 // /* HeapReference<Object> */ out = *(out + offset)
6328 __ LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
6329 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
6330 }
6331 } else {
6332 // Plain load with no read barrier.
6333 // /* HeapReference<Object> */ out = *(out + offset)
6334 __ LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
6335 __ MaybeUnpoisonHeapReference(out_reg);
6336 }
6337}
6338
// Loads a heap reference into a distinct register: `out` = *(`obj` + `offset`),
// applying the requested read barrier strategy. Unlike the one-register
// variant, the base object register is left intact, so no extra save is needed
// for the slow-path read barrier.
void InstructionCodeGeneratorMIPS::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  Register out_reg = out.AsRegister<Register>();
  Register obj_reg = obj.AsRegister<Register>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6373
// Loads a GC root: `root` = *(`obj` + `offset`), applying the requested read
// barrier strategy. With Baker read barriers the per-register mark entrypoint
// is loaded from the thread and called (via a slow path) only when it is
// non-null, i.e. when the GC is marking. GC roots are never poisoned, so no
// unpoisoning is emitted on the plain path.
void InstructionCodeGeneratorMIPS::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                           Location root,
                                                           Register obj,
                                                           uint32_t offset,
                                                           ReadBarrierOption read_barrier_option) {
  Register root_reg = root.AsRegister<Register>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      //
      //   root = obj.field;
      //   temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
      //   if (temp != null) {
      //     root = temp(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *(obj + offset)
      __ LoadFromOffset(kLoadWord, root_reg, obj, offset);
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path marking the GC root `root`.
      Location temp = Location::RegisterLocation(T9);
      SlowPathCodeMIPS* slow_path =
          new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS(
              instruction,
              root,
              /*entrypoint*/ temp);
      codegen_->AddSlowPath(slow_path);

      // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
      // The entrypoint table is indexed by register number; `- 1` because the
      // table has no entry for the ZERO register.
      const int32_t entry_point_offset =
          CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(root.reg() - 1);
      // Loading the entrypoint does not require a load acquire since it is only changed when
      // threads are suspended or running a checkpoint.
      __ LoadFromOffset(kLoadWord, temp.AsRegister<Register>(), TR, entry_point_offset);
      // The entrypoint is null when the GC is not marking, this prevents one load compared to
      // checking GetIsGcMarking.
      __ Bnez(temp.AsRegister<Register>(), slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      __ Addiu32(root_reg, obj, offset);
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    __ LoadFromOffset(kLoadWord, root_reg, obj, offset);
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
6437
Alexey Frunze15958152017-02-09 19:08:30 -08006438void CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6439 Location ref,
6440 Register obj,
6441 uint32_t offset,
6442 Location temp,
6443 bool needs_null_check) {
6444 DCHECK(kEmitCompilerReadBarrier);
6445 DCHECK(kUseBakerReadBarrier);
6446
6447 // /* HeapReference<Object> */ ref = *(obj + offset)
6448 Location no_index = Location::NoLocation();
6449 ScaleFactor no_scale_factor = TIMES_1;
6450 GenerateReferenceLoadWithBakerReadBarrier(instruction,
6451 ref,
6452 obj,
6453 offset,
6454 no_index,
6455 no_scale_factor,
6456 temp,
6457 needs_null_check);
6458}
6459
6460void CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6461 Location ref,
6462 Register obj,
6463 uint32_t data_offset,
6464 Location index,
6465 Location temp,
6466 bool needs_null_check) {
6467 DCHECK(kEmitCompilerReadBarrier);
6468 DCHECK(kUseBakerReadBarrier);
6469
6470 static_assert(
6471 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6472 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
6473 // /* HeapReference<Object> */ ref =
6474 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
6475 ScaleFactor scale_factor = TIMES_4;
6476 GenerateReferenceLoadWithBakerReadBarrier(instruction,
6477 ref,
6478 obj,
6479 data_offset,
6480 index,
6481 scale_factor,
6482 temp,
6483 needs_null_check);
6484}
6485
// Core Baker read barrier sequence: loads the object's lock word first, then
// the reference itself (with a SYNC between them to enforce load-load
// ordering), and branches to a marking slow path when the read barrier state
// bit says the object is gray. When `always_update_field` is set, the slow
// path also writes the possibly-moved reference back through
// `obj + field_offset` (used by the UnsafeCASObject intrinsic path).
void CodeGeneratorMIPS::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                  Location ref,
                                                                  Register obj,
                                                                  uint32_t offset,
                                                                  Location index,
                                                                  ScaleFactor scale_factor,
                                                                  Location temp,
                                                                  bool needs_null_check,
                                                                  bool always_update_field) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  Register ref_reg = ref.AsRegister<Register>();
  Register temp_reg = temp.AsRegister<Register>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
  if (needs_null_check) {
    // The monitor load doubles as the implicit null check.
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  __ Sync(0);  // Barrier to prevent load-load reordering.

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index": ArrayGet,
    // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
    // intrinsics.
    // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
    if (index.IsConstant()) {
      size_t computed_offset =
          (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
      __ LoadFromOffset(kLoadWord, ref_reg, obj, computed_offset);
    } else {
      // Handle the special case of the
      // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
      // intrinsics, which use a register pair as index ("long
      // offset"), of which only the low part contains data.
      Register index_reg = index.IsRegisterPair()
          ? index.AsRegisterPairLow<Register>()
          : index.AsRegister<Register>();
      __ ShiftAndAdd(TMP, index_reg, obj, scale_factor, TMP);
      __ LoadFromOffset(kLoadWord, ref_reg, TMP, offset);
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    __ LoadFromOffset(kLoadWord, ref_reg, obj, offset);
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeMIPS* slow_path;
  if (always_update_field) {
    // ReadBarrierMarkAndUpdateFieldSlowPathMIPS only supports address
    // of the form `obj + field_offset`, where `obj` is a register and
    // `field_offset` is a register pair (of which only the lower half
    // is used). Thus `offset` and `scale_factor` above are expected
    // to be null in this code path.
    DCHECK_EQ(offset, 0u);
    DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
    slow_path = new (GetGraph()->GetArena())
        ReadBarrierMarkAndUpdateFieldSlowPathMIPS(instruction,
                                                  ref,
                                                  obj,
                                                  /* field_offset */ index,
                                                  temp_reg);
  } else {
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS(instruction, ref);
  }
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the
  // rb_state. We do that by shifting the bit into the sign bit (31) and
  // performing a branch on less than zero.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
  __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
  __ Bltz(temp_reg, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
6594
// Emits an unconditional slow-path read barrier for a heap reference that has
// already been loaded into `ref` from `obj` + `offset` (+ optional `index`);
// the marked result is produced in `out`.
void CodeGeneratorMIPS::GenerateReadBarrierSlow(HInstruction* instruction,
                                                Location out,
                                                Location ref,
                                                Location obj,
                                                uint32_t offset,
                                                Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathMIPS(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
6621
6622void CodeGeneratorMIPS::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6623 Location out,
6624 Location ref,
6625 Location obj,
6626 uint32_t offset,
6627 Location index) {
6628 if (kEmitCompilerReadBarrier) {
6629 // Baker's read barriers shall be handled by the fast path
6630 // (CodeGeneratorMIPS::GenerateReferenceLoadWithBakerReadBarrier).
6631 DCHECK(!kUseBakerReadBarrier);
6632 // If heap poisoning is enabled, unpoisoning will be taken care of
6633 // by the runtime within the slow path.
6634 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
6635 } else if (kPoisonHeapReferences) {
6636 __ UnpoisonHeapReference(out.AsRegister<Register>());
6637 }
6638}
6639
// Emits an unconditional slow-path read barrier for a GC root that has
// already been loaded into `root`; the result is produced in `out`.
void CodeGeneratorMIPS::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                       Location out,
                                                       Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeMIPS* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathMIPS(instruction, out, root);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
6656
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006657void LocationsBuilderMIPS::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006658 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
6659 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzec61c0762017-04-10 13:54:23 -07006660 bool baker_read_barrier_slow_path = false;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006661 switch (type_check_kind) {
6662 case TypeCheckKind::kExactCheck:
6663 case TypeCheckKind::kAbstractClassCheck:
6664 case TypeCheckKind::kClassHierarchyCheck:
6665 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08006666 call_kind =
6667 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Alexey Frunzec61c0762017-04-10 13:54:23 -07006668 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006669 break;
6670 case TypeCheckKind::kArrayCheck:
6671 case TypeCheckKind::kUnresolvedCheck:
6672 case TypeCheckKind::kInterfaceCheck:
6673 call_kind = LocationSummary::kCallOnSlowPath;
6674 break;
6675 }
6676
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006677 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07006678 if (baker_read_barrier_slow_path) {
6679 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
6680 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006681 locations->SetInAt(0, Location::RequiresRegister());
6682 locations->SetInAt(1, Location::RequiresRegister());
6683 // The output does overlap inputs.
6684 // Note that TypeCheckSlowPathMIPS uses this register too.
6685 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexey Frunze15958152017-02-09 19:08:30 -08006686 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006687}
6688
// Emits code for HInstanceOf, producing 1 in `out` when `obj` is an instance
// of `cls` and 0 otherwise. A null `obj` yields 0 (checked only when the
// compiler cannot prove non-nullness). Simple kinds are resolved inline by
// walking class metadata; kArrayCheck retries via a slow path, and
// unresolved/interface checks always defer to the slow path.
void InstructionCodeGeneratorMIPS::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = obj_loc.AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Location out_loc = locations->Out();
  Register out = out_loc.AsRegister<Register>();
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  MipsLabel done;
  SlowPathCodeMIPS* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Move(out, ZERO);
    __ Beqz(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Classes must be equal for the instanceof to succeed.
      // out = (out == cls) via Xor + Sltiu (out < 1 iff out == 0).
      __ Xor(out, out, cls);
      __ Sltiu(out, out, 1);
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      MipsLabel loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqz(out, &done);
      __ Bne(out, cls, &loop);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Walk over the class hierarchy to find a match.
      MipsLabel loop, success;
      __ Bind(&loop);
      __ Beq(out, cls, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ Bnez(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ B(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Do an exact check.
      MipsLabel success;
      __ Beq(out, cls, &success);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqz(out, &done);
      __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Sltiu(out, out, 1);
      __ B(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
                                                                     /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bne(out, cls, slow_path->GetEntryLabel());
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
                                                                     /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      break;
    }
  }

  __ Bind(&done);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
6862
// Int constants need no register; their value is materialized at each use.
void LocationsBuilderMIPS::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}
6867
void InstructionCodeGeneratorMIPS::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
6871
// Null constants need no register; their value is materialized at each use.
void LocationsBuilderMIPS::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}
6876
void InstructionCodeGeneratorMIPS::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
6880
// Builds the common location summary for invokes using the MIPS dex calling
// convention (arguments in registers/stack per the convention visitor).
void LocationsBuilderMIPS::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorMIPS calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}
6885
// Interface invokes use the common invoke locations plus a fixed temp for the
// hidden argument expected by the IMT conflict trampoline.
void LocationsBuilderMIPS::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // The register T7 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(T7));
}
6892
// Emits the interface-dispatch call sequence: load the receiver's class, index
// into the IMT, and jump through T9. The hidden argument (dex method index) is
// passed in the second temp (T7, reserved in the locations builder above).
void InstructionCodeGeneratorMIPS::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);

  // Set the hidden argument.
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The null check is implicit: it must be recorded right after the class load above.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->imt_ (pointer to the class's interface method table).
  __ LoadFromOffset(kLoadWord, temp, temp,
      mirror::Class::ImtPtrOffset(kMipsPointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMipsPointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, T9, temp, entry_point.Int32Value());
  // T9();  (jalr fills the delay slot with a nop when reordering is off)
  __ Jalr(T9);
  __ NopIfNoReordering();
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
6934
6935void LocationsBuilderMIPS::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen701566a2015-10-27 15:29:13 -07006936 IntrinsicLocationsBuilderMIPS intrinsic(codegen_);
6937 if (intrinsic.TryDispatch(invoke)) {
6938 return;
6939 }
6940
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006941 HandleInvoke(invoke);
6942}
6943
6944void LocationsBuilderMIPS::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00006945 // Explicit clinit checks triggered by static invokes must have been pruned by
6946 // art::PrepareForRegisterAllocation.
6947 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006948
Alexey Frunze6b892cd2017-01-03 17:11:38 -08006949 bool is_r6 = codegen_->GetInstructionSetFeatures().IsR6();
6950 bool has_extra_input = invoke->HasPcRelativeDexCache() && !is_r6;
Alexey Frunzee3fb2452016-05-10 16:08:05 -07006951
Chris Larsen701566a2015-10-27 15:29:13 -07006952 IntrinsicLocationsBuilderMIPS intrinsic(codegen_);
6953 if (intrinsic.TryDispatch(invoke)) {
Alexey Frunzee3fb2452016-05-10 16:08:05 -07006954 if (invoke->GetLocations()->CanCall() && has_extra_input) {
6955 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::Any());
6956 }
Chris Larsen701566a2015-10-27 15:29:13 -07006957 return;
6958 }
6959
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006960 HandleInvoke(invoke);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07006961
6962 // Add the extra input register if either the dex cache array base register
6963 // or the PC-relative base register for accessing literals is needed.
6964 if (has_extra_input) {
6965 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::RequiresRegister());
6966 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006967}
6968
void LocationsBuilderMIPS::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  // invoke-polymorphic reuses the generic invoke location setup; the actual
  // dispatch is done through the runtime (see the code generator below).
  HandleInvoke(invoke);
}
6972
void InstructionCodeGeneratorMIPS::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  // Delegate to the architecture-independent runtime call sequence.
  codegen_->GenerateInvokePolymorphicCall(invoke);
}
6976
Chris Larsen701566a2015-10-27 15:29:13 -07006977static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS* codegen) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006978 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen701566a2015-10-27 15:29:13 -07006979 IntrinsicCodeGeneratorMIPS intrinsic(codegen);
6980 intrinsic.Dispatch(invoke);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006981 return true;
6982 }
6983 return false;
6984}
6985
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006986HLoadString::LoadKind CodeGeneratorMIPS::GetSupportedLoadStringKind(
Alexey Frunze06a46c42016-07-19 15:00:40 -07006987 HLoadString::LoadKind desired_string_load_kind) {
Alexey Frunze6b892cd2017-01-03 17:11:38 -08006988 // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
Alexey Frunze06a46c42016-07-19 15:00:40 -07006989 // is incompatible with it.
Vladimir Markoaad75c62016-10-03 08:46:48 +00006990 // TODO: Create as many MipsDexCacheArraysBase instructions as needed for methods
6991 // with irreducible loops.
Alexey Frunze06a46c42016-07-19 15:00:40 -07006992 bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08006993 bool is_r6 = GetInstructionSetFeatures().IsR6();
6994 bool fallback_load = has_irreducible_loops && !is_r6;
Alexey Frunze06a46c42016-07-19 15:00:40 -07006995 switch (desired_string_load_kind) {
Alexey Frunze06a46c42016-07-19 15:00:40 -07006996 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoaad75c62016-10-03 08:46:48 +00006997 case HLoadString::LoadKind::kBssEntry:
Alexey Frunze06a46c42016-07-19 15:00:40 -07006998 DCHECK(!Runtime::Current()->UseJitCompilation());
Alexey Frunze06a46c42016-07-19 15:00:40 -07006999 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01007000 case HLoadString::LoadKind::kBootImageAddress:
7001 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007002 case HLoadString::LoadKind::kJitTableAddress:
7003 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunze627c1a02017-01-30 19:28:14 -08007004 fallback_load = false;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007005 break;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007006 case HLoadString::LoadKind::kDexCacheViaMethod:
7007 fallback_load = false;
7008 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007009 }
7010 if (fallback_load) {
7011 desired_string_load_kind = HLoadString::LoadKind::kDexCacheViaMethod;
7012 }
7013 return desired_string_load_kind;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007014}
7015
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007016HLoadClass::LoadKind CodeGeneratorMIPS::GetSupportedLoadClassKind(
7017 HLoadClass::LoadKind desired_class_load_kind) {
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007018 // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
Alexey Frunze06a46c42016-07-19 15:00:40 -07007019 // is incompatible with it.
7020 bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007021 bool is_r6 = GetInstructionSetFeatures().IsR6();
7022 bool fallback_load = has_irreducible_loops && !is_r6;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007023 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00007024 case HLoadClass::LoadKind::kInvalid:
7025 LOG(FATAL) << "UNREACHABLE";
7026 UNREACHABLE();
Alexey Frunze06a46c42016-07-19 15:00:40 -07007027 case HLoadClass::LoadKind::kReferrersClass:
7028 fallback_load = false;
7029 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007030 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007031 case HLoadClass::LoadKind::kBssEntry:
7032 DCHECK(!Runtime::Current()->UseJitCompilation());
7033 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01007034 case HLoadClass::LoadKind::kBootImageAddress:
7035 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007036 case HLoadClass::LoadKind::kJitTableAddress:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007037 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunze627c1a02017-01-30 19:28:14 -08007038 fallback_load = false;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007039 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007040 case HLoadClass::LoadKind::kDexCacheViaMethod:
7041 fallback_load = false;
7042 break;
7043 }
7044 if (fallback_load) {
7045 desired_class_load_kind = HLoadClass::LoadKind::kDexCacheViaMethod;
7046 }
7047 return desired_class_load_kind;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007048}
7049
// Returns the register holding the extra (PC-relative base) input of a pre-R6
// invoke-static-or-direct, reloading it from the stack into `temp` when the
// value is not (or no longer) available in a register — e.g. inside an
// intrinsic's slow path where caller-saves have been spilled.
Register CodeGeneratorMIPS::GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
                                                                  Register temp) {
  // Only pre-R6 needs the extra base input; R6 forms PC-relative addresses directly.
  CHECK(!GetInstructionSetFeatures().IsR6());
  CHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
  Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
  if (!invoke->GetLocations()->Intrinsified()) {
    return location.AsRegister<Register>();
  }
  // For intrinsics we allow any location, so it may be on the stack.
  if (!location.IsRegister()) {
    __ LoadFromOffset(kLoadWord, temp, SP, location.GetStackIndex());
    return temp;
  }
  // For register locations, check if the register was saved. If so, get it from the stack.
  // Note: There is a chance that the register was saved but not overwritten, so we could
  // save one load. However, since this is just an intrinsic slow path we prefer this
  // simple and more robust approach rather that trying to determine if that's the case.
  SlowPathCode* slow_path = GetCurrentSlowPath();
  DCHECK(slow_path != nullptr);  // For intrinsified invokes the call is emitted on the slow path.
  if (slow_path->IsCoreRegisterSaved(location.AsRegister<Register>())) {
    int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>());
    __ LoadFromOffset(kLoadWord, temp, SP, stack_offset);
    return temp;
  }
  return location.AsRegister<Register>();
}
7076
Vladimir Markodc151b22015-10-15 18:02:30 +01007077HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS::GetSupportedInvokeStaticOrDirectDispatch(
7078 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01007079 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007080 HInvokeStaticOrDirect::DispatchInfo dispatch_info = desired_dispatch_info;
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007081 // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007082 // is incompatible with it.
7083 bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007084 bool is_r6 = GetInstructionSetFeatures().IsR6();
7085 bool fallback_load = has_irreducible_loops && !is_r6;
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007086 switch (dispatch_info.method_load_kind) {
Vladimir Markodc151b22015-10-15 18:02:30 +01007087 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative:
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007088 break;
Vladimir Markodc151b22015-10-15 18:02:30 +01007089 default:
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007090 fallback_load = false;
Vladimir Markodc151b22015-10-15 18:02:30 +01007091 break;
7092 }
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007093 if (fallback_load) {
7094 dispatch_info.method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod;
7095 dispatch_info.method_load_data = 0;
7096 }
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007097 return dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +01007098}
7099
// Emits the full static/direct call sequence: first materialize the callee
// ArtMethod (or its entry point) according to the method load kind, then emit
// the call itself according to the code pointer location.
void CodeGeneratorMIPS::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
  // All registers are assumed to be correctly set up per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
  HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();
  bool is_r6 = GetInstructionSetFeatures().IsR6();
  // On pre-R6 the PC-relative dex cache access needs the extra base input;
  // it may have to be reloaded from the stack on intrinsic slow paths.
  Register base_reg = (invoke->HasPcRelativeDexCache() && !is_r6)
      ? GetInvokeStaticOrDirectExtraParameter(invoke, temp.AsRegister<Register>())
      : ZERO;

  switch (method_load_kind) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kMipsPointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ LoadFromOffset(kLoadWord,
                        temp.AsRegister<Register>(),
                        TR,
                        offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Recursive call: the current method is already an input of the invoke.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ LoadConst32(temp.AsRegister<Register>(), invoke->GetMethodAddress());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative:
      if (is_r6) {
        // R6: emit an auipc/lw pair with placeholder immediates to be patched at link time.
        // Reordering must be off so the patcher sees the exact two-instruction sequence.
        uint32_t offset = invoke->GetDexCacheArrayOffset();
        CodeGeneratorMIPS::PcRelativePatchInfo* info =
            NewPcRelativeDexCacheArrayPatch(invoke->GetDexFileForPcRelativeDexCache(), offset);
        bool reordering = __ SetReorder(false);
        EmitPcRelativeAddressPlaceholderHigh(info, TMP, ZERO);
        __ Lw(temp.AsRegister<Register>(), TMP, /* placeholder */ 0x5678);
        __ SetReorder(reordering);
      } else {
        // Pre-R6: load relative to the materialized dex cache arrays base register.
        HMipsDexCacheArraysBase* base =
            invoke->InputAt(invoke->GetSpecialInputIndex())->AsMipsDexCacheArraysBase();
        int32_t offset =
            invoke->GetDexCacheArrayOffset() - base->GetElementOffset() - kDexCacheArrayLwOffset;
        __ LoadFromOffset(kLoadWord, temp.AsRegister<Register>(), base_reg, offset);
      }
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register reg = temp.AsRegister<Register>();
      Register method_reg;
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        // TODO: use the appropriate DCHECK() here if possible.
        // DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg;
        __ Lw(reg, SP, kCurrentMethodStackOffset);
      }

      // temp = temp->dex_cache_resolved_methods_;
      __ LoadFromOffset(kLoadWord,
                        reg,
                        method_reg,
                        ArtMethod::DexCacheResolvedMethodsOffset(kMipsPointerSize).Int32Value());
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ LoadFromOffset(kLoadWord,
                        reg,
                        reg,
                        CodeGenerator::GetCachePointerOffset(index_in_cache));
      break;
    }
  }

  switch (code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Self-recursive call: branch-and-link directly to this method's frame entry.
      __ Bal(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // T9 = callee_method->entry_point_from_quick_compiled_code_;
      __ LoadFromOffset(kLoadWord,
                        T9,
                        callee_method.AsRegister<Register>(),
                        ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                            kMipsPointerSize).Int32Value());
      // T9()  (jalr fills the delay slot with a nop when reordering is off)
      __ Jalr(T9);
      __ NopIfNoReordering();
      break;
  }
  DCHECK(!IsLeafMethod());
}
7192
7193void InstructionCodeGeneratorMIPS::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00007194 // Explicit clinit checks triggered by static invokes must have been pruned by
7195 // art::PrepareForRegisterAllocation.
7196 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007197
7198 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
7199 return;
7200 }
7201
7202 LocationSummary* locations = invoke->GetLocations();
7203 codegen_->GenerateStaticOrDirectCall(invoke,
7204 locations->HasTemps()
7205 ? locations->GetTemp(0)
7206 : Location::NoLocation());
7207 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
7208}
7209
// Emits the virtual-dispatch call sequence: load the receiver's class, index
// into the embedded vtable, and jump through T9. The caller is responsible for
// recording the PC info afterwards.
void CodeGeneratorMIPS::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_location) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  Register temp = temp_location.AsRegister<Register>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kMipsPointerSize).SizeValue();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);

  // temp = object->GetClass();
  __ LoadFromOffset(kLoadWord, temp, receiver, class_offset);
  // The null check is implicit: it must be recorded right after the class load above.
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, T9, temp, entry_point.Int32Value());
  // T9();  (jalr fills the delay slot with a nop when reordering is off)
  __ Jalr(T9);
  __ NopIfNoReordering();
}
7243
7244void InstructionCodeGeneratorMIPS::VisitInvokeVirtual(HInvokeVirtual* invoke) {
7245 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
7246 return;
7247 }
7248
7249 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007250 DCHECK(!codegen_->IsLeafMethod());
7251 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
7252}
7253
// Sets up the locations for HLoadClass depending on the load kind, the CPU
// revision (R2 vs R6) and the read-barrier configuration.
void LocationsBuilderMIPS::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    // Full runtime call: both the current-method input and the output use the
    // first runtime calling-convention register.
    InvokeRuntimeCallingConvention calling_convention;
    Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());
  const bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageAddress:
    case HLoadClass::LoadKind::kBssEntry:
      if (isR6) {
        break;
      }
      FALLTHROUGH_INTENDED;
    case HLoadClass::LoadKind::kReferrersClass:
      locations->SetInAt(0, Location::RequiresRegister());
      break;
    default:
      break;
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution or initialization and marking to save everything we need.
      // Request a temp to hold the BSS entry location for the slow path on R2
      // (no benefit for R6).
      if (!isR6) {
        locations->AddTemp(Location::RequiresRegister());
      }
      RegisterSet caller_saves = RegisterSet::Empty();
      InvokeRuntimeCallingConvention calling_convention;
      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
      locations->SetCustomSlowPathCallerSaves(caller_saves);
    } else {
      // For non-Baker read barriers we have a temp-clobbering call.
    }
  }
}
7305
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
// Emits the class load according to the load kind chosen in the locations
// builder, plus the slow path for resolution/initialization when needed.
void InstructionCodeGeneratorMIPS::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    // Fallback kind: resolve the class through a runtime call.
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  Register out = out_loc.AsRegister<Register>();
  Register base_or_current_method_reg;
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageAddress:
    case HLoadClass::LoadKind::kBssEntry:
      base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
      break;
    case HLoadClass::LoadKind::kReferrersClass:
    case HLoadClass::LoadKind::kDexCacheViaMethod:
      base_or_current_method_reg = locations->InAt(0).AsRegister<Register>();
      break;
    default:
      base_or_current_method_reg = ZERO;
      break;
  }

  // Boot-image classes are immovable, so they never need a read barrier.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              base_or_current_method_reg,
                              ArtMethod::DeclaringClassOffset().Int32Value(),
                              read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // Emit a patchable high/low address pair; reordering must stay off so the
      // linker patcher sees the exact instruction sequence.
      CodeGeneratorMIPS::PcRelativePatchInfo* info =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      bool reordering = __ SetReorder(false);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info, out, base_or_current_method_reg);
      __ Addiu(out, out, /* placeholder */ 0x5678);
      __ SetReorder(reordering);
      break;
    }
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     base_or_current_method_reg,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      CodeGeneratorMIPS::PcRelativePatchInfo* info =
          codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
      constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
      if (isR6 || non_baker_read_barrier) {
        bool reordering = __ SetReorder(false);
        codegen_->EmitPcRelativeAddressPlaceholderHigh(info, out, base_or_current_method_reg);
        GenerateGcRootFieldLoad(cls, out_loc, out, /* placeholder */ 0x5678, read_barrier_option);
        __ SetReorder(reordering);
      } else {
        // On R2 save the BSS entry address in a temporary register instead of
        // recalculating it in the slow path.
        Register temp = locations->GetTemp(0).AsRegister<Register>();
        bool reordering = __ SetReorder(false);
        codegen_->EmitPcRelativeAddressPlaceholderHigh(info, temp, base_or_current_method_reg);
        __ Addiu(temp, temp, /* placeholder */ 0x5678);
        __ SetReorder(reordering);
        GenerateGcRootFieldLoad(cls, out_loc, temp, /* offset */ 0, read_barrier_option);
      }
      // The BSS entry may still be null (unresolved); the slow path resolves it.
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      CodeGeneratorMIPS::JitPatchInfo* info = codegen_->NewJitRootClassPatch(cls->GetDexFile(),
                                                                             cls->GetTypeIndex(),
                                                                             cls->GetClass());
      bool reordering = __ SetReorder(false);
      __ Bind(&info->high_label);
      __ Lui(out, /* placeholder */ 0x1234);
      GenerateGcRootFieldLoad(cls, out_loc, out, /* placeholder */ 0x5678, read_barrier_option);
      __ SetReorder(reordering);
      break;
    }
    case HLoadClass::LoadKind::kDexCacheViaMethod:
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ Beqz(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
7428
// Offset of the pending-exception field within Thread, addressed via the
// MIPS thread register (TR).
static int32_t GetExceptionTlsOffset() {
  return Thread::ExceptionOffset<kMipsPointerSize>().Int32Value();
}
7432
void LocationsBuilderMIPS::VisitLoadException(HLoadException* load) {
  // The loaded exception only needs a destination register; no call, no temps.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
7438
void InstructionCodeGeneratorMIPS::VisitLoadException(HLoadException* load) {
  // Load the pending exception object from the current Thread (TR).
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  __ LoadFromOffset(kLoadWord, out, TR, GetExceptionTlsOffset());
}
7443
void LocationsBuilderMIPS::VisitClearException(HClearException* clear) {
  // No inputs, outputs or temps needed to clear the exception slot.
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}
7447
void InstructionCodeGeneratorMIPS::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  // Store null (ZERO) into the Thread's pending-exception slot.
  __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
}
7451
void LocationsBuilderMIPS::VisitLoadString(HLoadString* load) {
  // Register allocation for HLoadString depends on the load kind and on
  // whether this is a pre-R6 (R2) or R6 core, since R2 needs an extra base
  // register to form PC-relative addresses.
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  const bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadString::LoadKind::kBootImageAddress:
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBssEntry:
      if (isR6) {
        break;
      }
      FALLTHROUGH_INTENDED;
    // We need an extra register for PC-relative dex cache accesses.
    case HLoadString::LoadKind::kDexCacheViaMethod:
      locations->SetInAt(0, Location::RequiresRegister());
      break;
    default:
      break;
  }
  if (load_kind == HLoadString::LoadKind::kDexCacheViaMethod) {
    // Runtime call: the result is returned in the first argument register.
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load_kind == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString and marking to save everything we need.
        // Request a temp to hold the BSS entry location for the slow path on R2
        // (no benefit for R6).
        if (!isR6) {
          locations->AddTemp(Location::RequiresRegister());
        }
        RegisterSet caller_saves = RegisterSet::Empty();
        InvokeRuntimeCallingConvention calling_convention;
        caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
        locations->SetCustomSlowPathCallerSaves(caller_saves);
      } else {
        // For non-Baker read barriers we have a temp-clobbering call.
      }
    }
  }
}
7496
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorMIPS::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  // Emits the string load for every supported load kind. The 0x1234/0x5678
  // immediates below are placeholders patched later (linker patches or JIT
  // root patches); assembler reordering is disabled around them so the
  // patchable instruction pairs stay adjacent.
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  Register out = out_loc.AsRegister<Register>();
  Register base_or_current_method_reg;
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadString::LoadKind::kBootImageAddress:
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBssEntry:
      base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
      break;
    default:
      base_or_current_method_reg = ZERO;
      break;
  }

  switch (load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      // Boot-image compile: PC-relative address resolved at link time.
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      CodeGeneratorMIPS::PcRelativePatchInfo* info =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
      bool reordering = __ SetReorder(false);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info, out, base_or_current_method_reg);
      __ Addiu(out, out, /* placeholder */ 0x5678);
      __ SetReorder(reordering);
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      // AOT against a loaded boot image: embed the string's absolute address.
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(load->GetString().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     base_or_current_method_reg,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBssEntry: {
      // App compile: load through the .bss entry; null means unresolved and
      // falls into the slow path which calls the runtime resolver.
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      CodeGeneratorMIPS::PcRelativePatchInfo* info =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
      constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
      if (isR6 || non_baker_read_barrier) {
        bool reordering = __ SetReorder(false);
        codegen_->EmitPcRelativeAddressPlaceholderHigh(info, out, base_or_current_method_reg);
        GenerateGcRootFieldLoad(load,
                                out_loc,
                                out,
                                /* placeholder */ 0x5678,
                                kCompilerReadBarrierOption);
        __ SetReorder(reordering);
      } else {
        // On R2 save the BSS entry address in a temporary register instead of
        // recalculating it in the slow path.
        Register temp = locations->GetTemp(0).AsRegister<Register>();
        bool reordering = __ SetReorder(false);
        codegen_->EmitPcRelativeAddressPlaceholderHigh(info, temp, base_or_current_method_reg);
        __ Addiu(temp, temp, /* placeholder */ 0x5678);
        __ SetReorder(reordering);
        GenerateGcRootFieldLoad(load, out_loc, temp, /* offset */ 0, kCompilerReadBarrierOption);
      }
      SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathMIPS(load);
      codegen_->AddSlowPath(slow_path);
      __ Beqz(out, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      // JIT compile: address of the JIT roots table entry, patched at commit.
      CodeGeneratorMIPS::JitPatchInfo* info =
          codegen_->NewJitRootStringPatch(load->GetDexFile(),
                                          load->GetStringIndex(),
                                          load->GetString());
      bool reordering = __ SetReorder(false);
      __ Bind(&info->high_label);
      __ Lui(out, /* placeholder */ 0x1234);
      GenerateGcRootFieldLoad(load,
                              out_loc,
                              out,
                              /* placeholder */ 0x5678,
                              kCompilerReadBarrierOption);
      __ SetReorder(reordering);
      return;
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  // Fallback: resolve the string via the runtime entry point.
  DCHECK(load_kind == HLoadString::LoadKind::kDexCacheViaMethod);
  InvokeRuntimeCallingConvention calling_convention;
  DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
  __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
  codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
7596
void LocationsBuilderMIPS::VisitLongConstant(HLongConstant* constant) {
  // Constants are materialized at their use sites, so the "output" is the
  // constant itself.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}
7601
void InstructionCodeGeneratorMIPS::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
7605
void LocationsBuilderMIPS::VisitMonitorOperation(HMonitorOperation* instruction) {
  // monitor-enter/exit always calls the runtime; the object goes in the
  // first runtime-call argument register.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
7612
7613void InstructionCodeGeneratorMIPS::VisitMonitorOperation(HMonitorOperation* instruction) {
7614 if (instruction->IsEnter()) {
Serban Constantinescufca16662016-07-14 09:21:59 +01007615 codegen_->InvokeRuntime(kQuickLockObject, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007616 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
7617 } else {
Serban Constantinescufca16662016-07-14 09:21:59 +01007618 codegen_->InvokeRuntime(kQuickUnlockObject, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007619 }
7620 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
7621}
7622
void LocationsBuilderMIPS::VisitMul(HMul* mul) {
  // Multiplication is done entirely in registers (core or FPU); no call.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
7645
void InstructionCodeGeneratorMIPS::VisitMul(HMul* instruction) {
  // Emits int, long (schoolbook 32x32->64 combination), float and double
  // multiplies, selecting R2 or R6 instruction encodings.
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register lhs = locations->InAt(0).AsRegister<Register>();
      Register rhs = locations->InAt(1).AsRegister<Register>();

      if (isR6) {
        __ MulR6(dst, lhs, rhs);
      } else {
        __ MulR2(dst, lhs, rhs);
      }
      break;
    }
    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      Register rhs_high = locations->InAt(1).AsRegisterPairHigh<Register>();
      Register rhs_low = locations->InAt(1).AsRegisterPairLow<Register>();

      // Extra checks to protect against the existence of the A1_A2 pair.
      // The algorithm is wrong if dst_high is either lhs_lo or rhs_lo:
      // (e.g. lhs=a0_a1, rhs=a2_a3 and dst=a1_a2).
      DCHECK_NE(dst_high, lhs_low);
      DCHECK_NE(dst_high, rhs_low);

      // A_B * C_D
      // dst_hi: [ low(A*D) + low(B*C) + hi(B*D) ]
      // dst_lo: [ low(B*D) ]
      // Note: R2 and R6 MUL produce the low 32 bit of the multiplication result.

      if (isR6) {
        __ MulR6(TMP, lhs_high, rhs_low);
        __ MulR6(dst_high, lhs_low, rhs_high);
        __ Addu(dst_high, dst_high, TMP);
        __ MuhuR6(TMP, lhs_low, rhs_low);
        __ Addu(dst_high, dst_high, TMP);
        __ MulR6(dst_low, lhs_low, rhs_low);
      } else {
        // R2 has no fused high-multiply; use HI/LO via MULTU + MFHI/MFLO.
        __ MulR2(TMP, lhs_high, rhs_low);
        __ MulR2(dst_high, lhs_low, rhs_high);
        __ Addu(dst_high, dst_high, TMP);
        __ MultuR2(lhs_low, rhs_low);
        __ Mfhi(TMP);
        __ Addu(dst_high, dst_high, TMP);
        __ Mflo(dst_low);
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ MulS(dst, lhs, rhs);
      } else {
        __ MulD(dst, lhs, rhs);
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected mul type " << type;
  }
}
7717
void LocationsBuilderMIPS::VisitNeg(HNeg* neg) {
  // Negation needs one input and one output in the matching register class.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
7738
7739void InstructionCodeGeneratorMIPS::VisitNeg(HNeg* instruction) {
7740 Primitive::Type type = instruction->GetType();
7741 LocationSummary* locations = instruction->GetLocations();
7742
7743 switch (type) {
7744 case Primitive::kPrimInt: {
7745 Register dst = locations->Out().AsRegister<Register>();
7746 Register src = locations->InAt(0).AsRegister<Register>();
7747 __ Subu(dst, ZERO, src);
7748 break;
7749 }
7750 case Primitive::kPrimLong: {
7751 Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
7752 Register dst_low = locations->Out().AsRegisterPairLow<Register>();
7753 Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
7754 Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
7755 __ Subu(dst_low, ZERO, src_low);
7756 __ Sltu(TMP, ZERO, dst_low);
7757 __ Subu(dst_high, ZERO, src_high);
7758 __ Subu(dst_high, dst_high, TMP);
7759 break;
7760 }
7761 case Primitive::kPrimFloat:
7762 case Primitive::kPrimDouble: {
7763 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
7764 FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
7765 if (type == Primitive::kPrimFloat) {
7766 __ NegS(dst, src);
7767 } else {
7768 __ NegD(dst, src);
7769 }
7770 break;
7771 }
7772 default:
7773 LOG(FATAL) << "Unexpected neg type " << type;
7774 }
7775}
7776
void LocationsBuilderMIPS::VisitNewArray(HNewArray* instruction) {
  // Array allocation always calls the runtime: class and length go in the
  // first two argument registers, result comes back per the return location.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}
7785
void InstructionCodeGeneratorMIPS::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  codegen_->InvokeRuntime(kQuickAllocArrayResolved, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
}
7792
void LocationsBuilderMIPS::VisitNewInstance(HNewInstance* instruction) {
  // Object allocation always calls the runtime. String allocation takes a
  // different path (StringFactory) and needs the method register as a temp.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
  } else {
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  }
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}
7804
void InstructionCodeGeneratorMIPS::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    // The entry point is loaded from the Thread (TR) and invoked indirectly
    // through T9, as required by the MIPS ABI for PIC calls.
    Register temp = instruction->GetLocations()->GetTemp(0).AsRegister<Register>();
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);
    __ LoadFromOffset(kLoadWord, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
    __ LoadFromOffset(kLoadWord, T9, temp, code_offset.Int32Value());
    __ Jalr(T9);
    __ NopIfNoReordering();
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  }
}
7822
void LocationsBuilderMIPS::VisitNot(HNot* instruction) {
  // Bitwise not: one core register in, one out.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
7828
void InstructionCodeGeneratorMIPS::VisitNot(HNot* instruction) {
  // Bitwise complement via NOR with ZERO (nor dst, src, $0 == ~src).
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register src = locations->InAt(0).AsRegister<Register>();
      __ Nor(dst, src, ZERO);
      break;
    }

    case Primitive::kPrimLong: {
      // Complement both halves of the register pair independently.
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
      __ Nor(dst_high, src_high, ZERO);
      __ Nor(dst_low, src_low, ZERO);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}
7855
void LocationsBuilderMIPS::VisitBooleanNot(HBooleanNot* instruction) {
  // Boolean negation: one core register in, one out.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
7861
7862void InstructionCodeGeneratorMIPS::VisitBooleanNot(HBooleanNot* instruction) {
7863 LocationSummary* locations = instruction->GetLocations();
7864 __ Xori(locations->Out().AsRegister<Register>(),
7865 locations->InAt(0).AsRegister<Register>(),
7866 1);
7867}
7868
void LocationsBuilderMIPS::VisitNullCheck(HNullCheck* instruction) {
  // The checked reference must be in a register; slow-path locations are
  // provided by the shared throwing-slow-path helper.
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
}
7873
void CodeGeneratorMIPS::GenerateImplicitNullCheck(HNullCheck* instruction) {
  // Implicit check: a load from the object triggers a fault on null, caught
  // by the runtime's signal handler. Skipped if a user instruction already
  // performs the dereference.
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  // Load into ZERO: the value is discarded, only the fault matters.
  __ Lw(ZERO, obj.AsRegister<Register>(), 0);
  RecordPcInfo(instruction, instruction->GetDexPc());
}
7883
void CodeGeneratorMIPS::GenerateExplicitNullCheck(HNullCheck* instruction) {
  // Explicit check: branch to a throwing slow path when the reference is null.
  SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathMIPS(instruction);
  AddSlowPath(slow_path);

  Location obj = instruction->GetLocations()->InAt(0);

  __ Beqz(obj.AsRegister<Register>(), slow_path->GetEntryLabel());
}
7892
void InstructionCodeGeneratorMIPS::VisitNullCheck(HNullCheck* instruction) {
  // Delegates to the codegen, which picks implicit vs explicit checking.
  codegen_->GenerateNullCheck(instruction);
}
7896
void LocationsBuilderMIPS::VisitOr(HOr* instruction) {
  // Shares register allocation with the other binary ALU operations.
  HandleBinaryOp(instruction);
}
7900
void InstructionCodeGeneratorMIPS::VisitOr(HOr* instruction) {
  // Shares code generation with the other binary ALU operations.
  HandleBinaryOp(instruction);
}
7904
void LocationsBuilderMIPS::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  // Parallel moves are created by the register allocator, after this phase.
  LOG(FATAL) << "Unreachable";
}
7908
void InstructionCodeGeneratorMIPS::VisitParallelMove(HParallelMove* instruction) {
  // Resolve and emit the set of simultaneous moves (handles cycles/swaps).
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
7912
void LocationsBuilderMIPS::VisitParameterValue(HParameterValue* instruction) {
  // Parameters live where the calling convention put them. Stack-passed
  // parameters are addressed relative to the caller's frame, so the callee
  // frame size is added to the stack index.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}
7923
void InstructionCodeGeneratorMIPS::VisitParameterValue(HParameterValue* instruction
                                                       ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
7928
void LocationsBuilderMIPS::VisitCurrentMethod(HCurrentMethod* instruction) {
  // The current ArtMethod* is always in the dedicated method register (A0).
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
7934
void InstructionCodeGeneratorMIPS::VisitCurrentMethod(HCurrentMethod* instruction
                                                      ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
7939
void LocationsBuilderMIPS::VisitPhi(HPhi* instruction) {
  // Phis impose no constraints; the register allocator resolves them via
  // parallel moves at block boundaries.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}
7947
void InstructionCodeGeneratorMIPS::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  // Phis are eliminated before code generation; reaching here is a bug.
  LOG(FATAL) << "Unreachable";
}
7951
void LocationsBuilderMIPS::VisitRem(HRem* rem) {
  // Only int remainder is computed inline; long/float/double call the runtime
  // and therefore use the runtime calling convention.
  Primitive::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      (type == Primitive::kPrimInt) ? LocationSummary::kNoCall : LocationSummary::kCallOnMainOnly;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimLong: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
      locations->SetInAt(1, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
7988
void InstructionCodeGeneratorMIPS::VisitRem(HRem* instruction) {
  // Int remainder is generated inline; the other types go through quick
  // runtime entry points (lmod/fmodf/fmod).
  Primitive::Type type = instruction->GetType();

  switch (type) {
    case Primitive::kPrimInt:
      GenerateDivRemIntegral(instruction);
      break;
    case Primitive::kPrimLong: {
      codegen_->InvokeRuntime(kQuickLmod, instruction, instruction->GetDexPc());
      CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
      break;
    }
    case Primitive::kPrimFloat: {
      codegen_->InvokeRuntime(kQuickFmodf, instruction, instruction->GetDexPc());
      CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      break;
    }
    case Primitive::kPrimDouble: {
      codegen_->InvokeRuntime(kQuickFmod, instruction, instruction->GetDexPc());
      CheckEntrypointTypes<kQuickFmod, double, double, double>();
      break;
    }
    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
8015
void LocationsBuilderMIPS::VisitConstructorFence(HConstructorFence* constructor_fence) {
  // No inputs or outputs; only a memory barrier is emitted.
  constructor_fence->SetLocations(nullptr);
}
8019
void InstructionCodeGeneratorMIPS::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  // A constructor fence publishes final fields: store-store barrier.
  GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
8024
void LocationsBuilderMIPS::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // No inputs or outputs; only a barrier instruction is emitted.
  memory_barrier->SetLocations(nullptr);
}
8028
void InstructionCodeGeneratorMIPS::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Emit the barrier matching the requested kind.
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
8032
void LocationsBuilderMIPS::VisitReturn(HReturn* ret) {
  // The returned value must be in the ABI-mandated return location.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  Primitive::Type return_type = ret->InputAt(0)->GetType();
  locations->SetInAt(0, MipsReturnLocation(return_type));
}
8038
void InstructionCodeGeneratorMIPS::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
  // The value is already in the return register; just tear down the frame.
  codegen_->GenerateFrameExit();
}
8042
void LocationsBuilderMIPS::VisitReturnVoid(HReturnVoid* ret) {
  // No value returned, so no locations are needed.
  ret->SetLocations(nullptr);
}
8046
void InstructionCodeGeneratorMIPS::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  // Tear down the frame and return.
  codegen_->GenerateFrameExit();
}
8050
void LocationsBuilderMIPS::VisitRor(HRor* ror) {
  // Rotate shares register allocation with the other shift operations.
  HandleShift(ror);
}
8054
void InstructionCodeGeneratorMIPS::VisitRor(HRor* ror) {
  // Rotate shares code generation with the other shift operations.
  HandleShift(ror);
}
8058
void LocationsBuilderMIPS::VisitShl(HShl* shl) {
  // Shared shift register allocation.
  HandleShift(shl);
}
8062
void InstructionCodeGeneratorMIPS::VisitShl(HShl* shl) {
  // Shared shift code generation.
  HandleShift(shl);
}
8066
void LocationsBuilderMIPS::VisitShr(HShr* shr) {
  // Shared shift register allocation.
  HandleShift(shr);
}
8070
void InstructionCodeGeneratorMIPS::VisitShr(HShr* shr) {
  // Shared shift code generation.
  HandleShift(shr);
}
8074
void LocationsBuilderMIPS::VisitSub(HSub* instruction) {
  // Shares register allocation with the other binary ALU operations.
  HandleBinaryOp(instruction);
}
8078
void InstructionCodeGeneratorMIPS::VisitSub(HSub* instruction) {
  // Shares code generation with the other binary ALU operations.
  HandleBinaryOp(instruction);
}
8082
void LocationsBuilderMIPS::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Static field reads share the common field-get location logic.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
8086
void InstructionCodeGeneratorMIPS::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Static field reads share the common field-get code generation.
  HandleFieldGet(instruction, instruction->GetFieldInfo(), instruction->GetDexPc());
}
8090
void LocationsBuilderMIPS::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Static field writes share the common field-set location logic.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
8094
8095void InstructionCodeGeneratorMIPS::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Goran Jakovljevice114da22016-12-26 14:21:43 +01008096 HandleFieldSet(instruction,
8097 instruction->GetFieldInfo(),
8098 instruction->GetDexPc(),
8099 instruction->GetValueCanBeNull());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008100}
8101
8102void LocationsBuilderMIPS::VisitUnresolvedInstanceFieldGet(
8103 HUnresolvedInstanceFieldGet* instruction) {
8104 FieldAccessCallingConventionMIPS calling_convention;
8105 codegen_->CreateUnresolvedFieldLocationSummary(instruction,
8106 instruction->GetFieldType(),
8107 calling_convention);
8108}
8109
8110void InstructionCodeGeneratorMIPS::VisitUnresolvedInstanceFieldGet(
8111 HUnresolvedInstanceFieldGet* instruction) {
8112 FieldAccessCallingConventionMIPS calling_convention;
8113 codegen_->GenerateUnresolvedFieldAccess(instruction,
8114 instruction->GetFieldType(),
8115 instruction->GetFieldIndex(),
8116 instruction->GetDexPc(),
8117 calling_convention);
8118}
8119
8120void LocationsBuilderMIPS::VisitUnresolvedInstanceFieldSet(
8121 HUnresolvedInstanceFieldSet* instruction) {
8122 FieldAccessCallingConventionMIPS calling_convention;
8123 codegen_->CreateUnresolvedFieldLocationSummary(instruction,
8124 instruction->GetFieldType(),
8125 calling_convention);
8126}
8127
8128void InstructionCodeGeneratorMIPS::VisitUnresolvedInstanceFieldSet(
8129 HUnresolvedInstanceFieldSet* instruction) {
8130 FieldAccessCallingConventionMIPS calling_convention;
8131 codegen_->GenerateUnresolvedFieldAccess(instruction,
8132 instruction->GetFieldType(),
8133 instruction->GetFieldIndex(),
8134 instruction->GetDexPc(),
8135 calling_convention);
8136}
8137
8138void LocationsBuilderMIPS::VisitUnresolvedStaticFieldGet(
8139 HUnresolvedStaticFieldGet* instruction) {
8140 FieldAccessCallingConventionMIPS calling_convention;
8141 codegen_->CreateUnresolvedFieldLocationSummary(instruction,
8142 instruction->GetFieldType(),
8143 calling_convention);
8144}
8145
8146void InstructionCodeGeneratorMIPS::VisitUnresolvedStaticFieldGet(
8147 HUnresolvedStaticFieldGet* instruction) {
8148 FieldAccessCallingConventionMIPS calling_convention;
8149 codegen_->GenerateUnresolvedFieldAccess(instruction,
8150 instruction->GetFieldType(),
8151 instruction->GetFieldIndex(),
8152 instruction->GetDexPc(),
8153 calling_convention);
8154}
8155
8156void LocationsBuilderMIPS::VisitUnresolvedStaticFieldSet(
8157 HUnresolvedStaticFieldSet* instruction) {
8158 FieldAccessCallingConventionMIPS calling_convention;
8159 codegen_->CreateUnresolvedFieldLocationSummary(instruction,
8160 instruction->GetFieldType(),
8161 calling_convention);
8162}
8163
8164void InstructionCodeGeneratorMIPS::VisitUnresolvedStaticFieldSet(
8165 HUnresolvedStaticFieldSet* instruction) {
8166 FieldAccessCallingConventionMIPS calling_convention;
8167 codegen_->GenerateUnresolvedFieldAccess(instruction,
8168 instruction->GetFieldType(),
8169 instruction->GetFieldIndex(),
8170 instruction->GetDexPc(),
8171 calling_convention);
8172}
8173
8174void LocationsBuilderMIPS::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01008175 LocationSummary* locations =
8176 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01008177 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008178}
8179
8180void InstructionCodeGeneratorMIPS::VisitSuspendCheck(HSuspendCheck* instruction) {
8181 HBasicBlock* block = instruction->GetBlock();
8182 if (block->GetLoopInformation() != nullptr) {
8183 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
8184 // The back edge will generate the suspend check.
8185 return;
8186 }
8187 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
8188 // The goto will generate the suspend check.
8189 return;
8190 }
8191 GenerateSuspendCheck(instruction, nullptr);
8192}
8193
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008194void LocationsBuilderMIPS::VisitThrow(HThrow* instruction) {
8195 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01008196 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008197 InvokeRuntimeCallingConvention calling_convention;
8198 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
8199}
8200
8201void InstructionCodeGeneratorMIPS::VisitThrow(HThrow* instruction) {
Serban Constantinescufca16662016-07-14 09:21:59 +01008202 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008203 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
8204}
8205
8206void LocationsBuilderMIPS::VisitTypeConversion(HTypeConversion* conversion) {
8207 Primitive::Type input_type = conversion->GetInputType();
8208 Primitive::Type result_type = conversion->GetResultType();
8209 DCHECK_NE(input_type, result_type);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008210 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008211
8212 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
8213 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
8214 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
8215 }
8216
8217 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008218 if (!isR6 &&
8219 ((Primitive::IsFloatingPointType(result_type) && input_type == Primitive::kPrimLong) ||
8220 (result_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(input_type)))) {
Serban Constantinescu54ff4822016-07-07 18:03:19 +01008221 call_kind = LocationSummary::kCallOnMainOnly;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008222 }
8223
8224 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
8225
8226 if (call_kind == LocationSummary::kNoCall) {
8227 if (Primitive::IsFloatingPointType(input_type)) {
8228 locations->SetInAt(0, Location::RequiresFpuRegister());
8229 } else {
8230 locations->SetInAt(0, Location::RequiresRegister());
8231 }
8232
8233 if (Primitive::IsFloatingPointType(result_type)) {
8234 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
8235 } else {
8236 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8237 }
8238 } else {
8239 InvokeRuntimeCallingConvention calling_convention;
8240
8241 if (Primitive::IsFloatingPointType(input_type)) {
8242 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
8243 } else {
8244 DCHECK_EQ(input_type, Primitive::kPrimLong);
8245 locations->SetInAt(0, Location::RegisterPairLocation(
8246 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
8247 }
8248
8249 locations->SetOut(calling_convention.GetReturnLocation(result_type));
8250 }
8251}
8252
// Emits code for a primitive type conversion. The paths are:
//  - integral -> long: sign-extend into a register pair;
//  - integral -> narrower integral: mask (char) or sign-extend (byte/short,
//    using seb/seh on R2+, shift pairs otherwise);
//  - int/long -> float/double: inline cvt on R6 (FR=1), runtime call (L2f/L2d)
//    for long on pre-R6;
//  - float/double -> int/long: trunc on R6; on pre-R6 an explicit range/NaN
//    check precedes trunc (see the long comment below), with F2l/D2l runtime
//    calls for the long case;
//  - float <-> double: cvt.s.d / cvt.d.s.
void InstructionCodeGeneratorMIPS::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  // seb/seh exist from MIPS32R2 on; older revisions emulate with shift pairs.
  bool has_sign_extension = codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  DCHECK_NE(input_type, result_type);

  if (result_type == Primitive::kPrimLong && Primitive::IsIntegralType(input_type)) {
    // int-like -> long: copy the low word and fill the high word with the
    // sign bit.
    Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
    Register dst_low = locations->Out().AsRegisterPairLow<Register>();
    Register src = locations->InAt(0).AsRegister<Register>();

    if (dst_low != src) {
      __ Move(dst_low, src);
    }
    __ Sra(dst_high, src, 31);
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    Register dst = locations->Out().AsRegister<Register>();
    // Narrowing from long uses only the low word.
    Register src = (input_type == Primitive::kPrimLong)
        ? locations->InAt(0).AsRegisterPairLow<Register>()
        : locations->InAt(0).AsRegister<Register>();

    switch (result_type) {
      case Primitive::kPrimChar:
        // char is unsigned 16-bit: zero-extend.
        __ Andi(dst, src, 0xFFFF);
        break;
      case Primitive::kPrimByte:
        if (has_sign_extension) {
          __ Seb(dst, src);
        } else {
          __ Sll(dst, src, 24);
          __ Sra(dst, dst, 24);
        }
        break;
      case Primitive::kPrimShort:
        if (has_sign_extension) {
          __ Seh(dst, src);
        } else {
          __ Sll(dst, src, 16);
          __ Sra(dst, dst, 16);
        }
        break;
      case Primitive::kPrimInt:
        // long -> int: just take the low word.
        if (dst != src) {
          __ Move(dst, src);
        }
        break;

      default:
        LOG(FATAL) << "Unexpected type conversion from " << input_type
                   << " to " << result_type;
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    if (input_type == Primitive::kPrimLong) {
      if (isR6) {
        // cvt.s.l/cvt.d.l requires MIPSR2+ with FR=1. MIPS32R6 is implemented as a secondary
        // architecture on top of MIPS64R6, which has FR=1, and therefore can use the instruction.
        Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
        Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
        FRegister dst = locations->Out().AsFpuRegister<FRegister>();
        __ Mtc1(src_low, FTMP);
        __ Mthc1(src_high, FTMP);
        if (result_type == Primitive::kPrimFloat) {
          __ Cvtsl(dst, FTMP);
        } else {
          __ Cvtdl(dst, FTMP);
        }
      } else {
        // Pre-R6: long -> float/double goes through the runtime.
        QuickEntrypointEnum entrypoint = (result_type == Primitive::kPrimFloat) ? kQuickL2f
                                                                                : kQuickL2d;
        codegen_->InvokeRuntime(entrypoint, conversion, conversion->GetDexPc());
        if (result_type == Primitive::kPrimFloat) {
          CheckEntrypointTypes<kQuickL2f, float, int64_t>();
        } else {
          CheckEntrypointTypes<kQuickL2d, double, int64_t>();
        }
      }
    } else {
      // int -> float/double: move to an FPU register and convert in place.
      Register src = locations->InAt(0).AsRegister<Register>();
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      __ Mtc1(src, FTMP);
      if (result_type == Primitive::kPrimFloat) {
        __ Cvtsw(dst, FTMP);
      } else {
        __ Cvtdw(dst, FTMP);
      }
    }
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);

    // When NAN2008=1 (R6), the truncate instruction caps the output at the minimum/maximum
    // value of the output type if the input is outside of the range after the truncation or
    // produces 0 when the input is a NaN. IOW, the three special cases produce three distinct
    // results. This matches the desired float/double-to-int/long conversion exactly.
    //
    // When NAN2008=0 (R2 and before), the truncate instruction produces the maximum positive
    // value when the input is either a NaN or is outside of the range of the output type
    // after the truncation. IOW, the three special cases (NaN, too small, too big) produce
    // the same result.
    //
    // The code takes care of the different behaviors by first comparing the input to the
    // minimum output value (-2**-63 for truncating to long, -2**-31 for truncating to int).
    // If the input is greater than or equal to the minimum, it procedes to the truncate
    // instruction, which will handle such an input the same way irrespective of NAN2008.
    // Otherwise the input is compared to itself to determine whether it is a NaN or not
    // in order to return either zero or the minimum value.
    if (result_type == Primitive::kPrimLong) {
      if (isR6) {
        // trunc.l.s/trunc.l.d requires MIPSR2+ with FR=1. MIPS32R6 is implemented as a secondary
        // architecture on top of MIPS64R6, which has FR=1, and therefore can use the instruction.
        FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
        Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
        Register dst_low = locations->Out().AsRegisterPairLow<Register>();

        if (input_type == Primitive::kPrimFloat) {
          __ TruncLS(FTMP, src);
        } else {
          __ TruncLD(FTMP, src);
        }
        __ Mfc1(dst_low, FTMP);
        __ Mfhc1(dst_high, FTMP);
      } else {
        // Pre-R6: float/double -> long goes through the runtime.
        QuickEntrypointEnum entrypoint = (input_type == Primitive::kPrimFloat) ? kQuickF2l
                                                                               : kQuickD2l;
        codegen_->InvokeRuntime(entrypoint, conversion, conversion->GetDexPc());
        if (input_type == Primitive::kPrimFloat) {
          CheckEntrypointTypes<kQuickF2l, int64_t, float>();
        } else {
          CheckEntrypointTypes<kQuickD2l, int64_t, double>();
        }
      }
    } else {
      FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
      Register dst = locations->Out().AsRegister<Register>();
      MipsLabel truncate;
      MipsLabel done;

      if (!isR6) {
        // Pre-R6 fixup described in the comment above: load INT32_MIN (as a
        // float or double) into FTMP for the range comparison.
        if (input_type == Primitive::kPrimFloat) {
          uint32_t min_val = bit_cast<uint32_t, float>(std::numeric_limits<int32_t>::min());
          __ LoadConst32(TMP, min_val);
          __ Mtc1(TMP, FTMP);
        } else {
          uint64_t min_val = bit_cast<uint64_t, double>(std::numeric_limits<int32_t>::min());
          __ LoadConst32(TMP, High32Bits(min_val));
          __ Mtc1(ZERO, FTMP);
          __ MoveToFpuHigh(TMP, FTMP);
        }

        // If min <= src, truncation handles it correctly on any revision.
        if (input_type == Primitive::kPrimFloat) {
          __ ColeS(0, FTMP, src);
        } else {
          __ ColeD(0, FTMP, src);
        }
        __ Bc1t(0, &truncate);

        // src is NaN or below the minimum: produce 0 for NaN (src != src),
        // INT32_MIN otherwise.
        if (input_type == Primitive::kPrimFloat) {
          __ CeqS(0, src, src);
        } else {
          __ CeqD(0, src, src);
        }
        __ LoadConst32(dst, std::numeric_limits<int32_t>::min());
        __ Movf(dst, ZERO, 0);

        __ B(&done);

        __ Bind(&truncate);
      }

      if (input_type == Primitive::kPrimFloat) {
        __ TruncWS(FTMP, src);
      } else {
        __ TruncWD(FTMP, src);
      }
      __ Mfc1(dst, FTMP);

      if (!isR6) {
        __ Bind(&done);
      }
    }
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    // float <-> double.
    FRegister dst = locations->Out().AsFpuRegister<FRegister>();
    FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
    if (result_type == Primitive::kPrimFloat) {
      __ Cvtsd(dst, src);
    } else {
      __ Cvtds(dst, src);
    }
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
8449
8450void LocationsBuilderMIPS::VisitUShr(HUShr* ushr) {
8451 HandleShift(ushr);
8452}
8453
8454void InstructionCodeGeneratorMIPS::VisitUShr(HUShr* ushr) {
8455 HandleShift(ushr);
8456}
8457
8458void LocationsBuilderMIPS::VisitXor(HXor* instruction) {
8459 HandleBinaryOp(instruction);
8460}
8461
8462void InstructionCodeGeneratorMIPS::VisitXor(HXor* instruction) {
8463 HandleBinaryOp(instruction);
8464}
8465
8466void LocationsBuilderMIPS::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
8467 // Nothing to do, this should be removed during prepare for register allocator.
8468 LOG(FATAL) << "Unreachable";
8469}
8470
8471void InstructionCodeGeneratorMIPS::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
8472 // Nothing to do, this should be removed during prepare for register allocator.
8473 LOG(FATAL) << "Unreachable";
8474}
8475
8476void LocationsBuilderMIPS::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008477 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008478}
8479
8480void InstructionCodeGeneratorMIPS::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008481 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008482}
8483
8484void LocationsBuilderMIPS::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008485 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008486}
8487
8488void InstructionCodeGeneratorMIPS::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008489 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008490}
8491
8492void LocationsBuilderMIPS::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008493 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008494}
8495
8496void InstructionCodeGeneratorMIPS::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008497 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008498}
8499
8500void LocationsBuilderMIPS::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008501 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008502}
8503
8504void InstructionCodeGeneratorMIPS::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008505 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008506}
8507
8508void LocationsBuilderMIPS::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008509 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008510}
8511
8512void InstructionCodeGeneratorMIPS::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008513 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008514}
8515
8516void LocationsBuilderMIPS::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008517 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008518}
8519
8520void InstructionCodeGeneratorMIPS::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008521 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008522}
8523
8524void LocationsBuilderMIPS::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008525 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008526}
8527
8528void InstructionCodeGeneratorMIPS::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008529 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008530}
8531
8532void LocationsBuilderMIPS::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008533 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008534}
8535
8536void InstructionCodeGeneratorMIPS::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008537 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008538}
8539
8540void LocationsBuilderMIPS::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008541 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008542}
8543
8544void InstructionCodeGeneratorMIPS::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008545 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008546}
8547
8548void LocationsBuilderMIPS::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008549 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008550}
8551
8552void InstructionCodeGeneratorMIPS::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008553 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008554}
8555
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008556void LocationsBuilderMIPS::VisitPackedSwitch(HPackedSwitch* switch_instr) {
8557 LocationSummary* locations =
8558 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
8559 locations->SetInAt(0, Location::RequiresRegister());
8560}
8561
// Lowers a packed switch as a chain of compare-and-branch pairs. The running
// value in TMP is biased by -lower_bound and then decremented as the chain
// walks the cases two at a time, so each case needs only a bltz/beqz against
// zero rather than a full comparison against the case value.
void InstructionCodeGeneratorMIPS::GenPackedSwitchWithCompares(Register value_reg,
                                                               int32_t lower_bound,
                                                               uint32_t num_entries,
                                                               HBasicBlock* switch_block,
                                                               HBasicBlock* default_block) {
  // Create a set of compare/jumps.
  Register temp_reg = TMP;
  __ Addiu32(temp_reg, value_reg, -lower_bound);
  // Jump to default if index is negative
  // Note: We don't check the case that index is positive while value < lower_bound, because in
  // this case, index >= num_entries must be true. So that we can save one branch instruction.
  __ Bltz(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqz(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  // Handle two cases per iteration: one bltz covers the odd case, one beqz
  // the even case, after a single subtract of 2.
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltz(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqz(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // The last missing case_value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqz(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    __ B(codegen_->GetLabelOf(default_block));
  }
}
8597
// Lowers a packed switch via a jump table of PC-relative offsets. The table
// is emitted in the constant area; `constant_area` is the base register used
// to address it (ZERO on R6, where the label address is reachable
// PC-relatively, see the callers). Clobbers TMP and AT.
void InstructionCodeGeneratorMIPS::GenTableBasedPackedSwitch(Register value_reg,
                                                             Register constant_area,
                                                             int32_t lower_bound,
                                                             uint32_t num_entries,
                                                             HBasicBlock* switch_block,
                                                             HBasicBlock* default_block) {
  // Create a jump table.
  std::vector<MipsLabel*> labels(num_entries);
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    labels[i] = codegen_->GetLabelOf(successors[i]);
  }
  JumpTable* table = __ CreateJumpTable(std::move(labels));

  // Is the value in range?
  __ Addiu32(TMP, value_reg, -lower_bound);
  if (IsInt<16>(static_cast<int32_t>(num_entries))) {
    // num_entries fits in sltiu's immediate; one instruction suffices.
    __ Sltiu(AT, TMP, num_entries);
    __ Beqz(AT, codegen_->GetLabelOf(default_block));
  } else {
    __ LoadConst32(AT, num_entries);
    __ Bgeu(TMP, AT, codegen_->GetLabelOf(default_block));
  }

  // We are in the range of the table.
  // Load the target address from the jump table, indexing by the value.
  __ LoadLabelAddress(AT, constant_area, table->GetLabel());
  __ ShiftAndAdd(TMP, TMP, AT, 2, TMP);  // TMP = AT + (index << 2).
  __ Lw(TMP, TMP, 0);
  // Compute the absolute target address by adding the table start address
  // (the table contains offsets to targets relative to its start).
  __ Addu(TMP, TMP, AT);
  // And jump.
  __ Jr(TMP);
  __ NopIfNoReordering();
}
8634
8635void InstructionCodeGeneratorMIPS::VisitPackedSwitch(HPackedSwitch* switch_instr) {
8636 int32_t lower_bound = switch_instr->GetStartValue();
8637 uint32_t num_entries = switch_instr->GetNumEntries();
8638 LocationSummary* locations = switch_instr->GetLocations();
8639 Register value_reg = locations->InAt(0).AsRegister<Register>();
8640 HBasicBlock* switch_block = switch_instr->GetBlock();
8641 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
8642
8643 if (codegen_->GetInstructionSetFeatures().IsR6() &&
8644 num_entries > kPackedSwitchJumpTableThreshold) {
8645 // R6 uses PC-relative addressing to access the jump table.
8646 // R2, OTOH, requires an HMipsComputeBaseMethodAddress input to access
8647 // the jump table and it is implemented by changing HPackedSwitch to
8648 // HMipsPackedSwitch, which bears HMipsComputeBaseMethodAddress.
8649 // See VisitMipsPackedSwitch() for the table-based implementation on R2.
8650 GenTableBasedPackedSwitch(value_reg,
8651 ZERO,
8652 lower_bound,
8653 num_entries,
8654 switch_block,
8655 default_block);
8656 } else {
8657 GenPackedSwitchWithCompares(value_reg,
8658 lower_bound,
8659 num_entries,
8660 switch_block,
8661 default_block);
8662 }
8663}
8664
8665void LocationsBuilderMIPS::VisitMipsPackedSwitch(HMipsPackedSwitch* switch_instr) {
8666 LocationSummary* locations =
8667 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
8668 locations->SetInAt(0, Location::RequiresRegister());
8669 // Constant area pointer (HMipsComputeBaseMethodAddress).
8670 locations->SetInAt(1, Location::RequiresRegister());
8671}
8672
8673void InstructionCodeGeneratorMIPS::VisitMipsPackedSwitch(HMipsPackedSwitch* switch_instr) {
8674 int32_t lower_bound = switch_instr->GetStartValue();
8675 uint32_t num_entries = switch_instr->GetNumEntries();
8676 LocationSummary* locations = switch_instr->GetLocations();
8677 Register value_reg = locations->InAt(0).AsRegister<Register>();
8678 Register constant_area = locations->InAt(1).AsRegister<Register>();
8679 HBasicBlock* switch_block = switch_instr->GetBlock();
8680 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
8681
8682 // This is an R2-only path. HPackedSwitch has been changed to
8683 // HMipsPackedSwitch, which bears HMipsComputeBaseMethodAddress
8684 // required to address the jump table relative to PC.
8685 GenTableBasedPackedSwitch(value_reg,
8686 constant_area,
8687 lower_bound,
8688 num_entries,
8689 switch_block,
8690 default_block);
8691}
8692
Alexey Frunzee3fb2452016-05-10 16:08:05 -07008693void LocationsBuilderMIPS::VisitMipsComputeBaseMethodAddress(
8694 HMipsComputeBaseMethodAddress* insn) {
8695 LocationSummary* locations =
8696 new (GetGraph()->GetArena()) LocationSummary(insn, LocationSummary::kNoCall);
8697 locations->SetOut(Location::RequiresRegister());
8698}
8699
8700void InstructionCodeGeneratorMIPS::VisitMipsComputeBaseMethodAddress(
8701 HMipsComputeBaseMethodAddress* insn) {
8702 LocationSummary* locations = insn->GetLocations();
8703 Register reg = locations->Out().AsRegister<Register>();
8704
8705 CHECK(!codegen_->GetInstructionSetFeatures().IsR6());
8706
8707 // Generate a dummy PC-relative call to obtain PC.
8708 __ Nal();
8709 // Grab the return address off RA.
8710 __ Move(reg, RA);
Alexey Frunze06a46c42016-07-19 15:00:40 -07008711 // TODO: Can we share this code with that of VisitMipsDexCacheArraysBase()?
Alexey Frunzee3fb2452016-05-10 16:08:05 -07008712
8713 // Remember this offset (the obtained PC value) for later use with constant area.
8714 __ BindPcRelBaseLabel();
8715}
8716
8717void LocationsBuilderMIPS::VisitMipsDexCacheArraysBase(HMipsDexCacheArraysBase* base) {
8718 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(base);
8719 locations->SetOut(Location::RequiresRegister());
8720}
8721
// Emits the PC-relative base for dex cache array accesses on pre-R6. The
// high half is emitted by EmitPcRelativeAddressPlaceholderHigh and the low
// half here as an addiu with a placeholder immediate; both are fixed up at
// link time via the patch recorded in `info`. Reordering must be off so the
// linker-patched instruction pair stays adjacent.
void InstructionCodeGeneratorMIPS::VisitMipsDexCacheArraysBase(HMipsDexCacheArraysBase* base) {
  Register reg = base->GetLocations()->Out().AsRegister<Register>();
  CodeGeneratorMIPS::PcRelativePatchInfo* info =
      codegen_->NewPcRelativeDexCacheArrayPatch(base->GetDexFile(), base->GetElementOffset());
  CHECK(!codegen_->GetInstructionSetFeatures().IsR6());
  bool reordering = __ SetReorder(false);
  // TODO: Reuse MipsComputeBaseMethodAddress on R2 instead of passing ZERO to force emitting NAL.
  codegen_->EmitPcRelativeAddressPlaceholderHigh(info, reg, ZERO);
  __ Addiu(reg, reg, /* placeholder */ 0x5678);
  __ SetReorder(reordering);
}
8733
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008734void LocationsBuilderMIPS::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
8735 // The trampoline uses the same calling convention as dex calling conventions,
8736 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
8737 // the method_idx.
8738 HandleInvoke(invoke);
8739}
8740
8741void InstructionCodeGeneratorMIPS::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
8742 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
8743}
8744
Roland Levillain2aba7cd2016-02-03 12:27:20 +00008745void LocationsBuilderMIPS::VisitClassTableGet(HClassTableGet* instruction) {
8746 LocationSummary* locations =
8747 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
8748 locations->SetInAt(0, Location::RequiresRegister());
8749 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00008750}
8751
// Loads a method pointer from either the class's embedded vtable (single
// load at a fixed offset) or its IMT (two loads: IMT pointer from the class,
// then the entry at the conflict-table slot).
void InstructionCodeGeneratorMIPS::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    // Vtable entry: one load from the class at the embedded-vtable offset.
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kMipsPointerSize).SizeValue();
    __ LoadFromOffset(kLoadWord,
                      locations->Out().AsRegister<Register>(),
                      locations->InAt(0).AsRegister<Register>(),
                      method_offset);
  } else {
    // IMT entry: first load the IMT pointer from the class, then the method
    // at the element offset within the table.
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kMipsPointerSize));
    __ LoadFromOffset(kLoadWord,
                      locations->Out().AsRegister<Register>(),
                      locations->InAt(0).AsRegister<Register>(),
                      mirror::Class::ImtPtrOffset(kMipsPointerSize).Uint32Value());
    __ LoadFromOffset(kLoadWord,
                      locations->Out().AsRegister<Register>(),
                      locations->Out().AsRegister<Register>(),
                      method_offset);
  }
}
8774
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008775#undef __
8776#undef QUICK_ENTRY_POINT
8777
8778} // namespace mips
8779} // namespace art